+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.YO85YbzMe7 --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [7445/7445 modules configured] [2866/4745 modules rendered]
[2 ymakes processing] [7445/7445 modules configured] [4684/4745 modules rendered]
[2 ymakes processing] [7445/7445 modules configured] [4745/4745 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7451/7451 modules configured] [4745/4745 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
|10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a
|10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a
|12.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a
|13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/libcpp-client-ydb_topic.a
|14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/common/libclient-ydb_topic-common.a
|14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/include/libclient-ydb_persqueue_public-include.a
|14.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_ss_tasks/libcpp-client-ydb_ss_tasks.a
|15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a
|15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/libclient-ydb_types-credentials.a
|15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/fatal_error_handlers/libclient-ydb_types-fatal_error_handlers.a
|15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a
|16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/query_stats/libclient-ydb_table-query_stats.a
|16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a
|17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_proto/libcpp-client-ydb_proto.a
|17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/exceptions/libclient-ydb_types-exceptions.a
|17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_value/libcpp-client-ydb_value.a
|17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_result/libcpp-client-ydb_result.a
|17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/operation/libclient-ydb_types-operation.a
|18.1%| [AR] {BAZEL_DOWNLOAD}
$(B)/ydb/public/sdk/cpp/client/ydb_types/libcpp-client-ydb_types.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/impl/libclient-ydb_persqueue_core-impl.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |18.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_stats/libclient-impl-ydb_stats.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/impl/libclient-ydb_query-impl.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_params/libcpp-client-ydb_params.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |19.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |19.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.global.a |19.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |19.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/oauth2_token_exchange/libydb_types-credentials-oauth2_token_exchange.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_scheme/libcpp-client-ydb_scheme.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/status/libclient-ydb_types-status.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |20.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_import/libcpp-client-ydb_import.a |21.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/impl/libclient-ydb_common_client-impl.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bsconfig/libydb-services-bsconfig.a |21.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_rate_limiter/libcpp-client-ydb_rate_limiter.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam_private/libcpp-client-iam_private.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_export/libcpp-client-ydb_export.a |22.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |22.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |22.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_discovery/libcpp-client-ydb_discovery.a |23.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |23.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_operation/libcpp-client-ydb_operation.a |23.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |25.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |25.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/libcpp-client-ydb_query.a |25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_coordination/libcpp-client-ydb_coordination.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |28.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/libcpp-client-ydb_table.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/impl/libclient-ydb_table-impl.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |33.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/impl/libclient-ydb_persqueue_public-impl.a |34.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |36.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/impl/libclient-ydb_topic-impl.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_datastreams/libcpp-client-ydb_datastreams.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/transactions/libdata_sharing-common-transactions.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |39.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |43.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |47.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a 
|49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |52.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |54.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |48.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |43.7%| PREPARE $(VCS) |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |44.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libsan/liblibs-cxxsupp-libsan.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libsan/liblibs-cxxsupp-libsan.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |46.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IRReader/libllvm14-lib-IRReader.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/libllvm14-lib-ExecutionEngine.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Remarks/libllvm14-lib-Remarks.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Linker/libllvm14-lib-Linker.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx512/liblibs-hyperscan-runtime_avx512.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |47.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm14/libminikql-codegen-llvm14.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm14/libminikql-computation-llvm14.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/TextAPI/libllvm14-lib-TextAPI.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.a 
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |47.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |43.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |43.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |44.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ee1c849f6822d0ae9877348f75.auxcpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ba6443d0375bfb6f8ec8a6f4e9.auxcpp |44.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_ba6443d0375bfb6f8ec8a6f4e9.yasm |44.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_ee1c849f6822d0ae9877348f75.yasm |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/b74ebee90bb7903d84da5b42f7.auxcpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/d328b88e4d44d441b3413acc15.auxcpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_b74ebee90bb7903d84da5b42f7.yasm |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |44.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |44.9%| PREPARE $(YMAKE_PYTHON3-4256832079) |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/requests.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/protocol.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/liblib-Target-X86.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/packet.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |46.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/_d328b88e4d44d441b3413acc15.yasm |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |46.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp 
|47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |47.6%| PREPARE $(CLANG_FORMAT-2313326005) |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Analysis/libllvm14-lib-Analysis.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |49.0%| PREPARE $(LLD_ROOT-2644097164) |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |49.2%| PREPARE $(FLAKE8_PY2-2255386470) |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |49.5%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |49.8%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input_filter.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |50.4%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |50.5%| PREPARE $(FLAKE8_PY3-1006195188) |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |50.7%| PREPARE $(PYTHON) |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |51.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |51.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chunk_client/config.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm14/libminikql-invoke_builtins-llvm14.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |51.6%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/columnar.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/libllvm14-lib-CodeGen.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |51.4%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |51.4%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |51.4%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/actions/cancelation_token.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |51.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |51.5%| PREPARE $(TEST_TOOL_HOST-sbr:7573699373) |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |50.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters.grpc.pb.cc |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.pb.cc |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |49.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_tx_proxy.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.pb.cc |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |48.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |48.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |48.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/shared_cache.grpc.pb.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |47.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/codec/codegen/libyt-codec-codegen.global.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/libyt-codec-codegen.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm14/libyt-comp_nodes-llvm14.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a 
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/raw_client/libcpp-mapreduce-raw_client.a |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |46.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/libyt-comp_nodes-dq.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |46.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.a |45.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/symbols/registry/libpython-symbols-registry.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.a |44.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.a |44.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.a |44.4%| PREPARE $(BLACK_LINTER-sbr:6648883615) |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |44.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |43.6%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |42.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a 
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |43.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |43.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |43.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_b74ebee90bb7903d84da5b42f7.yasm |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |43.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/_a5874f235d39dc6d1df389245e.yasm |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/async_io/libproviders-solomon-async_io.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |43.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_common.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_read_table.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_lookup_table.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |43.0%| PREPARE $(WITH_JDK-sbr:6941855347) |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/extstorage_usage_config.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_zstd.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_raw.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |43.0%| PREPARE $(JDK11-1325468316) |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/kmeans_helper.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/local_kmeans.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |43.0%| PREPARE $(JDK_DEFAULT-4020545899) |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/sample_k.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |43.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |43.1%| PREPARE $(WITH_JDK11-sbr:6936090488) |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |43.1%| PREPARE $(CLANG14-1922233694) |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |43.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |43.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |43.2%| PREPARE $(WITH_JDK17-sbr:6941855347) |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/jwt/libpublic-lib-jwt.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |43.1%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/_102e130291da49e85913bbc5a2.yasm |43.1%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |43.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |42.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |43.2%| PREPARE $(CLANG-1922233694) |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |43.3%| PREPARE $(JDK17-4020545899) |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |43.3%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |43.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |43.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |43.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |43.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |43.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |43.7%| PREPARE $(GDB) |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |43.8%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/_d1f75e9778c0cbb4144b453225.yasm |43.8%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |43.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |43.8%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |43.8%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_34d79c26d54ce2949b2c9fc34e.yasm |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/323be4a89ad1864399ea311db4.auxcpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/9725501498f74c7e358c80ca6f.auxcpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |43.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/b74ebee90bb7903d84da5b42f7.auxcpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |43.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/libcore-io_formats-arrow.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |43.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |43.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCParser/liblib-MC-MCParser.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |44.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/kqp/kqp_query_svc/main.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/libllvm14-lib-Target.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |44.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ProfileData/libllvm14-lib-ProfileData.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_append.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_discard.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_factory.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_dictitems.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain_map.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterable.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_if.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mul.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ifpresent.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/libllvm14-lib-MC.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hopping.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_heap.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_aggrcount.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_guess.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_frombytes.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_coalesce.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flatmap.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_match_recognize.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_way.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_nop.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_unwrap.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multihopping.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_udf.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lookup.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lazy_list.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_filter.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_length.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_mapnext.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tooptional.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_now.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_null.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_pickle.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Object/libllvm14-lib-Object.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_queue.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multimap.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_random.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_listfromrange.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_switch.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_seq.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_prepend.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_range.cpp |44.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_removemember.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_rh_hash.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map_join.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reverse.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_replicate.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reduce.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tostring.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_safe_circular_buffer.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_to_list.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_round.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_source.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_skip.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_state.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Utils/liblib-Transforms-Utils.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_size.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_take.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_toindexdict.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tobytes.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_time_order_recover.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_next_value.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_sort.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_timezone.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_invoke.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_varitem.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_filter.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_weakmember.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_visitall.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chain_map.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chopper.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_while.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_map.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_condense.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_zip.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_top_sort.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_withcontext.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_combine.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold1.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromyson.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterator.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_logical.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join_dict.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromstring.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_scalar_apply.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_apply.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hasitems.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flow.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_group.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_addmember.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join_imp.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_exists.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_factory.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_count.cpp |44.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_container.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_skiptake.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_just.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_getelem.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_func.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_compress.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_callable.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_check_args.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_logical.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain1_map.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_coalesce.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_collect.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_decimal.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_contains.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_top.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mod.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chopper.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_some.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_exists.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_enumerate.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense1.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/020e2413fa05acf6fcc0b6a0a8.auxcpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_extend.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_element.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ensure.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_div.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.a |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/_020e2413fa05acf6fcc0b6a0a8.yasm |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_combine.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4ffb80773cd819c6f64ae3337b.yasm |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/4ffb80773cd819c6f64ae3337b.auxcpp |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4e6881630bb7d87e9ab9f3d91f.yasm |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/4e6881630bb7d87e9ab9f3d91f.auxcpp |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_c51b3218d1d85449db60fbe731.yasm |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_sum.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/c51b3218d1d85449db60fbe731.auxcpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_if.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/b74ebee90bb7903d84da5b42f7.auxcpp |44.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_b74ebee90bb7903d84da5b42f7.yasm |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/a0eea144e748338d07d6e2c675.auxcpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_blocks.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_map_join.cpp |44.7%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/common/_a0eea144e748338d07d6e2c675.yasm |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_minmax.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |44.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |44.9%| PREPARE $(CLANG16-1380963495) |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Demangle/libllvm14-lib-Demangle.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |44.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/abstract/liblibrary-formats-arrow-accessor-abstract.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |45.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |45.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/BinaryFormat/libllvm14-lib-BinaryFormat.a |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |45.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/logger/libydb-library-logger.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/AsmParser/libllvm14-lib-AsmParser.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/libcpp-client-ydb_common_client.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |45.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/IPO/liblib-Transforms-IPO.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/35c2f7a04f289a9f72763c2025.auxcpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |45.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_35c2f7a04f289a9f72763c2025.yasm |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |45.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_61e4b816cf79b7606ca15b5877.yasm |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/b74ebee90bb7903d84da5b42f7.auxcpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/61e4b816cf79b7606ca15b5877.auxcpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_todict.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |45.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_b74ebee90bb7903d84da5b42f7.yasm |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |45.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/c36460c1f3f976caa23b5bd087.auxcpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |45.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_ae6accdc802b0e073e8d19156b.yasm |45.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_b74ebee90bb7903d84da5b42f7.yasm |45.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_c36460c1f3f976caa23b5bd087.yasm |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/b74ebee90bb7903d84da5b42f7.auxcpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ae6accdc802b0e073e8d19156b.auxcpp |45.3%| PREPARE $(CLANG18-390461695) |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/kqp/kqp_indexes/main.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/client/liblibrary-grpc-client.a |45.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |45.4%| PREPARE $(CLANG-1735056821) |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |45.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/libpublic-lib-operation_id.a |45.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_9725501498f74c7e358c80ca6f.yasm |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |45.5%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |45.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |45.6%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |45.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |45.7%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/obfuscate/liblibrary-persqueue-obfuscate.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_driver/libcpp-client-ydb_driver.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a >> endpoint_determiner.py::black [GOOD] >> settings.py::black [GOOD] >> __main__.py::black [GOOD] >> test.py::py2_flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam/common/libclient-iam-common.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |45.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a >> conftest.py::black [GOOD] |45.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser_public/liblibrary-persqueue-topic_parser_public.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |45.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_2549b9c50b780e2386d838ff17.yasm |45.8%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp >> test_join.py::black [GOOD] >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ef8d28aaeb50572325dd14d9b4.auxcpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |45.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_b74ebee90bb7903d84da5b42f7.yasm >> test.py::py2_flake8 [GOOD] |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/b74ebee90bb7903d84da5b42f7.auxcpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |45.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_ef8d28aaeb50572325dd14d9b4.yasm |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/2549b9c50b780e2386d838ff17.auxcpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp >> test.py::py2_flake8 [GOOD] |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/mdb_mock/black >> __main__.py::black [GOOD] |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a 
|44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/utils/black >> settings.py::black [GOOD] |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/login/libydb_types-credentials-login.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |45.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> 
test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |45.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> run_tests.py::flake8 [GOOD] |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a >> test.py::flake8 [GOOD] |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_extension/libcpp-client-ydb_extension.a |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |45.5%| 
[TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_dynumber.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a >> test_ttl.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |45.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |45.7%| [TS] 
{asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/mdb_mock/flake8 >> __main__.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/draft/libcpp-client-draft.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |45.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |46.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |46.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |46.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/dynumber/flake8 >> test_dynumber.py::flake8 [GOOD] |46.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |46.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_followers.py::flake8 [GOOD] |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |46.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |46.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp >> test_sql.py::flake8 [GOOD] |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |46.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |46.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops 
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp >> test_schemeshard_limits.py::flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp >> tpc_tests.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp >> test_liveness_wardens.py::flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |46.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |46.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> gen-report.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] |46.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp >> test_cp_ic.py::flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp >> test_dispatch.py::flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_retry.py::flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp >> test_retry_high_rate.py::flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> compare.py::flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |46.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> 
test_recompiles_requests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_config_with_metadata.py::flake8 [GOOD] |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> custom_hooks.py::flake8 [GOOD] >> fq_client.py::flake8 [GOOD] >> kikimr_metrics.py::flake8 [GOOD] >> kikimr_runner.py::flake8 [GOOD] >> kikimr_utils.py::flake8 [GOOD] >> mvp_mock.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |46.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> test.py::py2_flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp >> test.py::py2_flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |46.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_tenants.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp >> test_actorsystem.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |46.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp >> test_self_heal.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp >> test_tablet_channel_migration.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> endpoint_determiner.py::flake8 [GOOD] >> settings.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp >> test.py::flake8 [GOOD] |46.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |46.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |46.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/fq_runner/flake8 >> mvp_mock.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp >> test_kqprun_recipe.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp >> test_mem_alloc.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp >> test_metrics_cleanup.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |46.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |47.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |47.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp 
|47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |47.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |47.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |47.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |47.1%| RESOURCE $(sbr:4966407557) |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/client/bin/sqs |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/utils/flake8 >> settings.py::flake8 [GOOD] |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |47.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |47.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |47.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/liblib-operation_id-protos.a |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |47.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |47.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |47.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |47.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_323be4a89ad1864399ea311db4.yasm |47.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |47.4%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |47.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |47.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/table_service_config.{pb.h ... 
grpc.pb.h} |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |47.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |47.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |47.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/b74ebee90bb7903d84da5b42f7.auxcpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |47.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_b74ebee90bb7903d84da5b42f7.yasm |47.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_7b240b071767564ebe8b43187b.yasm |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/7b240b071767564ebe8b43187b.auxcpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/833dd80e79c977aa58b8ac97ec.auxcpp |47.6%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/query_cache/_833dd80e79c977aa58b8ac97ec.yasm |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/client/bin/main.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |47.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_{34d79c26d54ce2949b2c9fc34e.yasm ... bfff40605939b1d940cec3e5dc.rodata} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |47.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |47.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |47.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |47.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |47.7%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap.cc |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |47.8%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/__main__.py.yapyc3 |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |47.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/protoc/protoc |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |47.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/issue_id.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/yq_internal.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |48.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/feather.cc |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_builder.cc |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/parser.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Support/libllvm14-lib-Support.a |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/extension_type.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/options.cc |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/path_util.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/caching.cc 
|48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/dictionary.cc |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/file.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/transform.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/options.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_nested.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/filesystem.cc |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/rescompressor/rescompressor |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/enum_parser/enum_parser/enum_parser |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_conversion.cc |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader_internal.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/exception.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/reader.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/writer.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption_internal_nossl.cc |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... 
grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_decryptor.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/murmur3.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/platform.cc |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_reader.cc |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_validity.cc |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_reader.cc |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/b74ebee90bb7903d84da5b42f7.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/86d3e302364c3382a2b168ce57.auxcpp |48.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_c00748ee9f4a8df0a497895714.yasm |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/c43757827e03b03f81c937ad5a.auxcpp |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_86d3e302364c3382a2b168ce57.yasm |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/_c43757827e03b03f81c937ad5a.yasm |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_sort.cc |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_b74ebee90bb7903d84da5b42f7.yasm |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/0c234afaa407a4418f9cfff531.auxcpp |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_4795fb4850b9d88b2c7b5e8ec2.yasm |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_b74ebee90bb7903d84da5b42f7.yasm |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |48.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_a0ac6bff4d1f5e5b56eb56eb04.yasm |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/4795fb4850b9d88b2c7b5e8ec2.auxcpp |48.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_51b73721929f13078ecfb118b8.yasm |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |48.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_0c234afaa407a4418f9cfff531.yasm |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/51b73721929f13078ecfb118b8.auxcpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/b74ebee90bb7903d84da5b42f7.auxcpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/libcpp-client-ydb_federated_topic.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/a0ac6bff4d1f5e5b56eb56eb04.auxcpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IR/libllvm14-lib-IR.a |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/c00748ee9f4a8df0a497895714.auxcpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/impl/libclient-ydb_federated_topic-impl.a |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |48.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_b74ebee90bb7903d84da5b42f7.yasm |48.5%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/0edefb735db84420d76f6da5ad.auxcpp |48.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_0edefb735db84420d76f6da5ad.yasm |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.a |48.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_3d91683202a822f8cc1b66c627.yasm |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/3d91683202a822f8cc1b66c627.auxcpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/b74ebee90bb7903d84da5b42f7.auxcpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/memory_pool.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/json/writer/libcpp-json-writer.a |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_reader.cc |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |48.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |48.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |48.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/util/charset/libutil-charset.a |48.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/protos/libcore-config-protos.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/grpc/src/compiler/grpc_plugin_support/libsrc-compiler-grpc_plugin_support.a |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/cpp_plugin.cc |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |48.4%| [AR] {BAZEL_DOWNLOAD, 
FAILED} $(B)/library/cpp/cppparser/liblibrary-cpp-cppparser.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/enum_parser/parse_enum/libtools-enum_parser-parse_enum.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/enum_parser/enum_parser/main.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/utils/libcore-config-utils.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/rescompressor/main.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/protobuf/libpublic-lib-protobuf.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/util/libyutil.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/tools/protobuf_plugin/main.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |48.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/buffered.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/compressed.cc |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/json/liblibrary-cpp-json.a |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/stdio.cc |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/py3cc/py3cc |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |48.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/utf8.cc |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |48.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema_internal.cc |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/metadata_internal.cc |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_boolean.cc |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |48.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_scanner.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zstd.cc |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_encryptor.cc |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_writer.cc |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/writer.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunker.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/properties.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/trie.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_brotli.cc |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_writer.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/musl/strptime.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/metadata.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/util_internal.cc |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Message.fbs.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/feather.fbs.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/int_util.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/parser.rl6.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Tensor.fbs.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_base.cc |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/thread_pool.cc |48.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/utilex/random.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/77ff0e3be10902817b4214e3df.auxcpp |48.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_77ff0e3be10902817b4214e3df.yasm |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ba4cba5dcefc679d9e6b854354.auxcpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_ba4cba5dcefc679d9e6b854354.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/b74ebee90bb7903d84da5b42f7.auxcpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/013701e9e21d7e09e202127262.auxcpp |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_b74ebee90bb7903d84da5b42f7.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |49.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_013701e9e21d7e09e202127262.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/b74ebee90bb7903d84da5b42f7.auxcpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e4737a1de2b548fed21b6733c7.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_b74ebee90bb7903d84da5b42f7.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/e8e379b61234dd7ed260efcc27.auxcpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/scalar.cc |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/yqlrun/http/libtools-yqlrun-http.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |49.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |49.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |49.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_b74ebee90bb7903d84da5b42f7.yasm |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/gateway_spec.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/51e39740d1d758840624baee66.auxcpp |49.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_51e39740d1d758840624baee66.yasm |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/43bac175e95a3fec996063d2b4.auxcpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/yqlrun.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/b74ebee90bb7903d84da5b42f7.auxcpp |49.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_43bac175e95a3fec996063d2b4.yasm |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/b74ebee90bb7903d84da5b42f7.auxcpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/filesystem_error.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/condition_variable_destructor.cpp |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |47.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/library/cpp/colorizer/output.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/json2proto.cpp |47.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e8e379b61234dd7ed260efcc27.yasm |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/is_equal.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/colorizer/colors.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/chrono.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/modchooser.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_network.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_string.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/any.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/algorithm.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/atomic.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |47.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_b74ebee90bb7903d84da5b42f7.yasm |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/e4737a1de2b548fed21b6733c7.auxcpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/completion_generator.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/directory_iterator.cpp |47.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_48a078239a7d32a31a8d7798bb.yasm |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/condition_variable.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/shared_mutex.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/operations.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/path.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/functional.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/memory.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/memory_resource.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/mutex.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/future.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/ios.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/mutex_destructor.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/typeinfo.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/new_helpers.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/random_shuffle.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/new_handler.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/stdexcept.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/legacy_pointer_safety.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/dynumber/48a078239a7d32a31a8d7798bb.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_rep_btree_navigator.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/tzdb_list.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/new.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/charconv_parse.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/tz.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/ios.instantiations.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/string.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_internal.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/system_error.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/charconv_bigint.cc |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_rep_btree_reader.cc |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/strstream.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/ryu/f2s.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_rep_crc.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/ryu/d2fixed.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/memutil.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/ostringstream.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/arg.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cordz_info.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/iostream.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cordz_handle.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/extension.cc |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cordz_functions.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/check_op.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/globals.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/fnmatch.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/nullguard.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/log_sink_set.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/chi_square.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/distribution_test_util.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/randen_detect.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/randen.cc |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/conditions.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/ryu/d2s.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/randen_hwaes.cc |47.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/kernel_timeout.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/randen_round_keys.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/log_format.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/randen_slow.cc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/parser.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/gaussian_distribution.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/futex_waiter.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/create_thread_identity.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/die_if_null.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/hash.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/proto.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/flags.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/escaping.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/seed_sequences.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/pthread_waiter.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/win32_waiter.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/stdcpp_waiter.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/waiter_base.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/seed_material.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/bind.cc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/discrete_distribution.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/sem_waiter.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/seed_gen_exception.cc |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/regex.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/barrier.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/notification.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/utf8.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/stringify_sink.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/low_level_alloc.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/mutex.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/spinlock.cc |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/raw_logging.cc |47.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/restricted/abseil-cpp/absl/base/internal/poison.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/log_sink.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/strerror.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/sysinfo.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/throw_delegate.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc_non_temporal_memcpy.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/unscaledcycleclock.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/crc32c.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/cpu_detect.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compare.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc_cord_state.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/spinlock_wait.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/variant.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/cycleclock.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/cord.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/thread_identity.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/status/statusor.cc |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/ascii.cc |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc_x86_arm_combined.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/cord_buffer.cc |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/cord_analysis.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/charconv.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/match.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/sysmodule.c |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/str_cat.cc |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rc2/rc2cfb64.c |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_gen.c |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_crpt.c |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/escaping.cc |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_add.c |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/pk12err.c |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_sign.c |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_smime.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_kiss.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_all.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_decr.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/bio_pk7.c |48.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_xaux.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_lib.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pvkfmt.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/seed/seed_ofb.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_lib.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/objects/obj_lib.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_err.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pkcs7err.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_cl.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_doit.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/v3_ocsp.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_x509.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/ocb128.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/cbc128.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_err.c |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/ctr128.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/mdc2/mdc2_one.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/objects/obj_err.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_asn.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_verify.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/kdf/scrypt.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_pkey.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/wrap128.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/hmac/hmac.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/hmac/hm_pmeth.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/lhash/lh_stats.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_key.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/mdc2/mdc2dgst.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sha/sha1dgst.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_ext.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_sha1.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_md5.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_null.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_err.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/idea/i_cbc.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/idea/i_ofb64.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_sha3.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_aes_cbc_hmac_sha1.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_idea.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/err/err_all.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_old.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_enc.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_enc.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_aes.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/cmeth_lib.c |48.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/evp/m_ripemd.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_table.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/c_alld.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_cipher.c |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_openssl.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/bio_ok.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nist.c |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_ctrl.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecdsa_ossl.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/bio_md.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecdh_ossl.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_print.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/err/err.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_seed.c |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_pbe.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_err.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/curve448.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_lib.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_mont.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec2_smpl.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_check.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_gen.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_key.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/fcrypt.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_meth.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/cfb64enc.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_err.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_api.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/comp/comp_err.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_sct.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/xcbc_enc.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_asn1.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_oct.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_kari.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/fcrypt_b.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_lib.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_sap.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/comp/c_zlib.c |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/ecb_enc.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_err.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/comp/comp_lib.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_asn1.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm2/sm2_err.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/scalar.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cmac/cmac.c |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/cfb64ede.c 
|48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_lib.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_prn.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cast/c_ecb.c |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/camellia/cmll_ecb.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cmac/cm_ameth.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_word.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_sqr.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_env.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cast/c_skey.c |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_recp.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cast/c_enc.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_shift.c |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm2/sm2_sign.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_exp2.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_depr.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_gf2m.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_ctx.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_prime.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_add.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/blake2/m_blake2b.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_rand.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_dh.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_fd.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_file.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_div.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_bio.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/srp/srp_lib.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_mpi.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_const.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bf_null.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/b_sock.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/b_print.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bf_nbio.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_mod.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_kron.c |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bf/bf_ofb64.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bf/bf_ecb.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/arch/async_win.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/async.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/srp/srp_vfy.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/async_err.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/b_addr.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/asm/x86_64-gcc.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_lib.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_info.c |49.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/t_pkey.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_int64.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/p8_pkey.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_typ.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/stack/stack.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/i2d_pu.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/p5_scrypt.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/d2i_pr.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_utl.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/evp_asn1.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_wrap.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pymath.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_ige.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_core.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/ameth_lib.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pystate.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/store_err.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/loader_file.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_strex.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_utf8.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_octet.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_object.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pathconfig.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/preconfig.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_ecb.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn1_lib.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/marshal.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pylifecycle.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_long.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_cbc.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/frozen.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/bio_asn1.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getargs.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pyarena.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/modsupport.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/mystrtoul.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getopt.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/formatter_unicode.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/mysnprintf.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_cfb.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/bootstrap_hash.c |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/ceval.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_type.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/peg_api.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/parser.c |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a 
|49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |49.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/unicodectype.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/ast_unparse.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/unionobject.c |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/object.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/myreadline.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/interpreteridobject.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/genericaliasobject.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/typevarobject.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/genobject.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/exceptions.c |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/call.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/enumobject.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/fileobject.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/cellobject.c |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/bytesobject.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/timemodule.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/setobject.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/spwdmodule.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/_warnings.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/socketmodule.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/signalmodule.c |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/unicodedata.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/md5module.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/sha3module.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/pwdmodule.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/mathmodule.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/syslogmodule.c |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/selectmodule.c |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/mmapmodule.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/posixmodule.c |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/termios.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_cn.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/audioop.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_iso2022.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/bytes_methods.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_posixsubprocess.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_jp.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_localemodule.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_threadmodule.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_heapqmodule.c |50.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/sixstep.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/fnt.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/gcmodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/iobase.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/mpdecimal.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/fourstep.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/convolute.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/mpalloc.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/constants.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/context.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/transpose.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_pickle.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_elementtree.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_multiprocessing/posixshmem.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_functoolsmodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/_decimal.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/basearith.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_hacl/Hacl_Hash_MD5.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/getbuildinfo.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_zoneinfo.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/numbertheory.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ctypes/cfield.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_cryptmodule.c |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/textio.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_blake2/blake2s_impl.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_csv.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_lib.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_contextvarsmodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_blake2/blake2b_impl.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_collectionsmodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_hacl/Hacl_Hash_SHA3.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_hacl/Hacl_Hash_SHA1.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/binascii.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ctypes/_ctypes.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/difradix2.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/_iomodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ctypes/stgdict.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/io.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_hk.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_hashopenssl.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_req_print.c |50.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/tools/python3/Modules/_typingmodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/bufferedio.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_codecsmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_struct.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sre/sre.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_statisticsmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/bytesio.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_stat.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/errnomodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_json.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/winconsoleio.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/fileio.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_weakref.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_datetimemodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_lsprof.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_multiprocessing/multiprocessing.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/symtablemodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_lzmamodule.c |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_multiprocessing/semaphore.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ssl.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/multibytecodec.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_operator.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_opcode.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_queuemodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_kr.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_xxinterpchannelsmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/config.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_xxsubinterpretersmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/abstract.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/atexitmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_xxtestfuzz/fuzzer.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_tracemalloc.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_xxtestfuzz/_xxtestfuzz.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/itertoolsmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cjkcodecs/_codecs_tw.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/getpath.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/arraymodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/cmathmodule.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/sha2module.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/faulthandler.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/fcntlmodule.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/crt.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_asynciomodule.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_rsp_print.c 
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_blake2/blake2module.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_hacl/Hacl_Hash_SHA2.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/methodobject.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_err.c |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/grpmodule.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/main.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_asn1.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_io/stringio.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_conf.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_rsp_sign.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_rsp_utils.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ui/ui_err.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/txt_db/txt_db.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_verify_ctx.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/by_dir.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ui/ui_lib.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_req_utils.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_txt.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ui/ui_util.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_vfy.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/sha1module.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/ia64.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/arm64.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/powerpc.c |50.4%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/ec/ecp_nistz256-x86_64.s |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma_encoder_optimum_fast.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma2_decoder.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/fastpos_table.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lz/lz_decoder.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/vli_size.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/outqueue.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/index.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/hardware_cputhreads.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_decoder.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/file_info.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/easy_buffer_encoder.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/auto_decoder.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_buffer_decoder.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_time.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_fips.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/mem.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/getenv.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ebcdic.c |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cversion.c |50.5%| [AS] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/asm/linux/crypto/bn/x86_64-gf2m.s |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_skey.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/statem_srvr.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/statem_quic.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_req.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_attrib.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_lib.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/by_file.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_att.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_def.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/whrlpool/wp_dgst.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/t_req.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_meth.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_lu.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_v3.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_vpm.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509cset.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_trs.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509rset.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509spki.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509type.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_crl.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_name.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_x509a.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_x509.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_exten.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ctype.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_cache.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_admis.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_addr.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_node.c |50.5%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/sha/sha256-x86_64.s |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_tree.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_akey.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_asid.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_akeya.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_alt.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_bcons.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_err.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_bitst.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_extku.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_crld.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_genn.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_conf.c |50.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/x509v3/v3_ia5.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_r2x.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_enum.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_info.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/engines/e_afalg.c |50.5%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/engines/e_padlock-x86_64.s |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/engines/e_capi.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_set.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_cpols.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/d1_lib.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/bio_ssl.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/d1_srtp.c |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/methods.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/engines/e_padlock.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/packet.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/pqueue.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/d1_msg.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/dtls1_bitmap.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/s3_cbc.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/ssl3_buffer.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/s3_enc.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/ssl3_record_tls13.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/rec_layer_d1.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/s3_msg.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/s3_lib.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/ssl3_record.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_cert.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_lib.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_ciph.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_err.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_conf.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_sess.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_quic.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_mcnf.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_init.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_rsa.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_txt.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_utst.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_stat.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/extensions.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/extensions_clnt.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/extensions_srvr.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/extensions_cust.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/statem.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/statem_clnt.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/statem/statem_dtls.c |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/t1_lib.c |50.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/ssl/statem/statem_lib.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/tls13_enc.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/t1_enc.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/t1_trce.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/tls_srp.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_int.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_ncons.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_lib.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_pci.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_pcons.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_pku.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_pmaps.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_pcia.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_prn.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_purp.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/ssl_asn1.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_sxnet.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_tlsf.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3_utl.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/v3err.c |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/aes/aesni-sha1-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/aes/aesni-mb-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/aes/aesni-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/aes/aesni-sha256-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/aes/vpaes-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/bn/rsaz-avx2.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/bn/rsaz-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/bn/x86_64-mont5.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/bn/x86_64-mont.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/camellia/cmll-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/ec/x25519-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/md5/md5-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/modes/aesni-gcm-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/modes/ghash-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/rc4/rc4-md5-x86_64.s |50.7%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/rc4/rc4-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/sha/keccak1600-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/sha/sha1-mb-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/sha/sha1-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/asm/linux/crypto/sha/sha256-mb-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/sha/sha512-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/whrlpool/wp-x86_64.s |50.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/asm/linux/crypto/x86_64cpuid.s |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cpt_err.c |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cryptlib.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/mem_sec.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/mem_dbg.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ex_data.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/init.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_dir.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_fopen.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/threads_pthread.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_init.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/threads_none.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/o_str.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/uid.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/threads_win.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/common/tuklib_physmem.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/common/tuklib_cpucores.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/check.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/crc32_table.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/crc32_fast.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/crc64_fast.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/crc64_table.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/check/sha256.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/alone_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/alone_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_buffer_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_header_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/easy_decoder_memusage.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_util.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/easy_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/common.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/block_header_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/easy_preset.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_buffer_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/easy_encoder_memusage.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_buffer_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_common.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_flags_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/lzma/liblzma/common/filter_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/filter_flags_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/hardware_physmem.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/index_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/index_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/microlzma_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/index_hash.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_buffer_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_buffer_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/microlzma_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/lzip_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/string_conversion.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_decoder_mt.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_flags_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_flags_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_encoder_mt.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/vli_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/stream_flags_common.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/common/vli_encoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/delta/delta_decoder.c |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/delta/delta_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/delta/delta_common.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lz/lz_encoder.c |50.9%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/_ad2e0a44e0b30c8fb934928212.yasm |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lz/lz_encoder_mf.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma2_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/arm.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma_encoder_presets.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/lzma/lzma_encoder_optimum_normal.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/rangecoder/price_table.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/riscv.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/armthumb.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/simple_decoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/simple_encoder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/sparc.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/simple_coder.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/ssl/record/rec_layer_s3.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_data.c |50.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/lzma/liblzma/simple/x86.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509name.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509v3/pcy_map.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_pubkey.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_req.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_d2.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x_all.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_ext.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_cmp.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/x509_obj.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/t_x509.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ui/ui_openssl.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/x509/t_crl.c |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ui/ui_null.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ctypes/callbacks.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/boolobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ts/ts_rsp_verify.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/store_register.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/resource.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/pyexpat.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/rotatingtree.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/bytearrayobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/namespaceobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/descrobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/odictobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/store_strings.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/floatobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/funcobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/frameobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/zlibmodule.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/classobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/obmalloc.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/string_parser.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/codeobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/complexobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/Python-tokenize.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/sliceobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/dictobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/iterobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/longobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/listobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/future.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/memoryobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/moduleobject.c |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/pegen.c |51.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getplatform.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/picklebufobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/Python-ast.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/asdl.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/structseq.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/fileutils.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/rangeobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/assemble.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/tupleobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/typeobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/unicodeobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/ast.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/ast_opt.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/weakrefobject.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/action_helpers.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/ceval_gil.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/pegen_errors.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/context.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/tokenizer.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Parser/token.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/importdl.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/store_lib.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/errors.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/dynload_shlib.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/store/store_init.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/flowgraph.c |51.1%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/asm_trampoline.S |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/dtoa.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/bltinmodule.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/codecs.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getcopyright.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getversion.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/deepfreeze/deepfreeze.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/import.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/dynamic_annotations.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/compile.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/frame.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_digest.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/getcompiler.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pyctype.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pyfpe.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/hamt.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_int.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/initconfig.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/hashtable.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pyhash.c |51.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/tools/python3/Python/instrumentation.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_print.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/legacy_tracing.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pystrcmp.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pystrhex.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/intrinsics.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_ofb.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pythonrun.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pystrtod.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aria/aria.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/bio_ndef.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn1_par.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/perf_trampoline.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/aes/aes_misc.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_d2i_fp.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_scn.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_bitstr.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn_moid.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn_mime.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_i2d_fp.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_gentm.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_sig.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_utctm.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn_pack.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_time.c |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_dup.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_mbstr.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_verify.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_sign.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/d2i_pu.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn1_gen.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn1_err.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn1_item_list.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/asn_mstbl.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/p5_pbev2.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_dec.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/f_int.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/i2d_pr.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_new.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_prn.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_bignum.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bf/bf_skey.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/n_pkey.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/f_string.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/nsseq.c |51.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/asn1/p5_pbe.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/t_spki.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/arch/async_posix.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_enc.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bf_lbuf.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/t_bitst.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm4/sm4.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bf/bf_enc.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_blind.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/tasn_fre.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_spki.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_pkey.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_algor.c |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/b_dump.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/arch/async_null.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/x_val.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/async/async_wait.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bio_err.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bf/bf_cfb64.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/b_sock2.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bio_meth.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_ess.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bio_lib.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bf_buff.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bio_cb.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_acpt.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_print.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_exp.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_conn.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_dgram.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_gcd.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_log.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_null.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_mem.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_x931p.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bio/bss_sock.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/rsaz_exp.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/blake2/blake2b.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/blake2/m_blake2s.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_err.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_intern.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/blake2/blake2s.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_nist.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_mont.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm3/sm3.c |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_smime.c |51.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_mul.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_att.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/camellia/cmll_misc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_io.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_sqrt.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cmac/cm_pmeth.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/bn/bn_srp.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/buffer/buf_err.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_def.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/camellia/cmll_cfb.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_win32.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_dd.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/buffer/buffer.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_b64.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/str2key.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_pwri.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/camellia/cmll_ctr.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm3/m_sm3.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cast/c_cfb64.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/camellia/cmll_ofb.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_kdf.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cast/c_ofb64.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_mall.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_lib.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_cd.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_mod.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/cms/cms_sd.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_err.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_rfc5114.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/conf/conf_ssl.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_log.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_policy.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm2/sm2_pmeth.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_sct_ctx.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_vfy.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ct/ct_x509v3.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/cbc_cksm.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_err.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/cfb_enc.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_meth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/cbc_enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/des_enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/ofb64ede.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_pmeth.c |51.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/des/ofb64enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/ecb3_enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/ofb_enc.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/qud_cksm.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/pcbc_enc.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/rand_key.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/des/set_key.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_asn1.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_ameth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_pmeth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_prn.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_depr.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_key.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_sign.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_gen.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_check.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_prn.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dh/dh_rfc7919.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_oct.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_ameth.c |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/eddsa.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_ossl.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_lib.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_err.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/curve448_tables.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_vrf.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_depr.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_lib.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dsa/dsa_err.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_vms.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_openssl.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_err.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/dso/dso_dl.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/arch_32/f_impl.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec2_oct.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_curve.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_key.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve25519.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecdsa_sign.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/curve448/f_generic.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_ameth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_all.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecdsa_vrf.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_cnf.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nistz256.c |51.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/ec/eck_prn.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_pkey.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_asn1.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_rdrand.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_pmeth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_md4.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_mult.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sm2/sm2_crypt.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_kmeth.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_oct.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nistp224.c |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nistp256.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecdh_kdf.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ec_cvt.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_list.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_smpl.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_asnmth.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecx_meth.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nistp521.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_dh.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_eckey.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_pkmeth.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_dyn.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/encode.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_init.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_fat.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_camellia.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/eng_lib.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/gcm128.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_dsa.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_digest.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_xcbc_d.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/digest.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/err/err_prn.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_rand.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/engine/tb_rsa.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_rc4_hmac_md5.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_chacha20_poly1305.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/bio_b64.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/bio_enc.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/c_allc.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_aes_cbc_hmac_sha256.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_sm4.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/siphash/siphash_pmeth.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_aria.c |51.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_bf.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_cast.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_key.c |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_sign.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_des.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_des3.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_md2.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_rc2.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_rc4.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/siphash/siphash.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/siphash/siphash_ameth.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/pbe_scrypt.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/e_rc5.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_sigver.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/pmeth_lib.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_cnf.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_wp.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/names.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_lib.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p5_crpt.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/evp_pkey.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_seal.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_md5_sha1.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_null.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/m_mdc2.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/kdf/hkdf.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p5_crpt2.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/md5/md5_one.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/cfb128.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_dec.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/md4/md4_dgst.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/md5/md5_dgst.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sha/sha512.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_lib.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/md4/md4_one.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/p_open.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/pmeth_fn.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/hmac/hm_ameth.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/idea/i_cfb64.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/ccm128.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/idea/i_skey.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/evp/pmeth_gn.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/cts128.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/kdf/kdf_err.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/idea/i_ecb.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sha/sha256.c |51.7%| [PB] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/kdf/tls1_prf.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/ofb128.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_srv.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/lhash/lhash.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/modes/xts128.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_info.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_lib.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_ht.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_attr.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/objects/o_names.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_pk8.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_vfy.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pem/pem_oth.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/sha/sha1_one.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/objects/obj_dat.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_prn.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_sbag.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_p8d.c |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_crt.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_init.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/objects/obj_xref.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/seed/seed_ecb.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_asn1.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_mutl.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ocsp/ocsp_asn.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_attr.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_npas.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_p8e.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/seed/seed_cfb.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rc2/rc2ofb64.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs12/p12_utl.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/rand_egd.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rc2/rc2_ecb.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/pkcs7/pk7_mime.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/poly1305/poly1305.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/rand_win.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/drbg_ctr.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/poly1305/poly1305_pmeth.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/poly1305/poly1305_ameth.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rc2/rc2_cbc.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/seed/seed_cbc.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/drbg_lib.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/rand_unix.c |51.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/openssl/crypto/rand/randfile.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/rand_lib.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rc2/rc2_skey.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rand/rand_err.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_x931g.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_crpt.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_asn1.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ripemd/rmd_dgst.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/seed/seed.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_depr.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_x931.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_ameth.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_lib.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_meth.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ripemd/rmd_one.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_mp.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_ssl.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_sign.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_saos.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_err.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_none.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_chk.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_pmeth.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_ossl.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_prn.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_oaep.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_pk1.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/rsa/rsa_pss.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/asn1/a_strnid.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/pytime.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/openssl/crypto/ec/ecp_nistputil.c |51.8%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/structmember.c |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/symtable.c |51.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/_{d1f75e9778c0cbb4144b453225.yasm ... 
4fc6ad86f2e153876bf4a9e18e.rodata} |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/specialize.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/suggestions.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/tracemalloc.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/traceback.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Python/thread.c |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |51.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_randommodule.c |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_ctypes/callproc.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_decimal/libmpdec/mpsignal.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Objects/capsule.c |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_bz2module.c |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_bisectmodule.c |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/str_replace.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_abc.c |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/string_view.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc_memcpy_x86_arm_combined.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/substitute.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/vector.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/str_split.cc |51.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... 
grpc.pb.h} |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/verbose_abort.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/95a0a5470f134f5bf26138089b.auxcpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |52.0%| [AS] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/_95a0a5470f134f5bf26138089b.yasm |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/thread.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/cddcf62f24147893e906fb2494.auxcpp |52.0%| [AS] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/_cddcf62f24147893e906fb2494.yasm |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/formatted_output.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/completer.cpp |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/valarray.cpp |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt_easy_setup.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/int128.cc |52.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/fq_client.py.yapyc3 |51.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/kikimr_utils.py.yapyc3 |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt_opt.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt_opts.cpp |52.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/mvp_mock.py.yapyc3 |52.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/kikimr_metrics.py.yapyc3 |52.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/custom_hooks.py.yapyc3 |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/util/digest/city.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/xxhash/xxhash.c |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/locale.cpp |52.0%| [AS] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/_9b3f68facc6b0bda566979a5f9.yasm |51.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |51.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |52.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt.cpp |52.0%| [AS] {BAZEL_DOWNLOAD} $(S)/util/system/context_x86.asm |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/util/random/random.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/numbers.cc |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/util/system/valgrind.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_datetime.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/output.cc |52.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/json_prettifier.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/library/cpp/json/json_writer.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/util/system/mktemp_system.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/damerau_levenshtein_distance.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/rapidjson_helpers.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/json_reader.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/log_message.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/optional.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/cache_topology.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/path.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_decoder.cc |52.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_digest.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_system_2.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_generic.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_stream.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/util/system/compiler.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_system_1.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_util.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/enum_parser/enum_serialization_runtime/ordered_pairs.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_system_4.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_system_3.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/utils/config_traverse.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/protobuf/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt_parse_result.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/resource/registry.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/opt.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_folder.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/blockcodecs/codecs/zstd/zstd.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/localfs.cc |51.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/common.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_block_counter.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/resource/resource.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/barrier.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/ygetopt.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/posix_getopt.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/cpu_cache.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/completer_command.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |51.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/experimental_pow2_size_class.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/experiment.cc |51.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/library/cpp/blockcodecs/core/codecs.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/huge_address_map.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/guarded_page_allocator.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/huge_allocator.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/mincore.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/huge_cache.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/memory_stats.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/huge_page_aware_allocator.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/percpu.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/numa.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/percpu_rseq_unsupported.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/util.cc |51.9%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/percpu_rseq_asm.S |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/page_allocator.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/pagemap.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/page_heap.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/page_allocator_interface.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/noruntime_size_classes.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/legacy_size_classes.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/enum_parser/parse_enum/parse_enum.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/php_generator.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/objective_c_generator.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/csharp_generator.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/proto_parser_helper.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/cpp_generator.cc |51.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/python_generator.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/sampler.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/ruby_generator.cc |51.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |51.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/static_vars.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_nested.cc |52.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/stack_trace_table.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/stats.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.cc |52.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/tcmalloc.cc |52.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/message.cc |52.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/thread_cache.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/want_hpaa.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/experimental_pow2_below64_size_class.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/environment.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/peak_heap_tracker.cc |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/arena.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/util/charset/wide_sse41.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/wrap.cpp |52.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/string_utils/base64/base64.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/enum_options.pb.cc |52.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a1a4fef3e58eac5c8cd56e360e.yasm |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/a85123d3cf465cba982424dc08.auxcpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/util/charset/all_charset.cpp |52.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_b74ebee90bb7903d84da5b42f7.yasm |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ff94c99b3d9492ea47f26af81f.auxcpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/bf91d2c3152cd9f79aee642443.auxcpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/a1a4fef3e58eac5c8cd56e360e.auxcpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/b74ebee90bb7903d84da5b42f7.auxcpp |52.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_ff94c99b3d9492ea47f26af81f.yasm |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a85123d3cf465cba982424dc08.yasm |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_bf91d2c3152cd9f79aee642443.yasm |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/join_b8a3bcab4fd16bee3bf8c97a5b.auxcpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/ad2e0a44e0b30c8fb934928212.auxcpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/json_writer_output.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/name_generator.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/json_output_create.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/45bf9e1d124d3a4ab8f9f012d8.auxcpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/join_85e8f74af512453f26a6c19724.yasm |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/b74ebee90bb7903d84da5b42f7.auxcpp |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_b74ebee90bb7903d84da5b42f7.yasm |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/6b2f2f7191f2fb9fffba30b043.auxcpp |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_45bf9e1d124d3a4ab8f9f012d8.yasm |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.a |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_6b2f2f7191f2fb9fffba30b043.yasm |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/util.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/proto2json.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/proto2json_printer.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/pb_io.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_monitoring/libcpp-client-ydb_monitoring.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/walk.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/simple_reflection.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/binsaver/blob_io.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/binsaver/util_stream_io.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/binsaver/buffered_io.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/writer/json_value.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/binsaver/bin_saver.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivmodsi4.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ucmpdi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatsitf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subvti3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/truncdfbf2.c |52.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negdi2.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/moddi3.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatuntisf.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/gcc_personality_v0.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatunsitf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatunsidf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floattixf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatunsisf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixxfti.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatsisf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floattisf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixxfdi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsxfti.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunstfsi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendbfsf2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunstfdi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunssfti.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsdfti.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsdfdi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendhftf2.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendxftf2.c |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixdfti.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ffsti2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixsfdi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ffsdi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ashldi3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_signal_fence.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divdc3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/addvti3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divmoddi4.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_flag_test_and_set_explicit.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ctzdi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ctzti2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/clear_cache.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_flag_test_and_set.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ctzsi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divmodsi4.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/bswapdi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/addvsi3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_thread_fence.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_flag_clear.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic_flag_clear_explicit.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixdfdi.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ashrti3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/clzti2.c |52.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/cxxsupp/builtins/clzsi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/atomic.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divdi3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/absvti2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/addtf3.c |52.4%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/floatundixf.S |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/absvdi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/floatdixf.c |52.4%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/floatundidf.S |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/comparedf2.c |52.4%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/chkstk.S |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/apple_versioning.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/absvsi2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/comparesf2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/adddf3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendsfdf2.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/eprintf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divdf3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ashrdi3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/addsf3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/crtend.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/cmpti2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/comparetf2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divmodti4.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/floatdisf.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divsf3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ashlti3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divsc3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divtc3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divsi3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divtf3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/emutls.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divxc3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/divti3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/clzdi2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/crtbegin.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/cmpdi2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendhfsf2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/enable_execute_stack.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extendsftf2.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixdfsi.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixtfdi.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixsfsi.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatditf.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixtfsi.c |52.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/cxxsupp/builtins/fixtfti.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsdfsi.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunssfsi.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunssfdi.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |52.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsxfsi.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunstfti.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixunsxfdi.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fixsfti.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatsidf.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatunditf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floattitf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatundixf.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/muldc3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatdixf.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatuntitf.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floattidf.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatuntixf.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/int_util.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/lshrti3.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/lshrdi3.c |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/fp_mode.c |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/modti3.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/modsi3.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/muldi3.c |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/floatuntidf.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/muldf3.c |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |52.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulodi4.c |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulosi4.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/multc3.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulsf3.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/muloti4.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/multf3.c |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/multi3.c |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulvsi3.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulvdi3.c |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulvti3.c |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negsf2.c |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulxc3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negti2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negvdi2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negvsi2.c |52.7%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/paritydi2.c |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/os_version_check.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negvti2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/paritysi2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/negdf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/parityti2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/popcountdi2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subvdi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/powidf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/popcountsi2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subdf3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/powixf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/trampoline_setup.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subsf3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subvsi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/powitf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/powisf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/popcountti2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/truncdfhf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/subtf3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/truncdfsf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/truncsfbf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/trunctfhf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/truncsfhf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/trunctfdf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/trunctfsf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/trunctfxf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivdi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivmoddi4.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ucmpti2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivmodti4.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivsi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/umodsi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/umoddi3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/udivti3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/umodti3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/extenddftf2.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/mulsc3.c |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/bswapsi2.c |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/addvdi3.c |52.8%| [AS] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/x86_64/floatundisf.S |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/cpu_model/x86.c |52.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/cxxsupp/builtins/x86_64/floatdidf.c |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_primitive.cc |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/writer/json.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/util/merge.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_decimal.cc |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string_builder.cc |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/datetime/tz.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_22e0b4e28e344fbe4b14fc4e7f.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_3a6f4eda1ec5d2bd4b5d7ab909.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/129761279a8f635b5cb25be6f6.auxcpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/statistics.cc |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/json_simple.cc |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_129761279a8f635b5cb25be6f6.yasm |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_e0190ea6b9626b7936bb01e6fa.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_9a3d5b70802b945274f285f587.yasm |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_b74ebee90bb7903d84da5b42f7.yasm |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_c08ba1db5492c87ddfd8611d8f.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/9320981177f1bb46a5cf7bb627.auxcpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/backup/backup_ut.cpp |52.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_9320981177f1bb46a5cf7bb627.yasm |52.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/c08ba1db5492c87ddfd8611d8f.auxcpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b74ebee90bb7903d84da5b42f7.auxcpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/2549b9c50b780e2386d838ff17.auxcpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |52.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_2549b9c50b780e2386d838ff17.yasm |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/scheme_board/populator_ut.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/68875c7d34f9bbe09248b5ec55.auxcpp |52.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_68875c7d34f9bbe09248b5ec55.yasm |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/270cc1cc4ad07a20fdc1de7945.auxcpp |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |52.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_b74ebee90bb7903d84da5b42f7.yasm |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/join_c8aed7aa9a190238ac421a9657.auxcpp |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/b74ebee90bb7903d84da5b42f7.auxcpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |52.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/join_2e65a3bc8d7db29fed5b5bb7ff.yasm |52.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_0c6a7b86ca2476db99d999e3e2.yasm |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/b74ebee90bb7903d84da5b42f7.auxcpp |52.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_4a6a74a0ab38f783afd5375054.yasm |52.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/0c6a7b86ca2476db99d999e3e2.auxcpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/4a6a74a0ab38f783afd5375054.auxcpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |52.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_270cc1cc4ad07a20fdc1de7945.yasm |52.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_b74ebee90bb7903d84da5b42f7.yasm |52.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_b74ebee90bb7903d84da5b42f7.yasm |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |52.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/b74ebee90bb7903d84da5b42f7.auxcpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/3a6f4eda1ec5d2bd4b5d7ab909.auxcpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |52.6%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/_4ddfb97a4ab74c0f9e6dca9ae4.yasm |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |52.7%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/9a3d5b70802b945274f285f587.auxcpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/b74ebee90bb7903d84da5b42f7.auxcpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_b74ebee90bb7903d84da5b42f7.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/3094db96f925466f57c2e99df3.auxcpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_3094db96f925466f57c2e99df3.yasm |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/22e0b4e28e344fbe4b14fc4e7f.auxcpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/e0190ea6b9626b7936bb01e6fa.auxcpp |52.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_b74ebee90bb7903d84da5b42f7.yasm |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/b74ebee90bb7903d84da5b42f7.auxcpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/schema.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/util.cc |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_constants.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_replace.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernel.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/formatting.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/chunked_array.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_snappy.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/path_internal.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/tdigest.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_compare.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function_internal.cc |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/dc40bacd6a1983f1de3e155468.auxcpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/b74ebee90bb7903d84da5b42f7.auxcpp |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b74ebee90bb7903d84da5b42f7.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/b8f2779e20208045d5f4aadd3f.auxcpp |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b8f2779e20208045d5f4aadd3f.yasm |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_dc40bacd6a1983f1de3e155468.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/canonical/b74ebee90bb7903d84da5b42f7.auxcpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_b74ebee90bb7903d84da5b42f7.yasm |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_15bedb40aa24416f66f53388b9.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/475563fb51fb0e7131a897a5c7.auxcpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/15bedb40aa24416f66f53388b9.auxcpp |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_b74ebee90bb7903d84da5b42f7.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |53.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_475563fb51fb0e7131a897a5c7.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/b74ebee90bb7903d84da5b42f7.auxcpp |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/03a7e7319c52b37778aca2325e.auxcpp |53.1%| [AS] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_2549b9c50b780e2386d838ff17.yasm |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_conversion.cc |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/exception.cc |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_writer.cc |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/trie.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/time.cc |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/uri.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/tdigest.cc |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/thread_pool.cc |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/path_internal.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/task_group.cc |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/metadata.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string_builder.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/logging.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/key_value_metadata.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/io_util.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/int_util.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/delimiting.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/decimal.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_brotli.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cpu_info.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/coo_converter.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/memory_pool.cc |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/type.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/reader.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/options.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/options.cc |53.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_adaptive.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/chunked_array.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/buffer.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_binary.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/parser.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/data.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_binary.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/validate.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_decimal.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_union.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/c/bridge.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/concatenate.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_mode.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_primitive.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_nested.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_base.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_quantile.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/cast.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_decimal.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_basic.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_hash.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function_internal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/util.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compare.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/diff.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter_util.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/util.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_aggregate.cc |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/builder.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_dict.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_compare.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/expression.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/converter.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_map.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_encode.cc |53.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/exec_plan.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernel.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_vector.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_dict.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_nested.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_dictionary.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_var_std.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_boolean.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/codegen_internal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_scalar.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_boolean.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_fill_null.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/hash_aggregate.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_if_else.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_nested.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/path_util.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_internal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/util_internal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_decoder.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_string.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_nested.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/chunker.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/config.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_builder.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_replace.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_validity.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/registry.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_parser.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/memory.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_numeric.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/mockfs.cc |53.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/extension_type.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/datum.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/localfs.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/device.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/writer.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/parser.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/util_internal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_temporal.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/interfaces.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/caching.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_selection.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/options.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/file.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/compressed.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_hash.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/slow.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/stdio.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/filesystem.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/dictionary.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/transform.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_compare.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/feather.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/message.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_writer.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunker.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/buffered.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/metadata_internal.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/reader.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/result.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/writer.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/sparse_tensor.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunked_builder.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table_builder.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/json_simple.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/record_batch.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/status.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_builders.cc |53.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_util.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/converter.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_run_reader.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/basic_decimal.cc |53.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csf_converter.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_sort.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_lz4.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cancel.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/pretty_print.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/reader.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_block_counter.cc |53.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_ops.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bpacking.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zstd.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_snappy.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zlib.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/mutex.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/memory.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/future.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/scalar.cc |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/formatting.cc |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Tensor.fbs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Schema.fbs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/musl/strptime.c |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/value_parsing.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_types.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/base64.cpp |53.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |53.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/utf8.cc |53.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |53.4%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Message.fbs.cpp |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/SparseTensor.fbs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/datetime/tz.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/visitor.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/File.fbs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/feather.fbs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_constants.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema_internal.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_scanner.cc |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/writer.cc |53.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/node_broker.{pb.h ... 
grpc.pb.h} |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_decryptor.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption_internal_nossl.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/bloom_filter.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_encryptor.cc |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |53.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_03a7e7319c52b37778aca2325e.yasm |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_comparison.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/properties.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader_internal.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_reader.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/murmur3.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/platform.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/types.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/printer.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encoding.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_writer.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_reader.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/schema.cc |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/statistics.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csx_converter.cc |53.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/memory_stats.{pb.h ... 
grpc.pb.h} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_reader.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_writer.cc |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_primitive.cc |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |53.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_b74ebee90bb7903d84da5b42f7.yasm |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |53.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_base.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/b74ebee90bb7903d84da5b42f7.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_nested.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/2549b9c50b780e2386d838ff17.auxcpp |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cpu_info.cc |53.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_var_std.cc |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/yasm/yasm |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_boolean.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/types.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/uri.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytree/attributes.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/future.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_string.cc |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_debug/libcpp-client-ydb_debug.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/concatenate.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table_builder.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_selection.cc |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bpacking.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_lz4.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_persqueue_v1.{pb.h ... grpc.pb.h} |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |53.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/validation/validation |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |53.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csf_converter.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |53.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_b74ebee90bb7903d84da5b42f7.yasm |53.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_dc6742774f7f7be07b72a0f255.yasm |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/data.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/dc6742774f7f7be07b72a0f255.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/c7dcee7daed3ea80f68bb6b1c8.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/mkql_proto/mkql_proto_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/b74ebee90bb7903d84da5b42f7.auxcpp |53.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_c7dcee7daed3ea80f68bb6b1c8.yasm |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_dict.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |53.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_base.cc |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_adaptive.cc |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/visitor.cc |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_bsconfig.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |53.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_e9446a953b5a015999d71407a6.yasm |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |53.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_18e05a10f6ea49dd0f554fa51f.yasm |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/18e05a10f6ea49dd0f554fa51f.auxcpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |53.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_b74ebee90bb7903d84da5b42f7.yasm |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/e9446a953b5a015999d71407a6.auxcpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/b74ebee90bb7903d84da5b42f7.auxcpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/directory_entry.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/mind/tenant_ut_local.cpp |53.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/79b13353271c8cfe46ea4b9f1e.auxcpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |53.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |53.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |53.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |53.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |53.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |53.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |53.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |53.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |53.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/_79b13353271c8cfe46ea4b9f1e.yasm |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |53.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/parsers/gas/gas-parse.c |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/elf/elf.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/dwarf2/dwarf2-dbgfmt.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/md5.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/mergesort.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/codeview/cv-symline.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/codeview/cv-type.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/dwarf2/dwarf2-info.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/dwarf2/dwarf2-aranges.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/null/null-dbgfmt.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/dwarf2/dwarf2-line.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/gas-token.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/stabs/stabs-dbgfmt.c 
|53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/init_plugin.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/x86/x86bc.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/listfmts/nasm/nasm-listfmt.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/bin/bin-objfmt.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/coff/win64-except.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/dbg/dbg-objfmt.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/elf/elf-x86-amd64.c |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/validation/main.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/elf/elf-x86-x32.c |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/xdf/xdf-objfmt.c |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/parsers/gas/gas-parser.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/parsers/gas/gas-parse-intel.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/parsers/nasm/nasm-parse.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/rdf/rdf-objfmt.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/gas/gas-eval.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/parsers/nasm/nasm-parser.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/cpp/cpp-preproc.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/x86/x86arch.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/nasm/nasmlib.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/macho/macho-objfmt.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/nasm/nasm-pp.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/x86regtmod.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/file.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/frontends/yasm/yasm.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bc-incbin.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/x86cpu.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/value.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/base64.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_primitive.cc |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/elf/elf-x86-x86.c |53.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/nasm-token.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/intnum.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/converter.cc |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/lc3b/lc3barch.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/linemap.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bc-reserve.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_types.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/File.fbs.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/cmake-module.c |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_nested.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/expr.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/exception.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/floatnum.c |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/key_value_metadata.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_basic.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/io_util.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_run_reader.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/inttree.c |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/task_group.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |54.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_55acb3440d202d5436c3eebe8d.yasm |54.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |54.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/_b74ebee90bb7903d84da5b42f7.yasm |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |54.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/_60cfd0c71f99697efa7d884ea6.yasm |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |54.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/join_bf2c9ed2f082df133ad2524c35.yasm |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/55acb3440d202d5436c3eebe8d.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |54.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_b74ebee90bb7903d84da5b42f7.yasm |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/b74ebee90bb7903d84da5b42f7.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |54.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/join_774f9df586ad2dd13cb3b71a58.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/b74ebee90bb7903d84da5b42f7.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/60cfd0c71f99697efa7d884ea6.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |54.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_6b2d83fc4b34dc0640579a5038.yasm |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/6b2d83fc4b34dc0640579a5038.auxcpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/compression/zlib.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/crash_handler.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/hedging_manager.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |54.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/trace_ut.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |54.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |54.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |54.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |54.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |54.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |54.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |54.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tracing.{pb.h ... 
grpc.pb.h} |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |54.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/_e3fb7cb86d5a545297f158946c.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/internal/logging.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |54.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_encode.cc |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_vector.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/parser.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_fill_null.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/builder.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/type.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/status.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/log_entry.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/pretty_print.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/util/_/datetime/parser.rl6.cpp |54.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zlib.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_ops.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/SparseTensor.fbs.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_quantile.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/hash_aggregate.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/expression.cc |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |54.5%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |54.5%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/_{4ddfb97a4ab74c0f9e6dca9ae4.yasm ... eb2d1cce6188adc65cfc8d8301.rodata} |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/str_format/float_conversion.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_compare.cc |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/b74ebee90bb7903d84da5b42f7.auxcpp |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/join_faf7f8a83413e98910a74c5e89.auxcpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/util.cc |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/join_b7c10b4864a820ed988f274a3b.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/recipe |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/c17932a1c7065b959cf7db2c7d.auxcpp |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_c17932a1c7065b959cf7db2c7d.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/b74ebee90bb7903d84da5b42f7.auxcpp |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_cf6645a401208904e46568ab36.yasm |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_ab01ce74939d56b8d778b13758.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/cf6645a401208904e46568ab36.auxcpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/ab01ce74939d56b8d778b13758.auxcpp |54.5%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/blocking_counter.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/internal/scoped_set_env.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/graphcycles.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/random/internal/pool_urbg.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_string.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/core/expr_nodes_gen/gen/gen |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/random.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_rep_btree.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/print.cpp |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/json_value_output.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cordz_sample_token.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/json/fast_sax/unescape.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/config.cc |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/registry.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/device.cc |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Schema.fbs.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/last_getopt_parser.cpp |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... 
grpc.pb.h} |54.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/fq_private.pb.{h, cc} |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |54.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/config.pb.{h, cc} |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |54.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/converter.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |54.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |54.7%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/reader.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |54.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/sitecustomize.pyx.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/protobuf/json/string_transform.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/__res.pyx.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/background.cc |54.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... 
grpc.pb.h} |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/protoc/libcontrib-libs-protoc.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/_5016a11705f811023511787125.yasm |54.4%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_memory.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_numeric.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/charconv.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/exec_plan.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_nested.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/b74ebee90bb7903d84da5b42f7.auxcpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_1f45adb640e82c46627e2b2d3a.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |54.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/1f45adb640e82c46627e2b2d3a.auxcpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/b5385522105a31f0a0c490bbb8.auxcpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b74ebee90bb7903d84da5b42f7.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_comparison.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b5385522105a31f0a0c490bbb8.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |54.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/grpc_services/rpc_describe_path.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_e93b94c36ea8b5ce684eea2c49.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/join_dcd97fcac6a334ca569de2c107.auxcpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/join_487d6f374d6d03f9641be7dbbc.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/e93b94c36ea8b5ce684eea2c49.auxcpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_b74ebee90bb7903d84da5b42f7.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/b74ebee90bb7903d84da5b42f7.auxcpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_2ef1517045ab9cce02fdf81d44.yasm |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_b74ebee90bb7903d84da5b42f7.yasm |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/b74ebee90bb7903d84da5b42f7.auxcpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/2ef1517045ab9cce02fdf81d44.auxcpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/4a4d1a0f629769ad18cbbbf6ac.auxcpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_4a4d1a0f629769ad18cbbbf6ac.yasm |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_writer.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/delimiting.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_binary.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/310c4457f755fd2f8a60e32d31.auxcpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/internal/program_name.cc |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/internal/usage.cc |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/certs/libcerts.global.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/e8af3a52f9acc84b7164fcb145.auxcpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/internal/flag.cc |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/usage.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/commandlineflag.cc |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/reflection.cc |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/_310c4457f755fd2f8a60e32d31.yasm |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/core/expr_nodes_gen/gen/libpy3core-expr_nodes_gen-gen.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/core/expr_nodes_gen/gen/libpy3core-expr_nodes_gen-gen.global.a |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/_e8af3a52f9acc84b7164fcb145.yasm |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/certs/libcerts.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/library/python/runtime_py3/__res.reg3.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/demangle.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/examine_stack.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/failure_signal_handler.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/decode_rust_punycode.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/elf_mem_image.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/demangle_rust.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/leak_check.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize.cc |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/utf8_for_code_point.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/address_is_readable.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/str_cat.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/civil_time_detail.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/damerau_levenshtein_distance.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_crc.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_impl.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_if.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_fixed.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_lookup.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_info.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_posix.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/zone_info_source.cc |54.9%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/code_generator.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/cord_analysis.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/primitive_field_lite.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/file.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_serialization.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/context.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_field_lite.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/names.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_builder_lite.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/service.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field_generators/enum_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/generator.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_enum_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field_generators/string_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field_generators/primitive_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/helpers.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/extension.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/parse_function_generator.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field_generators/message_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/file.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/message.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/command_line_interface.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/tracker.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/clock.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_helpers.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_field_base.cc |54.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_generator.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_enum.cc |54.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |54.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_map_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/importer.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/doc_comment.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_nested.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/enum.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/enum_field_lite.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/enum_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/extension_lite.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/enum_lite.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/file.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/extension.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/generator.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/kotlin_generator.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/generator_factory.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/join_e415064e97e05e55193961012a.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/helpers.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/13cdeac3c2ea1d9a1fea0666f0.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/map_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/map_field_lite.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_builder.cc |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message_lite.cc |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/join_5e875f1482894a405c6fa9505b.yasm |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/message.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/service.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/markupsafe._speedups.reg3.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/name_resolver.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/shared_code_generator.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/enum.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/string_field_lite.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/names.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/primitive_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/certs/507932c176c51682bda6445e3b.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/module.c |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/microprotocols.c |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/connection.c |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/join_b360a9c7c825b8437c8e572c7a.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/java/string_field.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/join_167b57b3ea58aa1317ce4a63f0.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/enum_field.cc |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/join_4fb5ad8a13092e413b8ecdbff6.yasm |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/_32a78a2ddd15197ee4ea3cdb45.yasm |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/_83647e096613975133da2c02d2.yasm |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/_13cdeac3c2ea1d9a1fea0666f0.yasm |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/cursor.c |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/32a78a2ddd15197ee4ea3cdb45.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/83647e096613975133da2c02d2.auxcpp |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/_c8958d52437af6328fd19cbf32.yasm |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/python/symbols/module/module.cpp |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/_ac625d468dde52bb27fd799da7.yasm |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/_2c0463a40f92eb557c9f6cd8e2.yasm |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/_25af6766d8b3b32f33d204304e.yasm |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/python/MarkupSafe/py3/markupsafe/_speedups.c |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/prepare_protocol.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/statement.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/util.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/e1be1ffa5b08409c35f7bf3d05.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(B)/library/python/symbols/module/ac625d468dde52bb27fd799da7.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/a1b6c5ebbdfb24bd182ffbf1cd.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/13606ddbba4b01d0d50e6386fd.auxcpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/library.python.symbols.module.syms.reg3.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/1a2af4b3aa213cda4f3eb8ecb6.auxcpp |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/_13606ddbba4b01d0d50e6386fd.yasm |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/08f7a60a64ed48068c802fb284.auxcpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/446c02d65d5e3146666227eb89.auxcpp |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/_a1b6c5ebbdfb24bd182ffbf1cd.yasm |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/certs/_507932c176c51682bda6445e3b.yasm |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/gen/_1a2af4b3aa213cda4f3eb8ecb6.yasm |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/library/python/symbols/registry/syms.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/2c0463a40f92eb557c9f6cd8e2.auxcpp |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/_e1be1ffa5b08409c35f7bf3d05.yasm |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/_446c02d65d5e3146666227eb89.yasm |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/row.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/python3/Modules/_sqlite/blob.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/25af6766d8b3b32f33d204304e.auxcpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/c8958d52437af6328fd19cbf32.auxcpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/extension.cc |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_clickhouse.py.yapyc3 |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/field.cc |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_greenplum.py.yapyc3 |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_postgresql.py.yapyc3 |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/generator.cc |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_join.py.yapyc3 |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_ydb.py.yapyc3 |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/conftest.py.yapyc3 |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/test_mysql.py.yapyc3 |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fq_runner/kikimr_runner.py.yapyc3 |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/import_writer.cc |55.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/line_consumer.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/helpers.cc |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/join_b9721e5e52a855af8ed2780001.yasm |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/message_field.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/map_field.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/oneof.cc |55.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/names.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/primitive_field.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/message.cc |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/php/names.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/objectivec/text_format_decode_data.cc |55.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/plugin.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/php/php_generator.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/plugin.pb.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/parser.cc |55.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/python/generator.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/python/helpers.cc |55.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/python/pyi_generator.cc |55.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... 
grpc.pb.h} |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/retention.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/ruby/ruby_generator.cc |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/duration.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/subprocess.cc |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |55.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/address_is_readable.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/format.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/vdso_support.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/leak_check.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/failure_signal_handler.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/field_generators/map_field.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/demangle_rust.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/examine_stack.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/enum.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/utf8_for_code_point.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/decode_rust_punycode.cc |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/zip_writer.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/demangle.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_bigint.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/internal/elf_mem_image.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/cpp/padding_optimizer.cc |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/memutil.cc |55.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/escaping.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/ostringstream.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc |55.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/float_conversion.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/output.cc |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/stringify_sink.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/crc32c.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/utf8.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/cpu_detect.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc_cord_state.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc_memcpy_x86_arm_combined.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/status/statusor.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc_memcpy_fallback.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc_non_temporal_memcpy.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/ascii.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/crc/internal/crc_x86_arm_combined.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/status/internal/status_internal.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/cord.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/status/status_payload_printer.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/status/status.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/escaping.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/charconv.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/numbers.cc |55.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/match.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/str_replace.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/string_view.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/str_split.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/substitute.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/cord_buffer.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/time.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/symbolize.cc |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/tools/rorescompiler/rorescompiler |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/debugging/stacktrace.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/marshalling.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_libc.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/e3fb7cb86d5a545297f158946c.auxcpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/internal/commandlineflag.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/usage_config.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/internal/private_handle_accessor.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(B)/util/all_random.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/time/internal/cctz/src/time_zone_format.cc |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/x86/x86expr.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cancel.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/lc3bid.c |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/preprocs/nasm/nasm-eval.c |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |55.2%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_temporal.cc |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/chunker.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/strcasecmp.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_hash.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/valparam.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/elf/elf-objfmt.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/strsep.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_binary.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/error_category.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/parameters.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/errwarn.c |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/4a7a64454c9245b8cfbbd6c568.auxcpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/efe80a67dc5fbde40e7e446fba.auxcpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_efe80a67dc5fbde40e7e446fba.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/6d1763e0cdc6e301e2989d8343.auxcpp |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_6d1763e0cdc6e301e2989d8343.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/b74ebee90bb7903d84da5b42f7.auxcpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_dict.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5f751080373d4214d525810354.auxcpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_5f751080373d4214d525810354.yasm |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_4a7a64454c9245b8cfbbd6c568.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/dc9abab7075b555a3ef54c0d31.auxcpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/_dc9abab7075b555a3ef54c0d31.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/bloom_filter.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_builders.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/span.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/2df5d6ce0e7b4d20016c681571.auxcpp |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_a38dc35da146e8497390eb9070.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/b74ebee90bb7903d84da5b42f7.auxcpp |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_2df5d6ce0e7b4d20016c681571.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/a38dc35da146e8497390eb9070.auxcpp |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_b74ebee90bb7903d84da5b42f7.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bitvect.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/sys_view/ut_labeled.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/xstrdup.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/size_classes.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |55.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |55.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunked_builder.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bc-data.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bc-align.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_aggregate.cc |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bc-org.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/frontends/yasm/yasm-options.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/reader.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/insn.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/hamt.c |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_decimal.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/options.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_parser.cc |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/result.cc |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/coo_converter.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_internal.cc |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/_f876c7e5551ebce27aee411303.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |55.4%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/f876c7e5551ebce27aee411303.auxcpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/basic_decimal.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression.cc |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_string.cc |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_8e0314ef7ed855a3126c9e5eb6.yasm |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |55.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/http_config.{pb.h ... 
grpc.pb.h} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/rorescompiler/main.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_d4428c6555fc34a79b567ae531.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/8e0314ef7ed855a3126c9e5eb6.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/f00d69da9467a4a52da9b22496.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/d4428c6555fc34a79b567ae531.auxcpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_b74ebee90bb7903d84da5b42f7.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/6f74072898e36b4312ab75a0db.auxcpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_b74ebee90bb7903d84da5b42f7.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/3989ea13006d67e89dd1a8ad12.auxcpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_5bb9b4714ab16ef374043b6486.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/b74ebee90bb7903d84da5b42f7.auxcpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_8e7a34ec2df8fda1ade7839923.yasm |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_e68548efa11d3a00711f021bed.yasm |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_3989ea13006d67e89dd1a8ad12.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_6f74072898e36b4312ab75a0db.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/b74ebee90bb7903d84da5b42f7.auxcpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_f00d69da9467a4a52da9b22496.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_b74ebee90bb7903d84da5b42f7.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/b74ebee90bb7903d84da5b42f7.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/e68548efa11d3a00711f021bed.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/autoconfig/5bb9b4714ab16ef374043b6486.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/8e7a34ec2df8fda1ade7839923.auxcpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter_util.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/value_parsing.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/tools/enum_parser/enum_serialization_runtime/dispatch_methods.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/base/log_severity.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/internal/vlog_config.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/getopt/small/opt2.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/datum.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/writer.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/dbgfmts/codeview/cv-dbgfmt.c |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/interfaces.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |55.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/quota_internal.pb.{h, cc} |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/replace_path.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/phash.c |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/bind.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function.cc |55.4%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/mutex.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/memory.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/util_internal.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/sitecustomize.reg3.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/cppparser/parser.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/logging.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_if_else.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/9cde4489f7fa94a76b9b02d638.auxcpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ed475535b561d333796c95a705.auxcpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/cast.cc |55.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/_{102e130291da49e85913bbc5a2.yasm ... 
03b95eb580b174a014a90b2beb.rodata} |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_b74ebee90bb7903d84da5b42f7.yasm |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/join_d158d6388395f7fac32a213c83.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/join_3ef875ff1dc5749e1fcb922b21.auxcpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ee50fbcae5d7c3ae1b7c168722.auxcpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_ee50fbcae5d7c3ae1b7c168722.yasm |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_ed475535b561d333796c95a705.yasm |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_9cde4489f7fa94a76b9b02d638.yasm |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_writer.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/b74ebee90bb7903d84da5b42f7.auxcpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/754a90f95994461130feaa1756.auxcpp |55.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_754a90f95994461130feaa1756.yasm |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/join_9049d840b669fdc5bfce521dda.yasm |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |55.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/da0981963f86194066f883caf8.auxcpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_da0981963f86194066f883caf8.yasm |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_b74ebee90bb7903d84da5b42f7.yasm |55.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... 
grpc.pb.h} |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/14453aaeaf36a596bef15bc685.auxcpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_b74ebee90bb7903d84da5b42f7.yasm |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_48a0e969cc306fdb22d55c035b.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_14453aaeaf36a596bef15bc685.yasm |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_b74ebee90bb7903d84da5b42f7.yasm |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/b74ebee90bb7903d84da5b42f7.auxcpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/b74ebee90bb7903d84da5b42f7.auxcpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_f2c85040cc1290644ebb21b197.yasm |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.a |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_8dffc5726b9be6abfc5e0f9557.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/limits/f2c85040cc1290644ebb21b197.auxcpp |55.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/settings.py.yapyc3 |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/8dffc5726b9be6abfc5e0f9557.auxcpp |55.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/utils/endpoint_determiner.py.yapyc3 |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/48a0e969cc306fdb22d55c035b.auxcpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.a |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_55a8ee17216c8627b2de1b874d.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/5c7efee738caa61f00e33b41e9.auxcpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/55a8ee17216c8627b2de1b874d.auxcpp |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_5c7efee738caa61f00e33b41e9.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/b74ebee90bb7903d84da5b42f7.auxcpp |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_b74ebee90bb7903d84da5b42f7.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/b74ebee90bb7903d84da5b42f7.auxcpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/61e39f83bb1c7466cec418c177.auxcpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/join_d00e29c6b73da4719cad82b752.auxcpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |55.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_61e39f83bb1c7466cec418c177.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/buffer.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/conditions.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/log_entry.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/nullguard.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/lc3b/lc3bbc.c |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/globals.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/log_format.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/log_message.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/proto.cc |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/die_if_null.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/flags.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/vlog_config.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/log_sink.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/globals.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/c/bridge.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/scoped_set_env.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/strerror.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock.cc |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/sysinfo.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/thread_identity.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/field_mask_util.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/map.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/repeated_field.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/arenaz_sampler.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/zero_copy_stream_impl.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_message_tctable_lite.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/arena.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/any.pb.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/printer.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/extension_set.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/arena_align.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/arenastring.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/descriptor.pb.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/api.pb.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/duration.pb.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/descriptor_database.cc |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/protobuf/src/google/protobuf/extension_set_heavy.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/field_mask.pb.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/dynamic_message.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/descriptor.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_message_bases.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_enum_util.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_message_tctable_gen.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/implicit_weak_message.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/inlined_string_field.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_message_reflection.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/coded_stream.cc |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/io_win32.cc |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/run_ydb.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs_fixture.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/ydb-dump.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/zero_copy_stream.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/json_value.h_serialized.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/zero_copy_sink.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-transfer-topic-to-table.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-topic.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/tokenizer.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/message_path.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/field_comparator.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/lexer.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/parser.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/writer.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/zero_copy_buffered_stream.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/unparser.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/internal/untyped_message.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json_util.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/map_field.cc |55.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/json.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/json/old_json.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/message.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/messagext.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/reflection_ops.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/parse_context.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/service.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/source_context.pb.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/common.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/status.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/struct.pb.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/stringpiece.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/substitute.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/structurally_valid.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/type.pb.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/timestamp.pb.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/unknown_field_set.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/message_differencer.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/time_util.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/text_format.cc |55.8%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage.{pb.h ... 
grpc.pb.h} |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/type_resolver_util.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/third_party/utf8_range/utf8_validity.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/wire_format.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/wrappers.pb.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/wire_format_lite.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/strtod.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/stubs/strutil.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/any.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/empty.pb.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/arena_config.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/any_lite.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/message_lite.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/throw_delegate.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/unscaledcycleclock.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock_wait.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/poison.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/initialize.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/cycleclock.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/internal/commandlineflag.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/internal/flag.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/internal/private_handle_accessor.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/util/delimited_message_util.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/internal/program_name.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/marshalling.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/commandlineflag.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/parse.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/reflection.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/usage.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/usage_config.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/log_sink_set.cc |55.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/fnmatch.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/log/internal/check_op.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/central_freelist.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |55.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/main.cc |55.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_message_field.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_union.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protoc/src/google/protobuf/compiler/csharp/csharp_message.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/initialize.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/memory.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/crc/internal/crc_memcpy_fallback.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/log/globals.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_util.cc |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/diff.cc |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/util/string/cast.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_mode.cc |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/io/gzip_stream.cc |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/synchronization/internal/per_thread_sem.cc |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/mockfs.cc |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/time.cc |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.a |55.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/parse.cc |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/assocdat.c |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |56.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/slow.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_map.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |56.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |56.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |56.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... 
grpc.pb.h} |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_dictionary.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_hash.cc |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/internal/vdso_support.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/blockcodecs/core/stream.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/validate.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/grpc/src/compiler/node_generator.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/sparse_tensor.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/decimal.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/record_batch.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/builtins/ffssi2.c |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/9b3f68facc6b0bda566979a5f9.auxcpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/a41561dc3f56a825fcd4bcc1c3.auxcpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/cxxsupp/libcxx/src/filesystem/filesystem_clock.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/protobuf/src/google/protobuf/generated_message_tctable_full.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/library/cpp/binsaver/mem_io.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/system-alloc.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/flags/internal/usage.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csx_converter.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/section.c |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/3d2ad5aac03dd48ea1a0a83eb8.auxcpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/b74ebee90bb7903d84da5b42f7.auxcpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/5689282d9693ccac57318ca874.auxcpp |56.0%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/audit/_5689282d9693ccac57318ca874.yasm |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_3d2ad5aac03dd48ea1a0a83eb8.yasm |56.0%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_b74ebee90bb7903d84da5b42f7.yasm |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/objfmts/coff/coff-objfmt.c |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/bytecode.c |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/printer.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/codegen_internal.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/symrec.c |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/tcmalloc/tcmalloc/transfer_cache.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/strings/internal/cord_rep_consume.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/5016a11705f811023511787125.auxcpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/libyasm/xmalloc.c |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/operation_id.pb.{h, cc} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/tools/yasm/modules/arch/x86/x86id.c |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |56.2%| [CC] 
{BAZEL_DOWNLOAD} $(B)/util/all_thread.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/583eccaec03903a04e0516e9bb.auxcpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_scalar.cc |56.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_109168012f4665542dd2bafba9.yasm |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/109168012f4665542dd2bafba9.auxcpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/b74ebee90bb7903d84da5b42f7.auxcpp |56.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_b74ebee90bb7903d84da5b42f7.yasm |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |56.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_583eccaec03903a04e0516e9bb.yasm |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encoding.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/mutex.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/per_thread_sem.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/futex_waiter.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/waiter_base.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_format.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_impl.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_if.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/pthread_waiter.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/kernel_timeout.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/time.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_info.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |56.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/barrier.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/clock.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/notification.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/status/status_payload_printer.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/sem_waiter.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/status/status.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/stdcpp_waiter.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/civil_time.cc |56.4%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/_a41561dc3f56a825fcd4bcc1c3.yasm |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/duration.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/status/internal/status_internal.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_posix.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/civil_time_detail.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/create_thread_identity.cc |56.4%| [AS] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/_08f7a60a64ed48068c802fb284.yasm |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/zone_info_source.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/format.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/blocking_counter.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/graphcycles.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal/win32_waiter.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/time/internal/cctz/src/time_zone_libc.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/restricted/abseil-cpp/absl/numeric/int128.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |55.7%| COMPACTING CACHE 196.9MiB |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |55.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 |55.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/generic/utils/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 |55.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |55.8%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |55.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |55.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |55.8%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/functional/config/flake8 |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |55.9%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |55.9%| [TS] {RESULT} ydb/tests/tools/mdb_mock/flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/functional/api/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 
|55.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |55.9%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |55.9%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |55.9%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |55.9%| [TS] {RESULT} ydb/tests/functional/dynumber/flake8 |55.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 |55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |56.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/generic/utils/black |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |56.0%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |56.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |56.0%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |56.0%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |56.0%| [TS] {RESULT} ydb/tests/tools/fq_runner/flake8 |56.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |56.0%| [TS] {RESULT} ydb/tests/tools/mdb_mock/black |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |56.1%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |56.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |56.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 |56.1%| [TS] 
{RESULT} ydb/tests/fq/common/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |56.1%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |56.1%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |56.1%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |56.1%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |56.1%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |56.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |56.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |56.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |56.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |56.3%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |56.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |56.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 |56.4%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |56.4%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |56.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |56.4%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |64.7%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |64.8%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |66.4%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |66.4%| [AR] {RESULT} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |66.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |66.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |66.8%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |66.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |68.9%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |69.0%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |69.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |70.3%| [LD] {RESULT} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |70.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |70.7%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |70.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libydb-core-protos.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/protos/libydb-core-protos.a |71.1%| [AR] {RESULT} 
$(B)/ydb/core/protos/libydb-core-protos.a |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |71.1%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |71.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |71.1%| [LD] {RESULT} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |71.1%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |71.1%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |71.1%| [LD] {RESULT} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |71.1%| [AR] {default-linux-x86_64, release, asan} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |71.1%| [AR] {RESULT} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |71.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |71.1%| [LD] {RESULT} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |71.1%| [LD] {RESULT} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |71.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/client/bin/sqs |71.2%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |71.2%| [LD] {RESULT} $(B)/ydb/core/ymq/client/bin/sqs |71.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/client/bin/sqs |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |71.2%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |71.2%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |71.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |71.3%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |71.3%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |71.4%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |71.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |71.7%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |74.3%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |75.0%| [AR] {tool} $(B)/yql/essentials/core/expr_nodes_gen/gen/libpy3core-expr_nodes_gen-gen.a |75.0%| [AR] {tool} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |75.2%| [AR] {tool} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |75.3%| [AR] {tool} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.3%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.4%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |75.4%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |75.5%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |75.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas 
|75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |75.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |75.6%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |75.7%| [AR] {tool} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.a |75.8%| [AR] {tool} $(B)/certs/libcerts.a |75.8%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |75.9%| [AR] {tool} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |75.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |75.9%| [AR] {tool} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |76.2%| [AR] {tool} $(B)/library/cpp/json/writer/libcpp-json-writer.a |76.3%| [AR] {tool} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |76.3%| [AR] {tool} $(B)/library/cpp/cppparser/liblibrary-cpp-cppparser.a |76.4%| [AR] {tool} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |76.4%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |76.5%| [AR] {tool} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |76.5%| [AR] {tool} $(B)/certs/libcerts.global.a |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.5%| [AR] {tool} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |76.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |76.6%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |76.6%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |76.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |76.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |76.8%| [AR] {tool} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |77.3%| [AR] {tool} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |77.4%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.5%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens 
|77.5%| [AR] {tool} $(B)/ydb/core/config/protos/libcore-config-protos.a |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.6%| [AR] {tool} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |77.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |77.9%| [AR] {tool} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |77.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |78.9%| [AR] {tool} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |78.9%| [AR] {tool} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |78.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |79.0%| [AR] {tool} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |79.2%| [AR] {tool} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |79.2%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |79.2%| [AR] {tool} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |79.3%| [AR] {tool} $(B)/contrib/libs/grpc/src/compiler/grpc_plugin_support/libsrc-compiler-grpc_plugin_support.a |79.4%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |79.7%| [AR] {tool} $(B)/yql/essentials/core/expr_nodes_gen/gen/libpy3core-expr_nodes_gen-gen.global.a |79.7%| [AR] {tool} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |79.8%| [AR] {tool} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |79.8%| [AR] {tool} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |79.8%| [LD] {RESULT} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |79.9%| [AR] {tool} $(B)/util/charset/libutil-charset.a |80.0%| [AR] {tool} $(B)/library/cpp/json/liblibrary-cpp-json.a |80.0%| [AR] {tool} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |80.0%| [AR] {tool} $(B)/ydb/core/config/utils/libcore-config-utils.a |80.1%| [AR] {tool} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |80.4%| [AR] {tool} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |80.5%| [AR] {tool} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |80.6%| [AR] {tool} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |80.9%| [AR] {tool} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.a |80.9%| [AR] {tool} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |80.9%| [AR] {tool} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |80.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |80.9%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |81.0%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |81.0%| [AR] {tool} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |81.0%| [AR] {tool} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.a |81.1%| [AR] {tool} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |81.1%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |81.2%| 
[AR] {tool} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |81.3%| [AR] {tool} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |81.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |81.6%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |81.7%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |81.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |81.7%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |81.8%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |81.9%| [AR] {tool} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |81.9%| [AR] {tool} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |81.9%| [AR] {tool} $(B)/tools/enum_parser/parse_enum/libtools-enum_parser-parse_enum.a |81.9%| [AR] {tool} $(B)/ydb/public/lib/protobuf/libpublic-lib-protobuf.a |82.0%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |82.1%| [AR] {tool} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a |82.5%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |82.5%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |82.5%| [AR] {tool} $(B)/util/libyutil.a |82.5%| [AR] {tool} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |82.5%| [AR] {tool} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |82.6%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |82.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |82.6%| [LD] {RESULT} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |82.6%| [LD] {tool} $(B)/tools/rorescompiler/rorescompiler |82.6%| [LD] {tool} $(B)/tools/rescompressor/rescompressor |82.6%| [LD] {tool} $(B)/tools/enum_parser/enum_parser/enum_parser |82.5%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |82.5%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |82.6%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |82.6%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |82.6%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |82.6%| [LD] {tool} $(B)/contrib/tools/yasm/yasm |82.7%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |82.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |82.6%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |82.6%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |82.6%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |82.7%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |82.7%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_c0d55d34dff7576aee52216011.yasm |82.7%| [AR] {tool} $(B)/contrib/libs/protoc/libcontrib-libs-protoc.a |82.7%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/_095356409510fa86c3c78c8de9.yasm |82.7%| [AS] 
{BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_c4e4ec332438e4082188ee7013.yasm |82.7%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/_0b80f5655025bb61914d1a380e.yasm |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |82.7%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/_45903b27f276ed27d9f37309a4.yasm |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |82.7%| [AR] {tool} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |82.7%| [LD] {tool} $(B)/ydb/public/lib/validation/validation |82.7%| [LD] {tool} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |82.7%| [LD] {tool} $(B)/contrib/tools/protoc/protoc |82.7%| [LD] {tool} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |82.7%| [LD] {tool} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |82.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |82.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |82.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |82.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |82.7%| [LD] {tool} $(B)/tools/py3cc/py3cc |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |82.7%| [LD] {tool} $(B)/yql/essentials/core/expr_nodes_gen/gen/gen |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |82.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.global.a |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/scheme_board.{pb.h ... 
grpc.pb.h} |82.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/45903b27f276ed27d9f37309a4.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |82.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/c4e4ec332438e4082188ee7013.auxcpp |82.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/c0d55d34dff7576aee52216011.auxcpp |82.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/34d79c26d54ce2949b2c9fc34e.auxcpp |82.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/4ddfb97a4ab74c0f9e6dca9ae4.auxcpp |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |82.7%| [PB] {BAZEL_UPLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |82.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |82.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |82.8%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |82.8%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |82.8%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |82.8%| [PR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... 
defs.inl.h} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |82.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/kikimr_utils.py.yapyc3 |82.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/settings.py.yapyc3 |82.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/conftest.py.yapyc3 |82.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_ydb.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/kikimr_runner.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/__main__.py.yapyc3 |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_clickhouse.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/fq_client.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/mvp_mock.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/endpoint_determiner.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_join.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_mysql.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_greenplum.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/test_postgresql.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/kikimr_metrics.py.yapyc3 |82.8%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/custom_hooks.py.yapyc3 |82.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |82.8%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |82.8%| [PB] {BAZEL_UPLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |82.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |82.9%| [PB] {BAZEL_UPLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |82.9%| [PB] {BAZEL_UPLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |82.9%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_{34d79c26d54ce2949b2c9fc34e.yasm ... bfff40605939b1d940cec3e5dc.rodata} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |82.9%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/_{102e130291da49e85913bbc5a2.yasm ... 03b95eb580b174a014a90b2beb.rodata} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... 
grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |82.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/endpoint_determiner.py.yapyc3 |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/mvp_mock.py.yapyc3 |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/conftest.py.yapyc3 |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/settings.py.yapyc3 |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/kikimr_runner.py.yapyc3 |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_clickhouse.py.yapyc3 |83.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/fq_client.py.yapyc3 |83.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/kikimr_utils.py.yapyc3 |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_mysql.py.yapyc3 |83.1%| [PR] {BAZEL_UPLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |83.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/_{4ddfb97a4ab74c0f9e6dca9ae4.yasm ... eb2d1cce6188adc65cfc8d8301.rodata} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_ydb.py.yapyc3 |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/__main__.py.yapyc3 |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |83.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/_{d1f75e9778c0cbb4144b453225.yasm ... 4fc6ad86f2e153876bf4a9e18e.rodata} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_postgresql.py.yapyc3 |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/kikimr_metrics.py.yapyc3 |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_join.py.yapyc3 |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/test_greenplum.py.yapyc3 |83.1%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/_34d79c26d54ce2949b2c9fc34e.yasm |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |83.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/custom_hooks.py.yapyc3 |83.1%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/_102e130291da49e85913bbc5a2.yasm |83.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |83.1%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_{34d79c26d54ce2949b2c9fc34e.yasm ... bfff40605939b1d940cec3e5dc.rodata} |83.1%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/_4ddfb97a4ab74c0f9e6dca9ae4.yasm |83.1%| [AS] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/_d1f75e9778c0cbb4144b453225.yasm |83.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/_{102e130291da49e85913bbc5a2.yasm ... 03b95eb580b174a014a90b2beb.rodata} |83.2%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/_34d79c26d54ce2949b2c9fc34e.yasm |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.a |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |83.1%| [AR] {RESULT} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.a |83.2%| [AR] {RESULT} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |83.2%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/_4ddfb97a4ab74c0f9e6dca9ae4.yasm |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |83.2%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/_102e130291da49e85913bbc5a2.yasm |83.2%| [AR] {RESULT} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |83.2%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.a |83.2%| [AS] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/_d1f75e9778c0cbb4144b453225.yasm |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |83.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fq_runner/_{d1f75e9778c0cbb4144b453225.yasm ... 4fc6ad86f2e153876bf4a9e18e.rodata} |83.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/_{4ddfb97a4ab74c0f9e6dca9ae4.yasm ... eb2d1cce6188adc65cfc8d8301.rodata} |83.2%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__free_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__memalign_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__realloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined |83.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/mdb_mock/recipe |83.2%| [LD] {RESULT} $(B)/ydb/tests/tools/mdb_mock/recipe |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/recipe |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |83.2%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |83.2%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |83.2%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |83.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/fq/common/ydb-tests-fq-common |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |83.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |83.4%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |83.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TOosLogicTests::RenderHtml >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::PatchPartOk >> TVPatchTests::FindingPartsWhenError [GOOD] >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartPutError >> TVPatchTests::PatchPartGetError >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::PatchPartOk [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TVPatchTests::PatchPartPutError [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> 
TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.614699Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.619565Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-04-07T13:45:48.620290Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-07T13:45:48.620892Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::FindingPartsWithTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.879051Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.880178Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.880238Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.880416Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-07T13:45:48.880474Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.880627Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-07T13:45:48.880714Z 
node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-07T13:45:48.880774Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-07T13:45:48.880931Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-04-07T13:45:48.880981Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-07T13:45:48.881042Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.890689Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.892113Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.892176Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.892365Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-07T13:45:48.892422Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.892597Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-07T13:45:48.892696Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.950746Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.952842Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.952903Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-07T13:45:48.953105Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.953259Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-04-07T13:45:48.953320Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.842081Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.842760Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.842811Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.843010Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-07T13:45:48.843061Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.843225Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 
PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-07T13:45:48.843283Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-07T13:45:48.843337Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-07T13:45:48.843501Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-04-07T13:45:48.843551Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-07T13:45:48.843613Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.897178Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.897863Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.897917Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-07T13:45:48.898103Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-07T13:45:48.898176Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.898321Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> 
TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.709191Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.716645Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.716845Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-07T13:45:48.717692Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-07T13:45:48.977416Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.977606Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.977688Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.977839Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-07T13:45:48.977899Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-07T13:45:48.977961Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-04-07T13:45:48.987004Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:48.988003Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-07T13:45:48.988052Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv 
NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-07T13:45:48.988207Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-07T13:45:48.988271Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:48.988421Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-04-07T13:45:49.129563Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-07T13:45:49.130366Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-07T13:45:49.130481Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-07T13:45:49.130668Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-07T13:45:49.130758Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-07T13:45:49.130834Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-07T13:45:49.205575Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-04-07T13:45:49.216856Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-04-07T13:45:49.216963Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 
2025-04-07T13:45:49.217034Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |83.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |83.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |83.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> SequenceShardTests::Basics |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |83.4%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression >> TColumnShardTestSchema::HotTiers >> TColumnShardTestSchema::InternalTTL_Types >> NodeWardenDsProxyConfigRetrieval::Disconnect >> TColumnShardTestSchema::RebootOneTier >> TColumnShardTestSchema::HotTiersWithStat |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::DropWriteRace >> TColumnShardTestSchema::RebootDrop |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl >> TColumnShardTestSchema::OneColdTier |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction >> TColumnShardTestSchema::InternalTTL >> TColumnShardTestSchema::ExternalTTL_Types >> TColumnShardTestSchema::RebootForgetAfterFail >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> TColumnShardTestSchema::Drop >> TColumnShardTestSchema::ForgetAfterFail >> TColumnShardTestSchema::RebootHotTiersRevCompression >> TColumnShardTestSchema::RebootOneTierExternalTtl >> TColumnShardTestSchema::CreateTable >> TColumnShardTestSchema::RebootHotTiers >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> TColumnShardTestSchema::ExternalTTL >> TColumnShardTestSchema::ColdTiersWithStat >> TColumnShardTestSchema::RebootExportAfterFail >> TColumnShardTestSchema::RebootHotTiersWithStat >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::RebootColdTiersWithStat >> TColumnShardTestSchema::RebootExternalTTL >> TColumnShardTestSchema::RebootInternalTTL >> TColumnShardTestSchema::OneTier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-04-07T13:45:59.699360Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T13:45:59.853424Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskGuid: 8068472542122121107 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 8068472542122121107 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 8068472542122121107 } } } } AvailabilityDomains: 0 } 2025-04-07T13:45:59.853647Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskCategory# 
{Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T13:45:59.854503Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:45:59.855367Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 8068472542122121107 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T13:45:59.860481Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 8068472542122121107 2025-04-07T13:45:59.864932Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T13:45:59.875180Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T13:45:59.875421Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T13:45:59.875866Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T13:45:59.879010Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T13:45:59.879129Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T13:45:59.879430Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T13:46:00.208449Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T13:46:00.208508Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T13:46:00.448256Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T13:46:00.448482Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T13:46:00.467014Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T13:46:00.476019Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T13:46:00.477615Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskGuid: 8068472542122121107 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 8068472542122121107 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 8068472542122121107 } } } } AvailabilityDomains: 0 } 2025-04-07T13:46:00.479556Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T13:46:00.481117Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T13:46:00.529319Z node 1 
:BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T13:46:00.529362Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T13:46:00.529806Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\235\r\250=;\236%\t\266\326\317\305\236u:\304\262rf`" } 2025-04-07T13:46:00.530286Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T13:46:00.530514Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:44:2087] SessionId# [0:0:0] Cookie# 0 2025-04-07T13:46:00.531271Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.081649s 2025-04-07T13:46:00.531472Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T13:46:00.531494Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-04-07T13:46:00.571676Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:00.581421Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:00.652986Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:00.760564Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:00.770931Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:00.771785Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:46:00.783143Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:46:00.783179Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:46:00.784332Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:46:00.893859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:46:00.894553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:46:00.895013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:46:00.896003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:46:00.896979Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:46:00.897096Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:46:00.941583Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:46:00.942790Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:46:00.966088Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:46:00.966363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:46:00.966597Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:46:00.967372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:46:00.967854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:46:00.968280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:46:00.968308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:46:00.968528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:46:00.990852Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:46:00.990938Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:46:01.008196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:46:01.008435Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:46:01.121816Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed Pipe connected clientId# [1:28:2073] 2025-04-07T13:46:01.124184Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:48} TEvTabletPipe::TEvClientConnected OK ClientId# [1:28:2073] ServerId# [1:122:2144] TabletId# 72057594037932033 PipeClientId# [1:28:2073] 2025-04-07T13:46:01.125107Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 8068472542122121107 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-07T13:46:01.126802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } 
Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T13:46:01.128457Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat 2025-04-07T13:46:01.157353Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-07T13:46:01.158361Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-07T13:46:01.158643Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-04-07T13:46:01.159732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 8068472542122121107 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-07T13:46:01.160195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 8068472542122121107 Status: READY OnlyPhantomsRemain: false } } 2025-04-07T13:46:01.172709Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:01.183203Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:01.189855Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-07T13:46:01.190515Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:01.190790Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-07T13:46:01.212560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:28:2073] 2025-04-07T13:46:01.225712Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:28:2073] ServerId# [1:122:2144] TabletId# 72057594037932033 PipeClientId# [1:28:2073] 2025-04-07T13:46:01.226604Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:132:2154] ControllerId# 72057594037932033 2025-04-07T13:46:01.226974Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T13:46:01.234798Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-04-07T13:46:01.235004Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-04-07T13:46:01.235403Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2147483648 
2025-04-07T13:46:01.241179Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:132:2154] 2025-04-07T13:46:01.242521Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:48} TEvTabletPipe::TEvClientConnected OK ClientId# [1:132:2154] ServerId# [1:135:2156] TabletId# 72057594037932033 PipeClientId# [1:132:2154] 2025-04-07T13:46:01.242683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 8068472542122121107 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-07T13:46:01.244177Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-07T13:46:01.249541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:135:2156] RecipientRewrite# [1:88:2120] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-04-07T13:46:01.249986Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-04-07T13:46:01.251160Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-07T13:46:01.251585Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green } } 2025-04-07T13:46:01.280876Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskGuid: 8068472542122121107 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "562b688d-794cfd18-e3fe95f5-b1fc1b8f" Comprehensive: true AvailDomain: 0 } 2025-04-07T13:46:01.282178Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/u6pr/000cab/r3tmp/tmpkmBnK6/static.dat" PDiskGuid: 8068472542122121107 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } 
VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-07T13:46:01.288855Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 8068472542122121107 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T13:46:01.298440Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 8068472542122121107 2025-04-07T13:46:01.299708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-04-07T13:46:01.348538Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-04-07T13:46:01.348998Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 8068472542122121107 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-07T13:46:01.366696Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 2025-04-07T13:46:01.395064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 8068472542122121107 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-04-07T13:46:01.417710Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-07T13:46:01.516245Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-04-07T13:46:01.561756Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:01.564215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 8068472542122121107 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-07T13:46:01.566887Z node 1 :BS_NODE DEBUG: 
{NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2025-04-07T13:46:01.584950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 8068472542122121107 Status: READY OnlyPhantomsRemain: false } } >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> TColumnShardTestSchema::ExportWithLostAnswer >> TColumnShardTestSchema::DropWriteRace [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-04-07T13:46:02.698326Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:04.486717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:04.514019Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:04.514081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:04.514286Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:04.561587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:04.561761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:04.561917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:04.562007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:04.562088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:04.562198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:04.562274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:04.562343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:04.562424Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:04.562493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:04.562568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:04.562658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:04.647900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:04.676378Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:04.677307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:04.677742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:04.678701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:04.684900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:04.685637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:04.685881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:04.686543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:04.687013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:04.687250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:04.687460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:04.693917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:04.694596Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:04.694632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:04.694657Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:04.695513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:04.695938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:04.696394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:04.696630Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:04.701087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:04.701134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:04.701157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:04.701426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:04.701453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:04.701472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:04.702713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=411; 2025-04-07T13:46:04.705794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=2272; 2025-04-07T13:46:04.706241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=237; 2025-04-07T13:46:04.707379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=465; 2025-04-07T13:46:04.708480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:04.716885Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:04.717125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:04.718491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:04.718938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:04.719338Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:04.719966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:04.719997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:46:04.720026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:46:04.725382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:46:04.725820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:46:04.726039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:46:04.726854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event ... 
t; 2025-04-07T13:46:05.753183Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004752960;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:05.760045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004752960;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;fline=schema.h:35;event=sync_schema; 2025-04-07T13:46:05.761181Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004752960;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;fline=tx_controller.cpp:340;event=registered; 2025-04-07T13:46:05.799470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004752960;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881129024;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-07T13:46:05.800264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004752960;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881129024;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=tx_controller.cpp:371;event=start;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;; 2025-04-07T13:46:05.802346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004752960;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881129024;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2025-04-07T13:46:05.802821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004752960;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881129024;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-04-07T13:46:05.804845Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2025-04-07T13:46:05.805695Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-07T13:46:05.806888Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:46:05.807907Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-07T13:46:05.899296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:557;event=OnTieringModified;new_count_tierings=0; 2025-04-07T13:46:05.900532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:262;method=RegisterTable;path_id=1; 2025-04-07T13:46:05.901256Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=1; 2025-04-07T13:46:06.013086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:46:06.070398Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-07T13:46:06.071375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6120;columns=10; 2025-04-07T13:46:06.134927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:46:06.135682Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:46:06.379628Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-04-07T13:46:06.407243Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-07T13:46:06.409731Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-07T13:46:06.437997Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-07T13:46:06.438663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:06.442368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT_WRITE;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:06.451404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT_WRITE;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:340;event=registered; 2025-04-07T13:46:06.486740Z node 1 :TX_COLUMNSHARD DEBUG: method=TTxController::FinishProposeOnComplete;tx_id=102;fline=tx_controller.cpp:371;event=start;tx_info=102:TX_KIND_COMMIT_WRITE;min=1744033564881;max=1744033594881;plan=0;src=[1:97:2132];cookie=0; 2025-04-07T13:46:06.489886Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=88923005045728;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:06.501500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=88923005045728;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:340;event=registered; 2025-04-07T13:46:06.534821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005045728;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881208128;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-07T13:46:06.536007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005045728;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881208128;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=tx_controller.cpp:371;event=start;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;; 2025-04-07T13:46:06.542281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005045728;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881208128;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2025-04-07T13:46:06.542543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005045728;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881208128;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-04-07T13:46:06.544287Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2025-04-07T13:46:06.549392Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-04-07T13:46:06.550303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:46:06.551445Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2025-04-07T13:46:06.590669Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-07T13:46:06.591344Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:06.593575Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2025-04-07T13:46:06.601986Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] execute at tablet 9437184 2025-04-07T13:46:06.602780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:46:06.603743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=abstract.h:78;progress_tx_id=102;lock_id=1;broken=0; 2025-04-07T13:46:06.614555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=execute;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2025-04-07T13:46:06.647505Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] complete at tablet 9437184 2025-04-07T13:46:06.648012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=abstract.h:88;progress_tx_id=102;lock_id=1;broken=0; 2025-04-07T13:46:06.649169Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> TestProgram::JsonValueBinary >> SequenceShardTests::NegativeIncrement [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |83.5%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-04-07T13:45:58.807781Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-07T13:45:58.807871Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-07T13:45:58.818084Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-07T13:45:58.821557Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-07T13:45:58.821625Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-07T13:45:58.870001Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-07T13:45:58.870826Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-07T13:45:58.913446Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxCreateSequence.Complete 2025-04-07T13:45:58.913920Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-07T13:45:58.914307Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:58.916400Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-07T13:45:58.918163Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-04-07T13:45:58.918845Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-04-07T13:45:58.937965Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-07T13:45:58.939424Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:45:58.939847Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-04-07T13:45:58.966492Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:58.968244Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-07T13:45:58.972866Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-04-07T13:45:58.990057Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:58.990308Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-07T13:45:58.990398Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-04-07T13:45:59.006406Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.006708Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-04-07T13:45:59.006800Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-04-07T13:45:59.030693Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.032522Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-04-07T13:45:59.032742Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 
123, LocalPathId: 99] 2025-04-07T13:45:59.033131Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.034857Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-07T13:45:59.034924Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-04-07T13:45:59.056784Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.058338Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-07T13:45:59.058582Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.058962Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.060267Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.060689Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.085001Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-07T13:45:59.086713Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.086958Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.087361Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-07T13:45:59.139210Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-07T13:45:59.139300Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-07T13:45:59.139769Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-07T13:45:59.140208Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-07T13:45:59.140383Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-07T13:45:59.176396Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:45:59.176984Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:45:59.177379Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.185024Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-07T13:45:59.185470Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-04-07T13:45:59.235185Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxAllocateSequence.Complete 2025-04-07T13:45:59.237715Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-04-07T13:45:59.237964Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-07T13:45:59.256346Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-07T13:45:59.258031Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-07T13:45:59.258416Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-04-07T13:45:59.274453Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.274908Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-04-07T13:45:59.274983Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-07T13:45:59.287150Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-07T13:45:59.287496Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-07T13:45:59.287576Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-04-07T13:45:59.324099Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:45:59.334385Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-07T13:45:59.334840Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-07T13:45:59.335081Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-04-07T13:46:06.056751Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-07T13:46:06.057019Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-07T13:46:06.092136Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-07T13:46:06.103349Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-07T13:46:06.103605Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-07T13:46:06.118687Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-04-07T13:46:06.119454Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-07T13:46:06.119906Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute 
SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-07T13:46:06.151779Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-07T13:46:09.396924Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:46:09.396996Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:46:09.417411Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-07T13:46:09.418154Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-07T13:46:09.418569Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:46:09.419023Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:09.420445Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-07T13:46:09.420950Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-07T13:46:09.452122Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-07T13:46:09.452463Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-04-07T13:46:09.452556Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-04-07T13:46:09.470738Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:09.472649Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-07T13:46:09.477101Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-07T13:46:09.477342Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-07T13:46:09.478529Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-07T13:46:09.478967Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-07T13:46:09.497858Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-07T13:46:09.498652Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 
42] RedirectTo# 12345 2025-04-07T13:46:09.498715Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-07T13:46:09.522295Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-07T13:46:09.522505Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-07T13:46:09.522561Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-07T13:46:09.536576Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-07T13:46:09.537619Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:46:09.537659Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-04-07T13:46:09.537704Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:09.538113Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-07T13:46:09.538547Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-07T13:46:09.558857Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-07T13:46:09.564762Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-07T13:46:09.565511Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-07T13:46:09.590332Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-07T13:46:09.591056Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-07T13:46:09.591124Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-07T13:46:09.617399Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-07T13:46:09.618754Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-07T13:46:09.619190Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-04-07T13:46:09.619622Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-07T13:46:09.625717Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 
Cache# 0 2025-04-07T13:46:09.626204Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2025-04-07T13:46:09.654451Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.615654Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-07T13:46:11.615830Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-07T13:46:11.629095Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-07T13:46:11.633230Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-07T13:46:11.633292Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-07T13:46:11.637730Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-04-07T13:46:11.637876Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-04-07T13:46:11.660975Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-07T13:46:11.661268Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:46:11.661371Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-07T13:46:11.674123Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.674422Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:46:11.674517Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-07T13:46:11.687316Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.687656Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-07T13:46:11.687745Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-04-07T13:46:11.702124Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.702489Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-07T13:46:11.702544Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:46:11.702603Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 
72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.702887Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2025-04-07T13:46:11.702970Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-07T13:46:11.716104Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-07T13:46:11.716483Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:46:11.716592Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-07T13:46:11.729756Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-07T13:46:11.730116Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-07T13:46:11.730207Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-07T13:46:11.743849Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TColumnShardTestSchema::CreateTable [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2025-04-07T13:46:09.323752Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:11.152021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.374487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.374715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.376499Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.487601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.487780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.487999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.488113Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.488219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.488325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.488413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.488511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.488599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.500800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.501952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.502260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.653134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.682620Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.683195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.683471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.689274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.690414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.690741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.690854Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.691206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.691331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.691359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.691378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.692461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.696745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.696790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.696818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.697051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.697289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.697755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.697782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.697912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.698101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.698210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.698307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.698428Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.698449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.698990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=184; 2025-04-07T13:46:11.699379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=185; 2025-04-07T13:46:11.699609Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=119; 2025-04-07T13:46:11.699945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=119; 2025-04-07T13:46:11.700394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.700597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.700622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.714068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.714121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.714313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.714877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.714909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:46:11.715095Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:46:11.715803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:46:11.716008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:46:11.716033Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:46:11.716543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event= ... 8923004812096;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881218112;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-07T13:46:14.390154Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004812096;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881218112;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=tx_controller.cpp:371;event=start;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;; 2025-04-07T13:46:14.390580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004812096;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881218112;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2025-04-07T13:46:14.390991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004812096;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881218112;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-04-07T13:46:14.392416Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2025-04-07T13:46:14.392643Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-04-07T13:46:14.397200Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:46:14.398055Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-04-07T13:46:14.398106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:262;method=RegisterTable;path_id=19; 2025-04-07T13:46:14.398134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=19; 2025-04-07T13:46:14.413631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=19; 2025-04-07T13:46:14.430234Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-04-07T13:46:14.430542Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-07T13:46:14.450530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004814784;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:14.456700Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004814784;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-07T13:46:14.476437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=88923004814784;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2025-04-07T13:46:14.477412Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=88923004814784;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" 
TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-07T13:46:14.481468Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004816576;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:14.482731Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004816576;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-07T13:46:14.499024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=88923004816576;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2025-04-07T13:46:14.499281Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=88923004816576;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { 
Version: 1 } } } } 2025-04-07T13:46:14.502174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004818368;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:311;event=start; 2025-04-07T13:46:14.504322Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004818368;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-07T13:46:14.535433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=88923004818368;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2025-04-07T13:46:14.535481Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=88923004818368;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> TestProgram::JsonValueBinary [GOOD] >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::SessionsServerLimit >> TestProgram::SimpleFunction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\001H\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\001H?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?pNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?pJson2.JsonDocumentSqlValueInt64\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Int64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\010\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p>Json2.JsonDocumentSqlValueInt64\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\024ToIntegral?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Id: 8 Arguments { Name: "uid" } } } } Command { Projection { Columns { Name: "0" } } } ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=BinaryLength;arguments=[uid;];column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10UInt64TypeE; >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=6184;before_rows=0;after_rows=100; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |83.5%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |83.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> TColumnShardTestSchema::RebootDrop [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |83.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TestProgram::YqlKernel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] Test command err: 2025-04-07T13:46:03.816087Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:05.244336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:05.426531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:05.426809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:05.438283Z 
node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:05.665603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:05.667184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:05.669294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:05.669618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:05.670287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:05.670569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:05.670810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:05.671071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:05.671739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:05.671998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:05.672263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:05.673189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:05.783724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:05.799623Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:05.800273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:05.800541Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:05.801621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:05.802830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:05.803491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:05.803950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:05.804402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:05.812746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:05.813011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:05.813255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:05.814987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:05.815710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:05.815934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:05.816152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:05.816842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:05.817065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:05.820192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:05.820267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:05.820348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:05.820584Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:05.820605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:05.820639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:05.820675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:05.820696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:05.821994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=606; 2025-04-07T13:46:05.823098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=438; 2025-04-07T13:46:05.823723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=205; 2025-04-07T13:46:05.833372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8792; 2025-04-07T13:46:05.834375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:05.834640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:05.834875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:05.836623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:05.840878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:05.841114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:05.842179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:05.842219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:46:05.842242Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:46:05.843923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:46:05.844288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:46:05.848710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:46:05.849636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event ... OLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-04-07T13:46:23.945795Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1713272;portion_bytes=1713300;portion_raw_bytes=2584610; 2025-04-07T13:46:23.945879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=7;before_size=5352628;after_size=3639328;before_rows=80000;after_rows=53332; 2025-04-07T13:46:23.945934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1713272;portion_bytes=1713300;portion_raw_bytes=2584610; 2025-04-07T13:46:23.946160Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2025-04-07T13:46:23.946218Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=5;before_size=3639328;after_size=1816372;before_rows=53332;after_rows=26664; 2025-04-07T13:46:23.946254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2025-04-07T13:46:23.946361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2025-04-07T13:46:23.946417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=6;before_size=1816372;after_size=0;before_rows=26664;after_rows=0; 2025-04-07T13:46:23.946453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2025-04-07T13:46:23.946559Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;fline=manager.cpp:14;event=unlock;process_id=CS::CLEANUP::PORTIONS::COMPOSITE::b1f5ab28-13b611f0-b5712b47-930975b8; 2025-04-07T13:46:23.946643Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:46:23.946717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:23.946794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:46:23.946874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-07T13:46:23.946932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:46:23.947000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:46:23.947074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:46:23.947128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:46:23.947191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:46:23.947253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b1f5ab28-13b611f0-b5712b47-930975b8;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:46:23.947393Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:1351840:0] 2025-04-07T13:46:23.947466Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:1:1347832:0] 2025-04-07T13:46:23.947501Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:1286024:0] 2025-04-07T13:46:23.947583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:23.947647Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=7;external_task_id=b1f5ab28-13b611f0-b5712b47-930975b8;mem=4104;cpu=0; 2025-04-07T13:46:24.052906Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-04-07T13:46:24.057689Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:46:24.080347Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:379:2378];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "saved_at" } } } ; 2025-04-07T13:46:24.080471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:379:2378];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[saved_at;];};]; 2025-04-07T13:46:24.089999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:379:2378];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:435:2426];trace_detailed=; 2025-04-07T13:46:24.117968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:355;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-07T13:46:24.118169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-07T13:46:24.118462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:24.118568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:24.119855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:24.120506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:435:2426] finished for tablet 9437184 2025-04-07T13:46:24.121018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:435:2426] send ScanData to [1:428:2420] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:24.123703Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:435:2426] and sent to [1:428:2420] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.002},{"events":["l_bootstrap","f_ProduceResults"],"t":0.028},{"events":["f_ack","f_processing"],"t":0.029},{"events":["l_ProduceResults","f_Finish"],"t":0.03},{"events":["l_ack","l_processing","l_Finish"],"t":0.031}],"full":{"a":1744033584089322,"name":"_full_task","f":1744033584089322,"d_finished":0,"c":0,"l":1744033584121250,"d":31928},"events":[{"name":"bootstrap","f":1744033584091337,"d_finished":26953,"c":1,"l":1744033584118290,"d":26953},{"a":1744033584118441,"name":"ack","f":1744033584118441,"d_finished":0,"c":0,"l":1744033584121250,"d":2809},{"a":1744033584118426,"name":"processing","f":1744033584118426,"d_finished":0,"c":0,"l":1744033584121250,"d":2824},{"name":"ProduceResults","f":1744033584118273,"d_finished":1805,"c":2,"l":1744033584120287,"d":1805},{"a":1744033584120288,"name":"Finish","f":1744033584120288,"d_finished":0,"c":0,"l":1744033584121250,"d":962}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:24.124381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:24.080550Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:46:24.124825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:24.125004Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:46:24.126237Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:435:2426];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TestProgram::YqlKernel [GOOD] >> TColumnShardTestSchema::Drop [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "sum" } Arguments { Name: "vat" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[sum;vat;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:46:21.978622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:46:21.978721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:21.978772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:46:21.978806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:46:21.978843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:46:21.978866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:46:21.978928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:21.979217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:46:22.044331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:46:22.044390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:22.054453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:46:22.054583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:46:22.054708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:46:22.059576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:46:22.059794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:46:22.060335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:22.060531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:46:22.063730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:22.066396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:22.066466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:22.066591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:46:22.066636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:22.066668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:46:22.066837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.073313Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:46:22.455208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:46:22.456793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.458011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:46:22.459917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:46:22.460363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.477298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:22.478692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:46:22.479653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.480300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:46:22.480801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:46:22.481221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:46:22.492815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.492876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-07T13:46:22.492909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:46:22.501151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.501214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.501953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:22.502854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:46:22.514798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:46:22.518946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:46:22.520558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:46:22.530844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:22.531011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:46:22.531058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:22.531295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:46:22.531338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:22.531486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:22.531559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:46:22.538826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:22.539081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:22.539900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:22.540335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:46:22.541729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:22.542149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:46:22.543521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:46:22.543792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:22.544229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:46:22.544623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:22.544863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:46:22.545071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:46:22.545510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:46:22.545725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:46:22.546234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:46:22.558000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:22.559641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:22.560161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:46:22.560453Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:46:22.565594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:22.566529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
rationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T13:46:26.791382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T13:46:26.791594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T13:46:26.797335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:46:26.797858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T13:46:26.798135Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:46:26.839704Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:26.840218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 377 RawX2: 8589936938 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:46:26.840465Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-07T13:46:26.844998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T13:46:27.052105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-07T13:46:27.052502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-07T13:46:27.052555Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-07T13:46:27.057332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T13:46:27.057618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T13:46:27.058429Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-07T13:46:27.059965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T13:46:27.074047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:46:27.074938Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:27.075425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:46:27.082420Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:27.082701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T13:46:27.084916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:46:27.085159Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T13:46:27.086494Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T13:46:27.086990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:46:27.087476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-07T13:46:27.087763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:46:27.088218Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:46:27.088476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:46:27.089002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T13:46:27.089482Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-07T13:46:27.089707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T13:46:27.092248Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:46:27.092330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:46:27.093075Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:46:27.093546Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T13:46:27.093808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T13:46:27.094088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-07T13:46:27.094548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-04-07T13:46:27.105237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:46:27.105542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:46:27.105802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:686:2608] TestWaitNotification: OK eventTxId 105 2025-04-07T13:46:27.166654Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:46:27.168345Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 1.72ms result status StatusSuccess 2025-04-07T13:46:27.174076Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestProgram::YqlKernelEndsWith ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2025-04-07T13:46:09.368141Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:10.843642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.206811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.207058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.218419Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.421239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.421667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.429460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.429674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.430020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.430193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.430552Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.430798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.430976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.431480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.431664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.431911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.541335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.550428Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.550564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.550613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.550785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.550900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.551029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.551064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.551142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.551199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.551243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.551272Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.551411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.551468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.551501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.551521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.551602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.551652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.551703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.551735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.551797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.551827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.551849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.551884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.551911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.551930Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.552056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-07T13:46:11.552477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=374; 2025-04-07T13:46:11.552565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-04-07T13:46:11.552724Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-07T13:46:11.552872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.552934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.552968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.553157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.553204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.553237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.553397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.553437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:46:11.553465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:46:11.553645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:46:11.553691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:46:11.553721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:46:11.553852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... 
aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1713296;portion_bytes=1713324;portion_raw_bytes=2584610; 2025-04-07T13:46:26.896047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=1;before_size=5362960;after_size=3649636;before_rows=80100;after_rows=53432; 2025-04-07T13:46:26.896587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1713296;portion_bytes=1713324;portion_raw_bytes=2584610; 2025-04-07T13:46:26.901582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2025-04-07T13:46:26.901636Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=4;before_size=3649636;after_size=1826680;before_rows=53432;after_rows=26764; 2025-04-07T13:46:26.901897Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2025-04-07T13:46:26.901988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2025-04-07T13:46:26.902022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=5;before_size=1826680;after_size=10308;before_rows=26764;after_rows=100; 2025-04-07T13:46:26.902041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2025-04-07T13:46:26.902115Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=10280;portion_bytes=10308;portion_raw_bytes=8378; 2025-04-07T13:46:26.902149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=1;portion=6;before_size=10308;after_size=0;before_rows=100;after_rows=0; 2025-04-07T13:46:26.902171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=10280;portion_bytes=10308;portion_raw_bytes=8378; 2025-04-07T13:46:26.902477Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;fline=manager.cpp:14;event=unlock;process_id=CS::CLEANUP::PORTIONS::COMPOSITE::b3b28c42-13b611f0-899c0aa6-4b552695; 2025-04-07T13:46:26.903296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:46:26.904048Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:26.904310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:46:26.916969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-07T13:46:26.917492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:46:26.918010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:46:26.918483Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:46:26.918978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:46:26.919749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b3b28c42-13b611f0-899c0aa6-4b552695;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:46:26.929694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:1:1347832:0] 2025-04-07T13:46:26.930199Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:8760:0] 2025-04-07T13:46:26.930228Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:1351840:0] 2025-04-07T13:46:26.930255Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:1286032:0] 2025-04-07T13:46:26.930789Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:46:26.931554Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:46:26.945870Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "saved_at" } } } ; 2025-04-07T13:46:26.946462Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[saved_at;];};]; 2025-04-07T13:46:26.986495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:305:2320];trace_detailed=; 2025-04-07T13:46:26.987098Z node 1 :TX_COLUMNSHARD DEBUG: 
fline=task.cpp:21;event=free_resources;task_id=4;external_task_id=b3b28c42-13b611f0-899c0aa6-4b552695;mem=4688;cpu=0; 2025-04-07T13:46:27.003652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:355;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-07T13:46:27.007867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-07T13:46:27.009251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:27.010293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:27.011766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:27.012029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:305:2320] finished for tablet 9437184 2025-04-07T13:46:27.012973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:305:2320] send ScanData to [1:296:2311] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:27.017033Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:305:2320] and sent to [1:296:2311] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.003},{"events":["l_bootstrap","f_ProduceResults"],"t":0.022},{"events":["f_ack","f_processing"],"t":0.023},{"events":["l_ProduceResults","f_Finish"],"t":0.026},{"events":["l_ack","l_processing","l_Finish"],"t":0.027}],"full":{"a":1744033586985725,"name":"_full_task","f":1744033586985725,"d_finished":0,"c":0,"l":1744033587013201,"d":27476},"events":[{"name":"bootstrap","f":1744033586988813,"d_finished":19877,"c":1,"l":1744033587008690,"d":19877},{"a":1744033587009226,"name":"ack","f":1744033587009226,"d_finished":0,"c":0,"l":1744033587013201,"d":3975},{"a":1744033587009210,"name":"processing","f":1744033587009210,"d_finished":0,"c":0,"l":1744033587013201,"d":3991},{"name":"ProduceResults","f":1744033587008657,"d_finished":2760,"c":2,"l":1744033587012014,"d":2760},{"a":1744033587012017,"name":"Finish","f":1744033587012017,"d_finished":0,"c":0,"l":1744033587013201,"d":1184}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:27.017541Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:26.946974Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:46:27.018041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:27.018631Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:46:27.019668Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:305:2320];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD] Test command err: 2025-04-07T13:46:01.769255Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:03.071715Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:03.286303Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:03.289659Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:03.487907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:03.488733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:03.490823Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:03.726561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:03.728356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:03.738395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:03.738696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:03.739191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:03.739682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:03.742412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:03.742897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:03.743157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:03.743401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:03.743690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:03.744485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:03.982801Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:03.984641Z node 1 
:TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:03.984910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:03.985121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:04.012469Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:04.012994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:04.013645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:04.014694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:04.016582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:04.016636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:04.024768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:04.025600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:04.025855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:04.026113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:04.026329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:04.028137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:04.028676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:04.028714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:04.028736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:04.029483Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:04.029730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:04.029952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:04.029977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:04.030418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:04.030445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:04.030652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:04.030683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:04.030709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:04.031125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:04.036842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=416; 2025-04-07T13:46:04.037701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=225; 2025-04-07T13:46:04.038753Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=424; 2025-04-07T13:46:04.039717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=567; 2025-04-07T13:46:04.040394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:04.040615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:04.040640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:04.052211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T13:46:04.052444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:04.052772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:04.053604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:04.053845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V ... ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.765227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:29.765343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=40002; 2025-04-07T13:46:29.765394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=320016;num_rows=40002;batch_columns=timestamp; 2025-04-07T13:46:29.765613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 320016 rows: 40002 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T13:46:29.765994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.766098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-07T13:46:29.766133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:29.766178Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:29.766277Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:29.766349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.766373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:29.766448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=13330; 2025-04-07T13:46:29.766484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=106640;num_rows=13330;batch_columns=timestamp; 2025-04-07T13:46:29.766647Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 106640 rows: 13330 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T13:46:29.766722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.767058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.767158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.767256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:29.767337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.767405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:29.767434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] finished for tablet 9437184 2025-04-07T13:46:29.767506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:29.767933Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:546:2537] and sent to [1:545:2536] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.53},{"events":["l_ProduceResults","f_Finish"],"t":0.533},{"events":["l_ack","l_processing","l_Finish"],"t":0.534}],"full":{"a":1744033589233445,"name":"_full_task","f":1744033589233445,"d_finished":0,"c":0,"l":1744033589767555,"d":534110},"events":[{"name":"bootstrap","f":1744033589233634,"d_finished":4442,"c":1,"l":1744033589238076,"d":4442},{"a":1744033589767243,"name":"ack","f":1744033589763804,"d_finished":3242,"c":3,"l":1744033589767190,"d":3554},{"a":1744033589767235,"name":"processing","f":1744033589238151,"d_finished":220804,"c":24,"l":1744033589767192,"d":221124},{"name":"ProduceResults","f":1744033589235621,"d_finished":9535,"c":29,"l":1744033589767422,"d":9535},{"a":1744033589767424,"name":"Finish","f":1744033589767424,"d_finished":0,"c":0,"l":1744033589767555,"d":131},{"name":"task_result","f":1744033589238172,"d_finished":217148,"c":21,"l":1744033589763705,"d":217148}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T13:46:29.768168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:29.232336Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4710140;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4710140;selected_rows=0; 2025-04-07T13:46:29.768205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:29.768359Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.354942s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.822088s;size=0.002187128;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.012627s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.045173s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:46:29.768438Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |83.6%| [LD] {RESULT} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut >> 
TColumnShardTestSchema::InternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:suffix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; >> TestProgram::JsonValue >> TColumnShardTestSchema::ExternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL [GOOD] Test command err: 2025-04-07T13:46:06.737939Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:07.971825Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:08.115633Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:08.124837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:08.253246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:08.254077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:08.256605Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:08.355794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:08.356909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:08.358157Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:08.358241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:08.358486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:08.358796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:08.359215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:08.359294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:08.361267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:08.361552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.361974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:08.362959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:08.461786Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:08.463149Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:08.463755Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:08.463965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:08.480695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:08.481420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:08.481611Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:08.481760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:08.481878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:08.481934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:08.481964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:08.482022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:08.482063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:08.482117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:08.482148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:08.482283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:08.482333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:08.482360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:08.482386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:08.482471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:08.482514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:08.482731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:08.482754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:08.482799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:08.482825Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:08.482843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:08.482873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:08.482900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:08.482923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:08.483050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-07T13:46:08.483127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-07T13:46:08.483948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T13:46:08.484377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=210; 2025-04-07T13:46:08.487337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:08.487748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:08.487777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:08.489460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:08.489497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.489520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.490317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:08.490702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; ... 
5;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:46:31.982101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:31.982143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:46:31.982179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:32.019605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:46:32.019940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:46:32.020222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=4; 2025-04-07T13:46:32.020284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2025-04-07T13:46:32.020336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:46:32.020464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.020506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T13:46:32.020546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:32.020789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:32.022747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.023053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:32.024504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2025-04-07T13:46:32.037332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2025-04-07T13:46:32.037753Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] send ScanData to [1:361:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:32.039087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.039704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.044707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.045830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:32.047144Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.048160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:32.052552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] finished for tablet 9437184 2025-04-07T13:46:32.052889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] send ScanData to [1:361:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:32.056410Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:366:2378] and sent to [1:361:2373] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.002},{"events":["f_ProduceResults"],"t":0.023},{"events":["l_bootstrap"],"t":0.04},{"events":["f_processing","f_task_result"],"t":0.048},{"events":["f_ack","l_task_result"],"t":2.675},{"events":["l_ProduceResults","f_Finish"],"t":2.703},{"events":["l_ack","l_processing","l_Finish"],"t":2.708}],"full":{"a":1744033589345019,"name":"_full_task","f":1744033589345019,"d_finished":0,"c":0,"l":1744033592053504,"d":2708485},"events":[{"name":"bootstrap","f":1744033589347565,"d_finished":37932,"c":1,"l":1744033589385497,"d":37932},{"a":1744033592045566,"name":"ack","f":1744033592020763,"d_finished":24429,"c":1,"l":1744033592045192,"d":32367},{"a":1744033592045546,"name":"processing","f":1744033589393918,"d_finished":82128,"c":5,"l":1744033592045196,"d":90086},{"name":"ProduceResults","f":1744033589368904,"d_finished":36073,"c":8,"l":1744033592048656,"d":36073},{"a":1744033592048659,"name":"Finish","f":1744033592048659,"d_finished":0,"c":0,"l":1744033592053504,"d":4845},{"name":"task_result","f":1744033589393935,"d_finished":57586,"c":4,"l":1744033592020598,"d":57586}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:32.060993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:29.332873Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T13:46:32.061290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:32.062677Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:2.600673s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=2.359412s;size=0.0063152;details={columns=9;};};{name=ASSEMBLER::LAST;duration=0.240132s;size=0.0063152;details={columns=(column_ids=9;column_names=saved_at;);;};};]};; 2025-04-07T13:46:32.063986Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> TColumnShardTestSchema::RebootInternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2025-04-07T13:46:10.301426Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:11.171812Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:11.335342Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:11.335698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.368204Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.369083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.371844Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.409371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.409632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.409850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.409955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.410061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.410170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.410281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.410394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.410497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.410645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.410761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.410868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.433933Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:11.434260Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:11.434336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.434388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:11.436258Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.436379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.436426Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.436593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.436771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.436843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.436877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.436958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.437026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.437080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.437118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.437282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.437350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.437404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.437431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.437535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.437595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.437636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.437665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.437731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.437763Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.437788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.437830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.437865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.437901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.438067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-07T13:46:11.438142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-07T13:46:11.438232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-07T13:46:11.438326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-07T13:46:11.438465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.438510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.438539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.438766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.438818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.438849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.438982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.439016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
ource.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:46:36.156647Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.156697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:46:36.156730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:36.170414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:46:36.170477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:46:36.171043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=4; 2025-04-07T13:46:36.171549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2025-04-07T13:46:36.172058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:46:36.172405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.172443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T13:46:36.172481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:36.173132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:36.174411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.174689Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:36.176198Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2025-04-07T13:46:36.176716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2025-04-07T13:46:36.177492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] send ScanData to [1:361:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:36.179164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.179537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.180844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.181431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:36.183162Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.184466Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:36.184736Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] finished for tablet 9437184 2025-04-07T13:46:36.185249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:366:2378] send ScanData to [1:361:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:36.188723Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:366:2378] and sent to [1:361:2373] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.002},{"events":["f_ProduceResults"],"t":0.014},{"events":["l_bootstrap"],"t":0.025},{"events":["f_processing","f_task_result"],"t":0.03},{"events":["l_task_result"],"t":0.778},{"events":["f_ack"],"t":0.779},{"events":["l_ProduceResults","f_Finish"],"t":0.79},{"events":["l_ack","l_processing","l_Finish"],"t":0.791}],"full":{"a":1744033595393871,"name":"_full_task","f":1744033595393871,"d_finished":0,"c":0,"l":1744033596185503,"d":791632},"events":[{"name":"bootstrap","f":1744033595396550,"d_finished":22844,"c":1,"l":1744033595419394,"d":22844},{"a":1744033596181412,"name":"ack","f":1744033596172888,"d_finished":7984,"c":1,"l":1744033596180872,"d":12075},{"a":1744033596181153,"name":"processing","f":1744033595424442,"d_finished":33114,"c":5,"l":1744033596181083,"d":37464},{"name":"ProduceResults","f":1744033595408442,"d_finished":14157,"c":8,"l":1744033596184723,"d":14157},{"a":1744033596184725,"name":"Finish","f":1744033596184725,"d_finished":0,"c":0,"l":1744033596185503,"d":778},{"name":"task_result","f":1744033595424710,"d_finished":24575,"c":4,"l":1744033596172753,"d":24575}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:36.189040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:35.387111Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T13:46:36.189529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:36.190379Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.740611s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.647916s;size=0.0063152;details={columns=9;};};{name=ASSEMBLER::LAST;duration=0.091532s;size=0.0063152;details={columns=(column_ids=9;column_names=saved_at;);;};};]};; 2025-04-07T13:46:36.191421Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:366:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2025-04-07T13:46:09.831533Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:11.012050Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:11.119852Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:11.120181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.258700Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.259953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.262250Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.340486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.340708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.340909Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.341005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.341108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.341202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.341315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.341413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.341606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.341701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.341797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.341906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.405356Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:11.406807Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:11.407178Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.407362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:11.415125Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.415239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.415280Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.415425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.415541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.415599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.415636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.415713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.415763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.415809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.415839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.415996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.416041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.416078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.416103Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.416188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.416235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.416271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.416296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.416346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.416371Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.416392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.416437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.416462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.416483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.416628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-07T13:46:11.416737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=76; 2025-04-07T13:46:11.416852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=72; 2025-04-07T13:46:11.416926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-07T13:46:11.417053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.417100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.417128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.417298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.417338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.417363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.417491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.417526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.787326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:35.801408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26668; 2025-04-07T13:46:35.802252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=saved_at; 2025-04-07T13:46:35.803452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:35.820626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.821243Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.821458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:35.821854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:35.822331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:35.822435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.822618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:35.822706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26664; 2025-04-07T13:46:35.822882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=saved_at; 2025-04-07T13:46:35.823499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:35.823671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.823819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.824291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.849256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:35.849357Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.849652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:35.849903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] finished for tablet 9437184 2025-04-07T13:46:35.850588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:35.865258Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:544:2539] and sent to [1:543:2538] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.017},{"events":["l_bootstrap"],"t":0.034},{"events":["f_processing","f_task_result"],"t":0.041},{"events":["f_ack","l_task_result"],"t":1.212},{"events":["l_ProduceResults","f_Finish"],"t":1.275},{"events":["l_ack","l_processing","l_Finish"],"t":1.276}],"full":{"a":1744033594573954,"name":"_full_task","f":1744033594573954,"d_finished":0,"c":0,"l":1744033595850864,"d":1276910},"events":[{"name":"bootstrap","f":1744033594575060,"d_finished":33218,"c":1,"l":1744033594608278,"d":33218},{"a":1744033595848969,"name":"ack","f":1744033595786328,"d_finished":62165,"c":3,"l":1744033595848841,"d":64060},{"a":1744033595848954,"name":"processing","f":1744033594615642,"d_finished":408472,"c":27,"l":1744033595848845,"d":410382},{"name":"ProduceResults","f":1744033594591488,"d_finished":82423,"c":32,"l":1744033595849888,"d":82423},{"a":1744033595849891,"name":"Finish","f":1744033595849891,"d_finished":0,"c":0,"l":1744033595850864,"d":973},{"name":"task_result","f":1744033594615874,"d_finished":345405,"c":24,"l":1744033595786231,"d":345405}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:35.865782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:34.564684Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4997532;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4997532;selected_rows=0; 2025-04-07T13:46:35.866481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:35.868065Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.928318s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=1.844267s;size=0.002681008;details={columns=1,2,3,4,9,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.064281s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.147551s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:46:35.873747Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> YdbSdkSessions::TestMultipleSessions [GOOD] >> TestProgram::JsonValue [GOOD] >> TColumnShardTestSchema::OneTier [GOOD] >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] >> KqpSinkMvcc::SnapshotExpiration >> TColumnShardTestSchema::RebootExternalTTL [GOOD] >> TSchemeShardMoveTest::Boot >> KqpLocksTricky::TestNoLocksIssue-withSink >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-04-07T13:46:17.509853Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567178629039516:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:17.510104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c04/r3tmp/tmp3Et0km/pdisk_1.dat 2025-04-07T13:46:21.673557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 16005, node 1 2025-04-07T13:46:22.483371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567178629039516:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:22.491772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" 
AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T13:46:22.491789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:22.529068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:22.777940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:22.778518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:22.787966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:22.787974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:22.788200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:22.804859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:23.004461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:23.329831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:23.350300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:23.351521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:46:23.355832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:23.357529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:23.357551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:46:23.359841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:46:23.369097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:23.369118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:46:23.373855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:23.386109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033583432, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:23.386142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:46:23.386743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:46:23.399028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:23.400060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:23.400193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:46:23.400710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:46:23.400819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:46:23.400861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:46:23.407885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:46:23.407954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:46:23.407968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:23.408081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:46:33.685992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567247348517289:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.686085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.686399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567247348517301:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.689626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.689801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.689825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T13:46:33.689897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.689916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T13:46:33.689970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.690034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:33.690266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2025-04-07T13:46:33.690473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.690488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:46:33.696860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:33.698781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:33.699790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:33.699849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2025-04-07T13:46:33.703321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567247348517339:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.703892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567247348517337:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.704183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.716797Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:33.717889Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:33.718728Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13: ... neration: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.754220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.754228Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:46:33.754696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.754709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.754715Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:46:33.755982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:46:33.757696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:33.757950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), at schemeshard: 72057594046644480 2025-04-07T13:46:33.774332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:33.842796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033593820, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:33.842852Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033593820, at schemeshard: 72057594046644480 2025-04-07T13:46:33.843071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:46:33.843509Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, 
step: 1744033593820, at schemeshard: 72057594046644480 2025-04-07T13:46:33.843543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T13:46:33.843582Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033593820, at schemeshard: 72057594046644480 2025-04-07T13:46:33.843708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2025-04-07T13:46:33.843753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1744033593820 2025-04-07T13:46:33.843907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2025-04-07T13:46:33.852431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:33.881097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:33.889541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2025-04-07T13:46:33.889596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2025-04-07T13:46:33.889731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T13:46:33.898112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2025-04-07T13:46:33.898488Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:46:33.898538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2025-04-07T13:46:33.898632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2025-04-07T13:46:33.898660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2025-04-07T13:46:33.898684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:46:33.898712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T13:46:33.898728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2025-04-07T13:46:33.898855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2025-04-07T13:46:33.898872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2025-04-07T13:46:33.899684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.899843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.899857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:46:33.907644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 
281474976715658 2025-04-07T13:46:33.907686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.907700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:46:33.907868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.907887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.907898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:46:33.907976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.907988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.907995Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:46:33.908081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:33.908092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:33.908099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:46:33.908124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T13:46:34.001598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567247348517303:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:46:34.013305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:46:34.014018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567247348517342:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:46:34.242906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:34.243700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:46:34.253477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:34.253581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:46:34.260740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:34.261926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:37.164625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:37.164896Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpLocks::Invalidate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\001H\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\001H?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r6Json2.SqlValueConvertToUtf8\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Bool FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\006\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r&Json2.SqlValueInt64\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Int64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\010\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r&Json2.SqlValueInt64\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\024ToIntegral?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2025-04-07T13:46:10.083714Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:11.486565Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:11.538029Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:11.538386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.659283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.659862Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.665232Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.728253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.736947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.737880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.738241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.738392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.738462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.739740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.740176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.740254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.740511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.740758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.741291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.814722Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:11.815882Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:11.816111Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.816251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:11.823457Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.823784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.824019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.824888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.825638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.825890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.826102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.826359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.826690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.827142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.827259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.828195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.828334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.828364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.828387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.828829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.828949Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.829218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.829317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.829538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.829635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.829655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.829685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.829792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.829975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.830700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=214; 2025-04-07T13:46:11.830966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=161; 2025-04-07T13:46:11.831482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=153; 2025-04-07T13:46:11.831920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=212; 2025-04-07T13:46:11.832564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.840750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.840806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.842359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.842562Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.842702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.843341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.843387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V ... amp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.039084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:40.039238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=40002; 2025-04-07T13:46:40.039301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=320016;num_rows=40002;batch_columns=timestamp; 2025-04-07T13:46:40.039603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:551:2542] send ScanData to [1:550:2541] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 320016 rows: 40002 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T13:46:40.039761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.039893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.039929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:40.040447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:40.041279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:40.041853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.041881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:40.042219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=13330; 2025-04-07T13:46:40.042486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=106640;num_rows=13330;batch_columns=timestamp; 2025-04-07T13:46:40.043836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:551:2542] send ScanData to [1:550:2541] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 106640 rows: 13330 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T13:46:40.044129Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.044464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.058707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.059353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:40.059918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.060457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.060732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:551:2542] finished for tablet 9437184 2025-04-07T13:46:40.061258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:551:2542] send ScanData to [1:550:2541] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:40.064549Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:551:2542] and sent to [1:550:2541] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.013},{"events":["l_bootstrap"],"t":0.031},{"events":["f_processing","f_task_result"],"t":0.032},{"events":["f_ack","l_task_result"],"t":1.003},{"events":["l_ProduceResults","f_Finish"],"t":1.054},{"events":["l_ack","l_processing","l_Finish"],"t":1.055}],"full":{"a":1744033599006002,"name":"_full_task","f":1744033599006002,"d_finished":0,"c":0,"l":1744033600061752,"d":1055750},"events":[{"name":"bootstrap","f":1744033599007019,"d_finished":30038,"c":1,"l":1744033599037057,"d":30038},{"a":1744033600059100,"name":"ack","f":1744033600009730,"d_finished":35957,"c":3,"l":1744033600058762,"d":38609},{"a":1744033600059087,"name":"processing","f":1744033599038704,"d_finished":439855,"c":24,"l":1744033600058766,"d":442520},{"name":"ProduceResults","f":1744033599019113,"d_finished":41406,"c":29,"l":1744033600060476,"d":41406},{"a":1744033600060478,"name":"Finish","f":1744033600060478,"d_finished":0,"c":0,"l":1744033600061752,"d":1274},{"name":"task_result","f":1744033599038722,"d_finished":389630,"c":21,"l":1744033600009366,"d":389630}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T13:46:40.065375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:39.001404Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4710140;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4710140;selected_rows=0; 2025-04-07T13:46:40.065616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:40.067490Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.657182s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=1.039150s;size=0.002187128;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.077026s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.135965s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.015131s;size=0;details={};};]};; 2025-04-07T13:46:40.068491Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:551:2542];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TSchemeShardMoveTest::MoveIndexSameDst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTier [GOOD] Test command err: 
2025-04-07T13:46:09.810087Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:10.719524Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:10.876548Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:10.879125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.140229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.140855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.142599Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.322068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.323805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.325652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.325932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:11.326772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.326842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.327496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.328357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.335706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.335989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.336060Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.336853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.408222Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:11.409237Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:11.409296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.409333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:11.415123Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.415239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.415281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.415425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.415540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.415600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.415636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.415721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.415781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.415845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.415878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.416050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-04-07T13:46:11.416114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.416151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.416176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.416266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.416318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.416356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.416382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.416437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.416467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.416487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:11.416532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.416560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.416585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.416762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-07T13:46:11.416847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T13:46:11.416965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=71; 2025-04-07T13:46:11.417044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-07T13:46:11.417190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 
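Note on the TTxUpdateSchema entries above: they walk a chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, ...), each one initialized, run ("0 chunks found"), marked finished, and then switched to the next class in the list. The sketch below is a minimal, self-contained C++ illustration of that init/run/finish/switch pattern only; the INormalizer and TNoopNormalizer types and the printed fields are hypothetical placeholders for this example, not YDB's actual interfaces.

    // Minimal sketch of the normalizer-chain pattern seen in the log above
    // (assumed/simplified types, not the real YDB classes).
    #include <cstdio>
    #include <memory>
    #include <string>
    #include <vector>

    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual std::string Name() const = 0;
        // Returns how many chunks still need repair; 0 means nothing to do.
        virtual size_t Run() = 0;
    };

    struct TNoopNormalizer : INormalizer {
        std::string ClassName;
        explicit TNoopNormalizer(std::string name) : ClassName(std::move(name)) {}
        std::string Name() const override { return ClassName; }
        size_t Run() override { return 0; }  // corresponds to "0 chunks found"
    };

    int main() {
        // Hypothetical chain mirroring the CLASS_NAMEs reported by TTxUpdateSchema.
        std::vector<std::unique_ptr<INormalizer>> chain;
        for (const char* name : {"Granules", "Chunks", "TablesCleaner",
                                 "CleanGranuleId", "CleanInsertionDedup",
                                 "GCCountersNormalizer"}) {
            chain.push_back(std::make_unique<TNoopNormalizer>(name));
        }
        for (size_t i = 0; i < chain.size(); ++i) {
            std::printf("event=normalizer_init;type=%s\n", chain[i]->Name().c_str());
            const size_t chunks = chain[i]->Run();
            std::printf("normalizer=%s;message=%zu chunks found\n",
                        chain[i]->Name().c_str(), chunks);
            std::printf("event=normalizer_finished;description=CLASS_NAME=%s\n",
                        chain[i]->Name().c_str());
            if (i + 1 < chain.size()) {
                // Hand off to the next normalizer, as in "normalizer_switched".
                std::printf("event=normalizer_switched;description=CLASS_NAME=%s\n",
                            chain[i + 1]->Name().c_str());
            }
        }
    }

The real implementation persists progress between normalizers inside the tablet's local database transaction; this sketch only reproduces the ordering of the log events.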
2025-04-07T13:46:11.417247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.417280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.417486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.417534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.417577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.417703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.417741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 09551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:46:40.075886Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:46:40.076162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-07T13:46:40.076691Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=2; 2025-04-07T13:46:40.077153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:46:40.077943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.077981Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T13:46:40.078235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:40.078848Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender 
[1:352:2363], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-07T13:46:40.078875Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T13:46:40.093625Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T13:46:40.103482Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2025-04-07T13:46:40.103616Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9575952;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:46:40.103668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=b91bb0dc-13b611f0-9cce3385-7907634b;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:46:40.104046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:40.104199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.104243Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:40.104354Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2025-04-07T13:46:40.104412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2025-04-07T13:46:40.104513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:354:2365] send ScanData to [1:349:2360] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T13:46:40.104616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.108830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.109133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.109295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:40.109402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.109519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:46:40.109568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:354:2365] finished for tablet 9437184 2025-04-07T13:46:40.109637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:354:2365] send ScanData to [1:349:2360] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:40.110033Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:354:2365] and sent to [1:349:2360] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":1.106},{"events":["f_ack"],"t":1.132},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.137}],"full":{"a":1744033598971749,"name":"_full_task","f":1744033598971749,"d_finished":0,"c":0,"l":1744033600109682,"d":1137933},"events":[{"name":"bootstrap","f":1744033598972309,"d_finished":2842,"c":1,"l":1744033598975151,"d":2842},{"a":1744033600109272,"name":"ack","f":1744033600104022,"d_finished":5139,"c":1,"l":1744033600109161,"d":5549},{"a":1744033600109260,"name":"processing","f":1744033598975463,"d_finished":57346,"c":5,"l":1744033600109163,"d":57768},{"name":"ProduceResults","f":1744033598973879,"d_finished":6911,"c":8,"l":1744033600109553,"d":6911},{"a":1744033600109556,"name":"Finish","f":1744033600109556,"d_finished":0,"c":0,"l":1744033600109682,"d":126},{"name":"task_result","f":1744033598975488,"d_finished":52095,"c":4,"l":1744033600078554,"d":52095}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T13:46:40.110287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:38.970638Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T13:46:40.110330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:40.110458Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:1.050810s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.882086s;size=0.0063152;details={columns=1;};};{name=ASSEMBLER::LAST;duration=0.168471s;size=0.0063152;details={columns=(column_ids=1;column_names=timestamp;);;};};]};; 2025-04-07T13:46:40.110539Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:354:2365];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL [GOOD] Test command err: 2025-04-07T13:46:11.613740Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2025-04-07T13:46:13.036214Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:13.049906Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:13.050794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:13.379506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:13.380568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:13.389343Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:13.579667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:13.586439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:13.588000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:13.588281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:13.592898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:13.593200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:13.593689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:13.594207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:13.594922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:13.600780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:13.601278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:13.602141Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:13.767832Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:13.769150Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:13.769885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:13.770159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:13.804602Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:13.805418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:13.806097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:13.813074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:13.815005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:13.815558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:13.815795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:13.816501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:13.820794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:13.821228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:13.821473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:13.823724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:13.824193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:13.824224Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:13.824245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:13.829208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:13.829771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:13.830028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:13.830054Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:13.830564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:13.830590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:13.830816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:13.830847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:13.830872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:13.831095Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:13.832382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=216; 2025-04-07T13:46:13.837392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4807; 2025-04-07T13:46:13.838532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=451; 2025-04-07T13:46:13.839482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=286; 2025-04-07T13:46:13.844832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:13.845105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:13.848488Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:13.852654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:13.852906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:13.852933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:13.853922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:13.854174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_ ... umn_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.912684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:41.912817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26668; 2025-04-07T13:46:41.912866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=saved_at; 2025-04-07T13:46:41.913090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:548:2540] send ScanData to [1:547:2539] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:41.913210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.913445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.913480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:41.913517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:46:41.913635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:41.913727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.913754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:46:41.913814Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26664; 2025-04-07T13:46:41.913848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=saved_at; 2025-04-07T13:46:41.914016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:548:2540] send ScanData to [1:547:2539] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2025-04-07T13:46:41.914094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.914252Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.914353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.914454Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:46:41.914516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.914568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:46:41.914597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:548:2540] finished for tablet 9437184 2025-04-07T13:46:41.914658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:548:2540] send ScanData to [1:547:2539] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:46:41.920601Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:548:2540] and sent to [1:547:2539] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.019},{"events":["f_processing","f_task_result"],"t":0.02},{"events":["f_ack","l_task_result"],"t":1.093},{"events":["l_ProduceResults","f_Finish"],"t":1.096},{"events":["l_ack","l_processing","l_Finish"],"t":1.097}],"full":{"a":1744033600817660,"name":"_full_task","f":1744033600817660,"d_finished":0,"c":0,"l":1744033601914693,"d":1097033},"events":[{"name":"bootstrap","f":1744033600817782,"d_finished":19646,"c":1,"l":1744033600837428,"d":19646},{"a":1744033601914441,"name":"ack","f":1744033601911422,"d_finished":2807,"c":3,"l":1744033601914383,"d":3059},{"a":1744033601914432,"name":"processing","f":1744033600838469,"d_finished":218983,"c":27,"l":1744033601914384,"d":219244},{"name":"ProduceResults","f":1744033600819222,"d_finished":11410,"c":32,"l":1744033601914584,"d":11410},{"a":1744033601914585,"name":"Finish","f":1744033601914585,"d_finished":0,"c":0,"l":1744033601914693,"d":108},{"name":"task_result","f":1744033600838485,"d_finished":215342,"c":24,"l":1744033601911256,"d":215342}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:46:41.920727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:46:40.816851Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4997532;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4997532;selected_rows=0; 2025-04-07T13:46:41.920764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:46:41.921070Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.865310s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=1.775985s;size=0.002681008;details={columns=1,2,3,4,9,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.025966s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.284181s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:46:41.921668Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:548:2540];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [FAIL] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool ------- [TM] {asan, 
default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2025-04-07T13:46:18.554384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567183940749466:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:18.554568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bd4/r3tmp/tmpZiXTHW/pdisk_1.dat 2025-04-07T13:46:22.045490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:22.155506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15584, node 1 2025-04-07T13:46:22.645013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:22.717186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:22.942757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:23.017494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:23.017516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:23.017522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:23.017604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:46:23.471216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:23.481051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:23.486739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:23.492037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:23.492263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:23.492282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:46:23.495777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:23.500792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:23.500826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:23.509346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:23.526148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033583565, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:23.526191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:23.526942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:23.536161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:23.536320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:23.536382Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:23.536459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:23.536490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:23.536549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:23.539348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:23.539390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:23.539405Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:23.539535Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:23.545042Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567183940749466:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:23.545087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:33.917268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567248365260087:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:33.917359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:34.525922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:34.528286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:46:34.538119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:34.538152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:34.557268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:46:34.558762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:34.559580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:34.559864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:46:34.575912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:34.575952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:34.576180Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:46:34.584315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:34.584559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:34.584571Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:46:34.588439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:46:34.616777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:46:34.617401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:46:34.637164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:46:34.918144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:46:34.918166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:46:34.918716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:46:34.926274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:46:34.958404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033594989, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:34.958446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033594989 2025-04-07T13:46:34.958779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:46:34.989702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:34.990161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:34.990491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTab ... State 2025-04-07T13:46:36.308501Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:36.318268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.318755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.318770Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:46:36.319664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.319676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.319683Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:46:36.324904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.325179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.325190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:46:36.326030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.326044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.326054Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:46:36.327475Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.327816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.327825Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T13:46:36.341702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:46:36.383969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033596424, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:36.384010Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033596424, at schemeshard: 72057594046644480 2025-04-07T13:46:36.384653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T13:46:36.389577Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033596424, at schemeshard: 72057594046644480 2025-04-07T13:46:36.389650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2025-04-07T13:46:36.389698Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033596424, at schemeshard: 72057594046644480 2025-04-07T13:46:36.389732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2025-04-07T13:46:36.389987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1744033596424 2025-04-07T13:46:36.390241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2025-04-07T13:46:36.410414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:36.412069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:36.412430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2025-04-07T13:46:36.412478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2025-04-07T13:46:36.412612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T13:46:36.412635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2025-04-07T13:46:36.412760Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2025-04-07T13:46:36.413017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2025-04-07T13:46:36.413080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2025-04-07T13:46:36.413107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2025-04-07T13:46:36.413124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 
2025-04-07T13:46:36.413147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2025-04-07T13:46:36.413387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:46:36.413396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2025-04-07T13:46:36.413408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:46:36.429832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.429860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.429871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:46:36.430671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.431220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.431229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:46:36.432827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.433228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.433235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:46:36.434032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.434293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.434299Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:46:36.435376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:36.435385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:36.435391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:46:36.435813Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T13:46:36.472644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567261250162226:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:46:36.524875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:36.525458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:46:36.533723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:37.141468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:37.141494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:37.791085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr881gv8b6gneh9q3ejra6fe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJjNTc4N2ItNTM4NzcyYWMtMTk1ZTdmNzAtODEzMjNhN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:46:38.830657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr881kb87gwv29dr360qtqpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTYzMzMxYTYtOGU4MzVjYjctODg4YjJhYWItZjQxMDMxNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TMiniKQLProtoTest::TestExportEmptyListType >> TMiniKQLProtoTest::TestExportEmptyListType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyDictType >> TMiniKQLProtoTest::TestExportEmptyDictType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyOptional >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> TMiniKQLProtoTest::TestExportNullType >> TMiniKQLProtoTest::TestExportEmptyOptional [GOOD] >> TMiniKQLProtoTest::TestExportEmptyOptionalOptional >> TMiniKQLProtoTest::TestExportNullType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType >> TMiniKQLProtoTest::TestExportEmptyOptionalOptional [GOOD] >> TMiniKQLProtoTest::TestExportEmptyList >> TMiniKQLProtoTest::TestExportOptionalType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalOptionalType ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-04-07T13:46:22.825435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567197970562128:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:22.829109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001be5/r3tmp/tmpBFVIJA/pdisk_1.dat 2025-04-07T13:46:23.968978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:23.982194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.983511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:24.011718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:24.087938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:24.550705Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.114936s 2025-04-07T13:46:24.550808Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.115045s TServer::EnableGrpc on GrpcPort 31786, node 1 2025-04-07T13:46:24.734111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:46:24.749521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't 
been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:24.773183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:46:26.016021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:26.042769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:26.042804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:26.043056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:46:26.043112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.470487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:27.470507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:27.470512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:27.470594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:27.753193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567197970562128:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:27.753697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:14596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:46:33.017444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.034975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.040468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.050950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:33.052174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:33.052213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:46:33.059036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:33.059051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2025-04-07T13:46:33.064028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.071148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:33.087161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033593120, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:33.087193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:33.087409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:33.102210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:33.102363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:33.102412Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:33.102480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:33.102512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:33.102561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:33.124554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:33.124621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:33.124873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:33.125131Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:39.081491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:39.081517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:42.280041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567283869909362:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:42.289216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:42.939083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:42.939628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:46:42.971545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:42.971583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:42.982275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:46:42.983585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:42.983999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:42.984048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:46:42.994589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.994640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.994656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:46:42.995008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.995023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.995031Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:46:43.007038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:46:43.023221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 7205759 ... 
2057594046644480 2025-04-07T13:46:44.182706Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, ProgressState 2025-04-07T13:46:44.182758Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:44.219634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:46:44.219957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.220242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.220256Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:46:44.221006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.221024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.221034Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:46:44.221597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.221625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.221635Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:46:44.222134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.222152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.222162Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:46:44.222646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.222847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.222857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T13:46:44.313583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033604327, transactions count in step: 1, at 
schemeshard: 72057594046644480 2025-04-07T13:46:44.313628Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033604327, at schemeshard: 72057594046644480 2025-04-07T13:46:44.314490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T13:46:44.315066Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033604327, at schemeshard: 72057594046644480 2025-04-07T13:46:44.315096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2025-04-07T13:46:44.315129Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033604327, at schemeshard: 72057594046644480 2025-04-07T13:46:44.315669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2025-04-07T13:46:44.315713Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1744033604327 2025-04-07T13:46:44.315999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2025-04-07T13:46:44.337137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:44.338030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:44.338080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2025-04-07T13:46:44.338129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2025-04-07T13:46:44.340645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T13:46:44.352833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2025-04-07T13:46:44.361902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2025-04-07T13:46:44.361953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2025-04-07T13:46:44.362119Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2025-04-07T13:46:44.362143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2025-04-07T13:46:44.362159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:46:44.362187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2025-04-07T13:46:44.362197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:46:44.362206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2025-04-07T13:46:44.362524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:46:44.362932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.362953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.362963Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:46:44.363734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.363745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.363751Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:46:44.364082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.364329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.372841Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:46:44.373105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.373587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.373597Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:46:44.373953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:46:44.373965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:46:44.373973Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:46:44.373995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T13:46:44.398010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567292459844190:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:46:44.460972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:44.461342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:46:44.479478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:46.696015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr881rh68fz1fezh15wk8gxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQzZTQxZTItZTU1ZWU1MmEtNzQxZTkzYmEtMWU4NTEyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:46:48.801489Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr881x2cf6xdt2kv59sbh7f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1YzQwMjQtMjk1YzlkNTEtMjVjMDAzOTYtODFmNWQ5NTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> YdbSdkSessions::TestSessionPool [GOOD] >> TMiniKQLProtoTest::TestExportEmptyList [GOOD] >> TMiniKQLProtoTest::TestExportUuidType >> TMiniKQLProtoTest::TestExportOptionalOptionalType [GOOD] >> TMiniKQLProtoTest::TestExportNull |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TMiniKQLProtoTest::TestExportUuidType [GOOD] >> TMiniKQLProtoTest::TestExportVariantTupleType >> TMiniKQLProtoTest::TestExportVariantTupleType [GOOD] >> TMiniKQLProtoTest::TestExportVariantStructType >> TMiniKQLProtoTest::TestExportNull [GOOD] >> TMiniKQLProtoTest::TestExportOptional >> TMiniKQLProtoTest::TestExportOptional [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportEmptyList [GOOD] >> TMiniKQLProtoTest::TestExportVariantStructType [GOOD] >> TMiniKQLProtoTest::TestExportUuid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:46:45.425774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:46:45.425887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:45.425925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:46:45.425956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:46:45.425996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:46:45.426022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:46:45.426078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:45.426404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:46:46.036993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:46:46.037294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:46.244031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:46:46.244365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:46:46.250105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:46:46.318592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:46:46.320065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:46:46.325990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-07T13:46:46.327161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:46:46.364264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:46.383120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:46.383460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:46.384319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:46:46.384588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:46.385221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:46:46.387035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:46:46.455360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:46:47.643962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:46:47.646491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.647925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:46:47.652021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:46:47.652084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.674311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:47.676145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:46:47.677997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.678380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:46:47.678877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:46:47.679392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:46:47.704258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.704551Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:46:47.704899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:46:47.738728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.738783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.739269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:47.740102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:46:47.770579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:46:47.801445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:46:47.801636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:46:47.802470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:47.802589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:46:47.802629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:47.802836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:46:47.802884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:47.803018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:47.803091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:46:47.814399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:47.814923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:47.816243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:47.816503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-07T13:46:47.819292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:47.819550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:46:47.820833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:46:47.821313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:47.822061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:46:47.822582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:47.822839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:46:47.823367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:46:47.823987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:46:47.824017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:46:47.824048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:46:47.887187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:47.888009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:47.888272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:46:47.889176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:46:47.889422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:47.890998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
5Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:47:00.578786Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:47:00.580091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-07T13:47:00.580395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-07T13:47:00.580737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-07T13:47:00.580952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-07T13:47:00.580977Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-07T13:47:00.582025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:00.582130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:00.582173Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-07T13:47:00.582580Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T13:47:00.611142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T13:47:00.611180Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-07T13:47:00.611236Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T13:47:00.611256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710760 ready parts: 1/1 2025-04-07T13:47:00.611542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-07T13:47:00.611587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2025-04-07T13:47:00.611893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T13:47:00.611915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T13:47:00.612201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-07T13:47:00.612253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-07T13:47:00.624075Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T13:47:00.624123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-07T13:47:00.624162Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-07T13:47:00.625094Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:47:00.639485Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:47:00.639549Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read 
bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:47:00.639877Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T13:47:00.654959Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:47:00.655017Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:47:00.655291Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T13:47:00.655878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:47:00.656441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:613:2566] TestWaitNotification: OK eventTxId 102 2025-04-07T13:47:00.685818Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:00.687303Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 1.54ms result status StatusSuccess 2025-04-07T13:47:00.702716Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" 
LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-04-07T13:46:23.178491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567202633443210:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:23.178572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba6/r3tmp/tmpaWPgmX/pdisk_1.dat 2025-04-07T13:46:25.389994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:26.459092Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:26.881177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:26.985175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:27.005120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:27.201633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:27.728504Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.167701s 2025-04-07T13:46:27.733888Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.168005s TServer::EnableGrpc on GrpcPort 20576, node 1 2025-04-07T13:46:29.974260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567202633443210:2189];send_to=[0:7307199536658146131:7762515]; 
2025-04-07T13:46:29.974833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:30.070327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:30.070343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:30.070348Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:30.070427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:33.660821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.666121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:33.667351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.670882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:33.671049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:33.671070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:46:33.673554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:33.673579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:33.675178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:33.677621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:33.678976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033593722, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:33.679022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:33.679249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:33.683566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:33.683727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:33.683771Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:33.683865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:33.683899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:33.683960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:33.685995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:33.686044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:33.686061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:33.686137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:41.438831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:41.438858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:42.636404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567284237823020:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:42.636514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:42.640880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567284237823032:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:42.647378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:42.647542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:42.647557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:46:42.647602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:42.647613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:46:42.647653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:42.647686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:42.647926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T13:46:42.648116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:42.648129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:46:42.669552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:42.669836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:42.670342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:42.670409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T13:46:42.670609Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:42.670687Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:42.670752Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:42.673822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.673880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.673899Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:46:42.674191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.674210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.674223Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:46:42.674341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.674354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.674363Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:46:42.674464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.674478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.674486Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:46:42.674582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.674596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.674605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:46:42.675830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:46:42.689719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033602731, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:42.689793Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033602731, at schemeshard: 72057594046644480 2025-04-07T13:46:42.689935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:46:42.690057Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033602731, at schemeshard: 72057594046644480 2025-04-07T13:46:42.690099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2025-04-07T13:46:42.690151Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033602731, at schemeshard: 72057594046644480 2025-04-07T13:46:42.690189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2025-04-07T13:46:42.690245Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1744033602731 2025-04-07T13:46:42.690291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2025-04-07T13:46:42.693574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:42.694039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:42.694105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2025-04-07T13:46:42.694161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2025-04-07T13:46:42.694382Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2025-04-07T13:46:42.694419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2025-04-07T13:46:42.694530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2025-04-07T13:46:42.694560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2025-04-07T13:46:42.694662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:46:42.694702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2025-04-07T13:46:42.694723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:46:42.694754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2025-04-07T13:46:42.694767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:2 2025-04-07T13:46:42.694778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2025-04-07T13:46:42.694793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2025-04-07T13:46:42.699378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.699423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.699435Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:46:42.699625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.699641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.699649Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:46:42.699735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.699748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.699756Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:46:42.699843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.699856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.699864Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:46:42.699948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:46:42.699960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:46:42.699967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:46:42.699991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:46:42.708985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567284237823034:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:46:42.781204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:42.781380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:46:42.784943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TMiniKQLProtoTest::TestExportUuid [GOOD] >> TMiniKQLProtoTest::TestExportVariant |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportOptional [GOOD] >> TMiniKQLProtoTest::TestExportVariant [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:46:49.139938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:46:49.149028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:49.149522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:46:49.149829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:46:49.150105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:46:49.150414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:46:49.150974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:46:49.163520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:46:49.843433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-04-07T13:46:49.843784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:49.940995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:46:49.941607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:46:49.943212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:46:50.015346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:46:50.017027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:46:50.025006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:50.025232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:46:50.033279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:50.034439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:50.034500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:50.034615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:46:50.034662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:50.034822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:46:50.034961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:46:50.058081Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:46:51.188381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:46:51.188565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.188751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:46:51.192985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:46:51.193057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.199614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:51.199744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T13:46:51.199901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.199958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:46:51.199989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:46:51.200031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:46:51.205313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.205374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:46:51.205405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:46:51.213348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.213399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.213444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:51.213520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:46:51.217252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:46:51.222109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:46:51.223449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:46:51.237758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:46:51.238668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:46:51.238931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:51.247024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:46:51.247333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:46:51.248945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:51.249022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:46:51.281877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:46:51.282408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:46:51.282806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:46:51.282845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:46:51.288043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:46:51.288845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:46:51.290090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:46:51.290321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:51.290828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:46:51.291146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:46:51.291657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:46:51.292175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:46:51.292476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:46:51.298483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:46:51.298731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:46:51.345529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:51.346124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:46:51.346628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:46:51.347364Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:46:51.347600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:46:51.348839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
57594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 4738 } } 2025-04-07T13:47:02.912189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-04-07T13:47:02.916794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 4738 } } 2025-04-07T13:47:02.917121Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 4738 } } FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:47:02.939197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 8589936893 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.939771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-04-07T13:47:02.945008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 8589936893 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.945337Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:47:02.946498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 316 RawX2: 8589936893 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.947073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:02.947355Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-07T13:47:02.948174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:47:02.952917Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-04-07T13:47:02.974073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.974394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-04-07T13:47:02.975329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.975364Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:47:02.975409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:47:02.976085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:02.976106Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:02.976126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:47:02.976149Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T13:47:03.011486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-07T13:47:03.019762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:03.026828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027291Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T13:47:03.027410Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-04-07T13:47:03.027436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-07T13:47:03.027466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-04-07T13:47:03.027635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027963Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:03.027988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: 
[OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T13:47:03.028029Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-04-07T13:47:03.028045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-07T13:47:03.028066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-04-07T13:47:03.028092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-07T13:47:03.028123Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:47:03.028148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:47:03.028234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T13:47:03.028262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:47:03.028286Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-07T13:47:03.028300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-07T13:47:03.028318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T13:47:03.028387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:47:03.028415Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-07T13:47:03.028430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-07T13:47:03.028464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:47:03.028480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:47:03.032895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:47:03.032940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:47:03.033018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:47:03.033060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T13:47:03.033087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:47:03.033106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:47:03.033140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:03.042663Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:47:03.043701Z node 2 :TX_PROXY DEBUG: actor# [2:266:2258] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-04-07T13:47:03.106965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:47:03.107017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:47:03.107460Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:47:03.107542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:47:03.107579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:662:2550] TestWaitNotification: OK eventTxId 103 |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportVariant [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> TMiniKQLProtoTest::TestCanExport >> TMiniKQLProtoTest::TestCanExport [GOOD] >> TMiniKQLProtoTest::TestExportDataType >> TMiniKQLProtoTest::TestExportPgType >> TMiniKQLProtoTest::TestExportDataType [GOOD] >> TMiniKQLProtoTest::TestExportBool >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> TMiniKQLProtoTest::TestExportDecimalType [GOOD] >> TMiniKQLProtoTest::TestExportDictType >> TMiniKQLProtoTest::TestExportListType >> TMiniKQLProtoTest::TestExportPgType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType2 >> TMiniKQLProtoTest::TestExportDictType [GOOD] >> TMiniKQLProtoTest::TestExportDouble >> TMiniKQLProtoTest::TestExportBool [GOOD] >> TMiniKQLProtoTest::TestExportDecimal [GOOD] >> TMiniKQLProtoTest::TestExportDecimalHuge >> TMiniKQLProtoTest::TestExportListType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyTupleType >> TMiniKQLProtoTest::TestExportDecimalHuge [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType2 [GOOD] >> TMiniKQLProtoTest::TestExportString >> TMiniKQLProtoTest::TestExportDouble [GOOD] >> TMiniKQLProtoTest::TestExportEmptyDict >> TMiniKQLProtoTest::TestExportVoidType >> TMiniKQLProtoTest::TestExportEmptyTupleType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyStructType >> TMiniKQLProtoTest::TestExportString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> TMiniKQLProtoTest::TestExportEmptyDict [GOOD] >> TMiniKQLProtoTest::TestExportStruct >> TMiniKQLProtoTest::TestExportVoidType [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034159.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034159.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032959.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-07T13:46:11.805927Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:11.892738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.950674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.951071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.953083Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:12.053491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:12.055090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:12.056687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:12.057004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:12.057313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:12.057901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:12.058407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:12.058735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:12.059264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:12.059383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-07T13:46:12.059475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:12.059973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:12.113039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:12.113670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:12.128397Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:12.129053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:12.129623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:12.131108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:12.132327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:12.132794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:12.133001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:12.133716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:12.134217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:12.134248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:12.134684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:12.135479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:12.135932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:12.135961Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:12.135981Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:12.136447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:12.136689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:12.136929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:12.136952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:12.137198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:12.137224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:12.137242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:12.137273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:12.137553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:12.137576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:12.139147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=638; 2025-04-07T13:46:12.139969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=610; 2025-04-07T13:46:12.140763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=357; 2025-04-07T13:46:12.141616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=510; 2025-04-07T13:46:12.142258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:12.142455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:12.142480Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:12.143843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:12.143874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:12.143897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:12.145103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... SHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26668; 2025-04-07T13:47:09.584362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=timestamp; 2025-04-07T13:47:09.584515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:667:2672] send ScanData to [1:666:2671] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:667:2672]->[1:666:2671] 2025-04-07T13:47:09.584604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.584710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.584740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 
2025-04-07T13:47:09.584770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:47:09.584860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:09.584934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.584960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:47:09.585024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26664; 2025-04-07T13:47:09.585056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=timestamp; 2025-04-07T13:47:09.585203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:667:2672] send ScanData to [1:666:2671] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:667:2672]->[1:666:2671] 2025-04-07T13:47:09.585284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.585363Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-07T13:47:09.585436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.585522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:09.585585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.585700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:09.585734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:667:2672] finished for tablet 9437184 2025-04-07T13:47:09.585798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:667:2672] send ScanData to [1:666:2671] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:09.586165Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:667:2672] and sent to [1:666:2671] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.008},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.453},{"events":["l_ProduceResults","f_Finish"],"t":0.457},{"events":["l_ack","l_processing","l_Finish"],"t":0.458}],"full":{"a":1744033629127748,"name":"_full_task","f":1744033629127748,"d_finished":0,"c":0,"l":1744033629585832,"d":458084},"events":[{"name":"bootstrap","f":1744033629127890,"d_finished":7929,"c":1,"l":1744033629135819,"d":7929},{"a":1744033629585509,"name":"ack","f":1744033629581021,"d_finished":4093,"c":6,"l":1744033629585456,"d":4416},{"a":1744033629585502,"name":"processing","f":1744033629136791,"d_finished":352200,"c":48,"l":1744033629585458,"d":352530},{"name":"ProduceResults","f":1744033629130868,"d_finished":10376,"c":56,"l":1744033629585723,"d":10376},{"a":1744033629585725,"name":"Finish","f":1744033629585725,"d_finished":0,"c":0,"l":1744033629585832,"d":107},{"name":"task_result","f":1744033629136809,"d_finished":347523,"c":42,"l":1744033629580893,"d":347523}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:667:2672]->[1:666:2671] 2025-04-07T13:47:09.586236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:09.127402Z;index_granules=0;index_portions=6;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9495312;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9495312;selected_rows=0; 2025-04-07T13:47:09.586265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:09.586392Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.096916s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.107802s;size=0.004421752;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.033992s;size=0.00638667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.011939s;size=0;details={};};]};; 2025-04-07T13:47:09.586456Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:667:2672];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 160000/9495312 |83.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TMiniKQLProtoTest::TestExportVoid >> TMiniKQLProtoTest::TestExportDict >> TMiniKQLProtoTest::TestExportDecimalNegative >> TMiniKQLProtoTest::TestExportEmptyStructType [GOOD] >> TMiniKQLProtoTest::TestExportIntegral >> TMiniKQLProtoTest::TestExportStruct [GOOD] >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly >> TMiniKQLProtoTest::TestExportDict [GOOD] >> TMiniKQLProtoTest::TestExportVoid [GOOD] >> TMiniKQLProtoTest::TestImportDecimal >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TSchemeShardUserAttrsTest::VariousUse >> TMiniKQLProtoTest::TestExportDecimalNegative [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMax64bit >> TMiniKQLProtoTest::TestExportTupleType >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly [GOOD] >> TMiniKQLProtoTest::TestImportDecimal [GOOD] >> TMiniKQLProtoTest::TestImportVariant >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> TMiniKQLProtoTest::TestExportDecimalMax64bit [GOOD] >> TMiniKQLProtoTest::TestExportDecimalHugePlusOne [GOOD] >> TMiniKQLProtoTest::TestExportDecimalNan >> TMiniKQLProtoTest::TestExportTupleType [GOOD] >> TMiniKQLProtoTest::TestExportStructType [GOOD] >> TMiniKQLProtoTest::TestExportTuple >> TMiniKQLProtoTest::TestExportIntegral [GOOD] >> TMiniKQLProtoTest::TestExportList >> TMiniKQLProtoTest::TestImportVariant [GOOD] >> TMiniKQLProtoTest::TestImportUuid |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDecimalHuge [GOOD] >> TMiniKQLProtoTest::TestExportDecimalNan [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMunusInf >> TMiniKQLProtoTest::TestExportTuple [GOOD] >> TMiniKQLProtoTest::TestExportStructEmptyColumnOrder >> TMiniKQLProtoTest::TestImportUuid [GOOD] >> TMiniKQLProtoTest::TestExportList [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMunusInf [GOOD] >> TMiniKQLProtoTest::TestExportStructEmptyColumnOrder [GOOD] >> TMiniKQLProtoTest::TestExportStructWithColumnOrder >> TMiniKQLProtoTest::TestExportStructWithColumnOrder [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDict [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestImportUuid [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |83.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportList [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDecimalMunusInf [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportStructWithColumnOrder [GOOD] >> KqpLocks::Invalidate [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken |83.7%| [TA] $(B)/ydb/library/mkql_proto/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardUserAttrsTest::VariousUse [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:12.528046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:12.528970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:12.529500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:12.529861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:12.530151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:12.530417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:12.530701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:12.534211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:13.303627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:13.305496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:13.361397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:13.361599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:13.361841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:13.374410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:13.374609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:13.375077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:13.375273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:13.376925Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:13.378183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:13.378237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:13.378384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:13.378429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:13.378469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:13.378652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:13.419130Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:14.204844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:14.207499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.208548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:14.222920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:14.223600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.253588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:14.256207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:14.258274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.264901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:14.265179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:14.265750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:14.296071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.296946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:14.297288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:14.318660Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.318954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.319263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:14.319830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:14.368318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:14.394223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:14.395808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:14.417269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:14.418835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:14.419662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:14.425592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:14.426469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:14.428325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:14.429131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:14.461821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:14.462435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:14.463533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:14.463804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:14.465266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:14.465520Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:14.467479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:14.467786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:14.468761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:14.469060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:14.469352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:14.469634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:14.470236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:14.471179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:14.471442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:14.489839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:14.490277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:14.490306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:14.490339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:14.491522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:14.492730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
78944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T13:47:16.498271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T13:47:16.498643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-07T13:47:16.513617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-07T13:47:16.514957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000010 2025-04-07T13:47:16.521372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:16.529150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:16.529838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 112:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-07T13:47:16.530585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:47:16.531915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-07T13:47:16.532252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:47:16.533504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:47:16.534597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:47:16.534650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-07T13:47:16.534981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:47:16.535631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-07T13:47:16.535937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-07T13:47:16.540842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T13:47:16.541503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-04-07T13:47:16.542138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-04-07T13:47:16.542454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-07T13:47:16.558797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:47:16.559457Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:47:16.576843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:16.577134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:47:16.577509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T13:47:16.578984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:16.579299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-04-07T13:47:16.579328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2025-04-07T13:47:16.583475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:47:16.583542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:47:16.584152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:47:16.584755Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-07T13:47:16.585103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:47:16.589182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:47:16.590170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:47:16.590521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:47:16.590548Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T13:47:16.590876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:47:16.590971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-07T13:47:16.595451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:47:16.595507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:47:16.595579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:47:16.612876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:47:16.626777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:47:16.626904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-07T13:47:16.630741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-07T13:47:16.631084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-07T13:47:16.641603Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-07T13:47:16.642281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-07T13:47:16.642593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:482:2474] TestWaitNotification: OK eventTxId 112 2025-04-07T13:47:16.656647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:16.659846Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 2.54ms result status StatusSuccess 2025-04-07T13:47:16.663007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-04-07T13:47:16.684972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:16.686093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-04-07T13:47:16.686996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:47:16.702096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:16.703427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> TSchemeShardUserAttrsTest::SetAttrs >> TSchemeShardUserAttrsTest::MkDir >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TSchemeShardUserAttrsTest::SpecialAttributes >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> TSchemeShardUserAttrsTest::Boot >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-04-07T13:46:23.234178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567202896269137:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:23.234206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c07/r3tmp/tmpYwdMbr/pdisk_1.dat 2025-04-07T13:46:23.795624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.795760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.807961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:23.909444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21777, node 1 2025-04-07T13:46:24.424131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T13:46:24.425215Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:46:24.425448Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:46:24.453321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:25.037262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:46:25.040155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:25.091287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:46:25.140873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:25.140889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:25.141102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:25.141174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:25.333838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:25.349024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:25.351658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:25.352424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:46:25.353114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:46:28.237316Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567202896269137:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:28.237351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:46:29.200316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:29.209811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:29.211050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:46:29.217898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:29.218045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:29.218060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:46:29.220257Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:29.220276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:29.224757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:29.225436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:29.233687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033589277, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:29.233718Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:29.233965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:29.237548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:29.237698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:29.237748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:29.237850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:29.237892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:29.237936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:29.240615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:29.240678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:29.240693Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:29.240787Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:38.900755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:38.900774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:40.753293Z node 1 :KQP_PROXY WARN: TraceId: "01jr881n73e7bvvb5zeqcn8q9g", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:46:41.069731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567280205681648:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:41.085114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:41.087434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567280205681660:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:41.091910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:41.092059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:41.092077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:46:41.092134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:41.092146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:46:41.092189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:41.092312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:41.092537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T13:46:41.100922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:41.100959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:46:41.137955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:41.166357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-0 ... 
r: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.019146Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.019156Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:47:15.020064Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.020081Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.020090Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:47:15.021216Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.021484Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.021494Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:47:15.022362Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.022380Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.022389Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:47:15.054565Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033635092, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:47:15.054601Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033635092, at schemeshard: 72057594046644480 2025-04-07T13:47:15.054723Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:47:15.054832Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033635092, at schemeshard: 72057594046644480 2025-04-07T13:47:15.054889Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2025-04-07T13:47:15.054941Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033635092, at schemeshard: 72057594046644480 2025-04-07T13:47:15.054980Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2025-04-07T13:47:15.055036Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1744033635092 2025-04-07T13:47:15.055081Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2025-04-07T13:47:15.057116Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:47:15.057657Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:47:15.057721Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2025-04-07T13:47:15.057783Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2025-04-07T13:47:15.057923Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2025-04-07T13:47:15.057957Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2025-04-07T13:47:15.058033Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2025-04-07T13:47:15.058062Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2025-04-07T13:47:15.058134Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:47:15.058163Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2025-04-07T13:47:15.058185Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:47:15.058210Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2025-04-07T13:47:15.058224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:2 2025-04-07T13:47:15.058235Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2025-04-07T13:47:15.058253Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2025-04-07T13:47:15.062699Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.062733Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.062747Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:47:15.062903Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.062922Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.062931Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:47:15.063034Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.063051Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at 
schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.063060Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:47:15.063152Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.063165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.063173Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:47:15.063560Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:47:15.064386Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:47:15.064397Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:47:15.064433Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:47:15.079678Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490567423462771228:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:47:15.172469Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:15.172593Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:15.186442Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:15.733555Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:47:15.733575Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:15.872227Z node 4 :KQP_PROXY WARN: TraceId: "01jr882qgy94x45fzgt0jckn2p", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:16.189918Z node 4 :KQP_PROXY WARN: TraceId: "01jr882qtw20bcwn3702520wc5", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:16.460417Z node 4 :KQP_PROXY WARN: TraceId: "01jr882r396rg3znhzyeq95caz", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:16.730880Z node 4 :KQP_PROXY WARN: TraceId: "01jr882rbme8gmyrm8mhhnb0a0", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:16.921638Z node 4 :KQP_PROXY WARN: TraceId: "01jr882rhp8qzxzzt34150vkzm", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:17.182761Z node 4 :KQP_PROXY WARN: TraceId: "01jr882rsvdj4zwn0kwch479jp", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:17.292950Z node 4 :KQP_PROXY WARN: TraceId: "01jr882rxa7x1mx2ay3as0h4nh", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:17.541039Z node 4 :KQP_PROXY WARN: TraceId: "01jr882s53209568cf3rqrag8h", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:17.833859Z node 4 :KQP_PROXY WARN: TraceId: "01jr882se9c28d06tgc1h1y1s0", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:18.251912Z node 4 :KQP_PROXY WARN: TraceId: "01jr882sva8rgbdhd114cv5w3q", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:18.677913Z node 4 :KQP_PROXY WARN: TraceId: "01jr882t752zxd6szzxgdpefs0", Active sessions limit exceeded, maximum allowed: 2 2025-04-07T13:47:19.129401Z node 4 :KQP_PROXY WARN: TraceId: "01jr882tpbepfmz8vjv31j90c9", Active sessions limit exceeded, maximum allowed: 2 >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> TDataShardTrace::TestTraceDistributedSelect >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:21.777019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:21.777120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:21.777163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:21.777200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:21.777246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:21.777271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:21.777318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:21.777602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:21.835418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:21.835473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:21.843380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:21.843477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:21.843596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:21.848102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:21.848292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:21.848863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:21.849069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:21.851029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:21.852177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:21.852248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:21.852370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:21.852416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:21.852447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:21.852598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.859426Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:21.971221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:21.971483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.971714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:21.971935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:21.971992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.974776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:21.974912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:21.975138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.975215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:21.975249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:21.975280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:21.977495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.977554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:21.977593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:21.979558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.979610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.979650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:21.979731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:21.983530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:21.985906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:21.986128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:21.987169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:21.987331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:21.987386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:21.987657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:21.987713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:21.987902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:21.988000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:21.990251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:21.990316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:21.990507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:21.990548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:21.990799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.990846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:21.990938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:21.990970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:21.991029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:21.991078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:21.991111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:21.991140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:21.991206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:21.991269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-07T13:47:21.991302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:21.993260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:21.993369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:21.993398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:21.993433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:21.993479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:21.993580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... T_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:315:2307] 2025-04-07T13:47:22.061119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.061153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:315:2307] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-07T13:47:22.061566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.061766Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 211us result status StatusSuccess 2025-04-07T13:47:22.062106Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-07T13:47:22.064462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.064588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.064686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.064815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.064857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.066989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.067106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-04-07T13:47:22.067279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.067317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.067386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-07T13:47:22.067501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.069216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:47:22.069349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-07T13:47:22.069681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.069795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.069837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-07T13:47:22.070014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:47:22.070065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 
2025-04-07T13:47:22.070146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.070222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-07T13:47:22.070271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:22.070301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:47:22.070346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:47:22.070376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:47:22.070423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:47:22.070457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-07T13:47:22.070487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-04-07T13:47:22.072481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.072531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.072755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.072803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:47:22.073291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:47:22.073383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:47:22.074161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:47:22.074208Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-07T13:47:22.074274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.074366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T13:47:22.076160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:47:22.076426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:47:22.076479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:47:22.076888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:47:22.076995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.077031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:334:2326] TestWaitNotification: OK eventTxId 103 2025-04-07T13:47:22.077517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.077756Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-04-07T13:47:22.078129Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:21.848960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:21.849058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-07T13:47:21.849094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:21.849125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:21.849175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:21.849204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:21.849253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:21.849543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:21.911350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:21.911405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:21.921781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:21.921926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:21.922089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:21.927328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:21.927526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:21.928139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:21.928347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:21.930252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:21.931433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:21.931486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:21.931600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:21.931659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:21.931712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:21.931878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:21.939442Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:22.060402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.060640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.060870Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:22.061075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.061136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.063360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.063487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:22.063645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.063704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:22.063737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:22.063771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:22.071088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.071176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.071215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:22.074084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.074147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.074199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.074275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.077907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.080029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:22.080235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:22.081249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.081410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.081470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.081747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:22.081817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.081993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.082070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:22.084772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.084847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.085001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.085030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:22.085229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.085263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:22.085340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:22.085365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.085411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:22.085443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.085470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:22.085492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:22.085539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.085582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:22.085607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:22.087157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.087245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-07T13:47:22.087279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:22.087315Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:22.087355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.087430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... riber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-07T13:47:22.217230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:47:22.217249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:47:22.217796Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T13:47:22.217910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:47:22.217970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.218019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:371:2363] 2025-04-07T13:47:22.218184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.218206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:371:2363] 2025-04-07T13:47:22.218252Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:47:22.218381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:47:22.218437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.218455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:371:2363] 2025-04-07T13:47:22.218559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.218582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:371:2363] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-07T13:47:22.219066Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.219280Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 198us result status StatusSuccess 2025-04-07T13:47:22.219720Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.220286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.220423Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 150us result status StatusSuccess 2025-04-07T13:47:22.220813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.221303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.221455Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 156us result status StatusSuccess 2025-04-07T13:47:22.221741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.222245Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.222497Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 133us result status StatusSuccess 2025-04-07T13:47:22.222777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" 
Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.223327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.223486Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 171us result status StatusSuccess 2025-04-07T13:47:22.223764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034163.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034163.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032963.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-07T13:46:22.070003Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.227174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.271453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.271525Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.271751Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.281537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.281765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.281994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.282105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.282212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.282314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.282409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.282522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.282637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.282767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.282878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.283011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:22.392197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:22.392619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:22.419204Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:22.419654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:22.420445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:22.421331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.422300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:22.422504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:22.422668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:22.423110Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:22.423412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:22.423444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:22.423471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:22.424579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.424823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:22.424855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:22.424880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:22.425192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:22.425480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:22.425725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:22.425750Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:22.426462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:22.426492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:22.426513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:22.426547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:22.426578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:22.426836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:22.427964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=426; 2025-04-07T13:46:22.438304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=9666; 2025-04-07T13:46:22.438936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=201; 2025-04-07T13:46:22.439690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=503; 2025-04-07T13:46:22.440852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:22.441094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:22.441335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:22.442441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:22.442721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.442742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.443655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description ... 
47:21.935481Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=64; 2025-04-07T13:47:21.935584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:21.935625Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=99; 2025-04-07T13:47:21.935744Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=84; 2025-04-07T13:47:21.935855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=68; 2025-04-07T13:47:21.936068Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=169; 2025-04-07T13:47:21.936560Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=439; 2025-04-07T13:47:21.936630Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=24; 2025-04-07T13:47:21.936731Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=62; 2025-04-07T13:47:21.936770Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:47:21.936812Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:47:21.936852Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:47:21.936932Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2025-04-07T13:47:21.936984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-07T13:47:21.937064Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-04-07T13:47:21.937102Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:47:21.937163Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-07T13:47:21.937208Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15572; 2025-04-07T13:47:21.937331Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=6;blobs=6;rows=160000;bytes=9495312;raw_bytes=16084618; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:21.937435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:21.937488Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=cold; 2025-04-07T13:47:21.937525Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:21.937594Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:21.937790Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:21.937824Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:21.937886Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:21.937924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:21.937974Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:21.938036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:21.938090Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:21.938137Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:21.938191Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:21.938230Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:21.938266Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 
2025-04-07T13:47:21.938341Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:21.938770Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:21.938912Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1160:3057];tablet_id=9437184;parent=[1:1123:3027];fline=manager.h:99;event=ask_data;request=request_id=85;1={portions_count=12};; 2025-04-07T13:47:21.939627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:21.939989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:21.940021Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:21.940046Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:21.940089Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:21.940143Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:21.940199Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:21.940253Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:21.940295Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:21.940337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:21.940373Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:21.940410Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:21.940482Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:21.942162Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2025-04-07T13:47:21.942681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2025-04-07T13:47:21.943799Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:21.943854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1123:3027];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 160000/9495312 |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:22.030685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:22.030769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.030797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:22.030940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:22.030978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:22.031001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:22.031045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.031287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:22.088720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:22.088779Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:22.101048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T13:47:22.101184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:22.101337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:22.116246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:22.116460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:22.117109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.117292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:22.119236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.120439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.120512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.120683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:22.120732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.120778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:22.120940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.129025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:22.250183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.250406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.250595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:22.250798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.250855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.257131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.257267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:22.257454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.257518Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:22.257571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:22.257616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:22.260501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.260565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.260602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:22.262388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.262437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.262474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.262546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.265901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.267588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:22.267748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:22.268653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.268806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.268854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.269113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:22.269159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.269317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.269396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:22.271252Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.271304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.271447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.271483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:22.271685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.271727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:22.271810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:22.271848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.271907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:22.271942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.271985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:22.272014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:22.272076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.272122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:22.272154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:22.273645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.273723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.273748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:22.273776Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:22.273807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.273876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
hard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:47:22.303451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.303488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T13:47:22.303542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:47:22.303713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.303755Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.303821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T13:47:22.303926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.304594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.304686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.304828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:47:22.304863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:47:22.304896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.305563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.305625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.305669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:47:22.305701Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:47:22.305729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:47:22.305781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:47:22.307801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:47:22.307911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-04-07T13:47:22.308216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.308317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.308365Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-04-07T13:47:22.308535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T13:47:22.308710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.308767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:47:22.309535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:47:22.309671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:47:22.312641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.312725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.312866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:47:22.312962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.313030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T13:47:22.313068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:47:22.313243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.313286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:47:22.313379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:47:22.313414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:47:22.313454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T13:47:22.313535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:47:22.313572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:47:22.313598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:47:22.313681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:47:22.313725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T13:47:22.313765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T13:47:22.313807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T13:47:22.314432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.314511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.314550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:47:22.314591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:47:22.314628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.315296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.315359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:47:22.315391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:47:22.315420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:47:22.315446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:47:22.315508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T13:47:22.322453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:47:22.322821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-07T13:47:22.325234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { 
Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.325464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.325560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-04-07T13:47:22.327356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.327531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:22.098179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:22.098291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.098332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:22.098365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:22.098407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:22.098434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:22.098506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.098833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:22.168086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:22.168152Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:22.178189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:22.178327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:22.178492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-07T13:47:22.183691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:22.183854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:22.184370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.184543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:22.187926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.189347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.189416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.189610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:22.189684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.189742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:22.189929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.196627Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:22.313821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.314035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.314219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:22.314408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.314461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.316713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.316831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:22.317017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.317076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:22.317119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:22.317161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:22.318788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.318842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.318871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:22.321112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.321157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.321191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.321255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.324601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.326319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:22.326465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:22.327336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.327461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.327516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.327743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:22.327794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.327957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.328033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:22.329721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.329775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.329927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.329960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:22.330145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.330180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:22.330259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:22.330286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.330335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:22.330370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.330405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:22.330435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:22.330484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.330526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:22.330556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:22.332062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.332163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.332192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:22.332229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:22.332267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.332349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
t TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:47:22.389456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:47:22.389491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:47:22.389889Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:47:22.389960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.389990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:325:2317] TestWaitNotification: OK eventTxId 102 2025-04-07T13:47:22.390423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.390555Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 146us result status StatusSuccess 2025-04-07T13:47:22.390802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-07T13:47:22.393181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.393351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.393453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T13:47:22.393558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.393596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.395340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.395480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-04-07T13:47:22.395634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.395669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.395719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-07T13:47:22.395814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.397312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:47:22.397432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-07T13:47:22.397701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.397793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.397843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-07T13:47:22.398009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:47:22.398047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:47:22.398099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:47:22.398153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-07T13:47:22.398255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:47:22.398292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:47:22.398323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:47:22.398361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:47:22.398504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:47:22.398541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-07T13:47:22.398575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T13:47:22.401135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.401191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:47:22.401326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.401369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:47:22.401813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:47:22.401895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:47:22.401926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:47:22.401961Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T13:47:22.401993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:47:22.402070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T13:47:22.407411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:47:22.407686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:47:22.407721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:47:22.408080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:47:22.408164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.408197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:342:2334] TestWaitNotification: OK eventTxId 103 2025-04-07T13:47:22.408727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.408893Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/DirA" took 204us result status StatusSuccess 2025-04-07T13:47:22.409182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:22.289759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:22.289874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.289917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:22.289950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:22.289992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:22.290018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:22.290067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.290422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:22.356505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:22.356551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:22.376977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:22.377080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:22.377209Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:22.382576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:22.382731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:22.383251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.383420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:22.384918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.385870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.385918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.386017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:22.386062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.386102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:22.386238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.395778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:22.490752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.490937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.491087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:22.491209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.491240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.492983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.493088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:22.493235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.493287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T13:47:22.493318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:22.493348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:22.495097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.495146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.495174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:22.496461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.496487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.496511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.496558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.503544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.505020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:22.505206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:22.505858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.505962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.505996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.506167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:22.506198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.506310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.506362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:22.507899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.507951Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.508111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.508149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:22.508364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.508409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:22.508492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:22.508518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.508564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:22.508605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.508637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:22.508661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:22.508756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.508808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:22.508842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:22.510552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.510640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.510681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:22.510714Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:22.510757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.510851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:47:22.513217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:47:22.513577Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: 
[1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:47:22.371689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:47:22.371788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.371821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:47:22.371850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:47:22.371899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:47:22.371934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:47:22.371996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:47:22.372286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:47:22.425302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:47:22.425343Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:22.432332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:47:22.432424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:47:22.432527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:47:22.437756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:47:22.437887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:47:22.438250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.438438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:47:22.440584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.441673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.441730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.441852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:47:22.441888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.441919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:47:22.442081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.448048Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:47:22.548637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:47:22.548878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.549056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:47:22.549242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:47:22.549291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.551414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.551527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:47:22.551685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.551739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:47:22.551781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:47:22.551813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:47:22.553557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.553606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:47:22.553658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:47:22.555183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.555220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.555254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.555316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.558529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:47:22.560050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:47:22.560196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:47:22.561040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.561156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.561201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.561416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:47:22.561458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:47:22.561603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.561699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:47:22.563436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.563480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.563626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.563660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:47:22.563926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.563959Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:47:22.564040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:47:22.564067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.564110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:47:22.564149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:47:22.564187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:47:22.564212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:47:22.564260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:47:22.564303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:47:22.564337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:47:22.566091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.566273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:47:22.566306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:47:22.566336Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:47:22.566377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:47:22.566461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... lPathId: 4] was 2 2025-04-07T13:47:22.677515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T13:47:22.678746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-04-07T13:47:22.678928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-04-07T13:47:22.679902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:47:22.680006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:47:22.680045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-07T13:47:22.680141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:47:22.680186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T13:47:22.680221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:47:22.680274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:47:22.680318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:47:22.680341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-07T13:47:22.680379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:47:22.680412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:47:22.680449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:47:22.680486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T13:47:22.680513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-07T13:47:22.680536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-07T13:47:22.680576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-07T13:47:22.681380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:47:22.682911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:47:22.684157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:47:22.684186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:47:22.684313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T13:47:22.684496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:47:22.684528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-07T13:47:22.684570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-04-07T13:47:22.685135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:47:22.685211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:47:22.685241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:47:22.685281Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T13:47:22.685322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T13:47:22.685732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:47:22.685800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:47:22.685823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:47:22.685848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T13:47:22.685884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:47:22.685970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-07T13:47:22.686689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:47:22.686733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:47:22.686792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:47:22.688307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:47:22.689150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:47:22.689241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T13:47:22.689534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T13:47:22.689573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T13:47:22.690047Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:47:22.690118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:47:22.690171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:394:2386] TestWaitNotification: OK eventTxId 105 2025-04-07T13:47:22.690694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.690888Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 183us result status StatusPathDoesNotExist 2025-04-07T13:47:22.691074Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 
LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:47:22.691598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:47:22.691741Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 156us result status StatusSuccess 2025-04-07T13:47:22.692117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |83.8%| [TA] {RESULT} $(B)/ydb/library/mkql_proto/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDataShardTrace::TestTraceDistributedUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-04-07T13:46:22.729319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567199139610630:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:22.729373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bac/r3tmp/tmp8IenNJ/pdisk_1.dat 2025-04-07T13:46:23.515600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.515776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.522194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:23.533555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1590, node 1 2025-04-07T13:46:23.787918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:23.787938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:23.787944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:23.788023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:46:27.468590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.481954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:27.483353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.498362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:27.511266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:27.511292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:46:27.529757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:27.529785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:27.531543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.593100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033587597, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:27.593347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:27.594820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:27.647197Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567199139610630:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:27.647239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:27.668269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:27.669449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:27.669502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:27.697556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:27.697809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:27.698064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 waiting... 
2025-04-07T13:46:27.793463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:27.793521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:27.793756Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:27.794228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T13:46:27.898486Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:38.537565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:38.537590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:01.097340Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567368289370709:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:01.097613Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bac/r3tmp/tmpH3d8Ml/pdisk_1.dat 2025-04-07T13:47:02.769875Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:03.985768Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:05.113190Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:06.014286Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:06.094622Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490567368289370709:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:06.094659Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:06.125207Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:06.203200Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:06.212862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:06.242956Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30628, node 4 2025-04-07T13:47:08.104046Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-07T13:47:08.104063Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:08.104070Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:08.104157Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:09.391766Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:09.392180Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:47:09.392207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:09.394492Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:47:09.394673Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:47:09.394687Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:47:09.396913Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 Progres ... 
t', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.767539Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715732, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.767662Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715735, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.767771Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715734, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.768936Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715742:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.769012Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715742:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.770175Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715743:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.770270Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715743:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.772469Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715736, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, 
set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.772656Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715737, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.772895Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715745:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.772967Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715745:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.775469Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715744:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.775527Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715744:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.776223Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715746:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.776277Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715746:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.776590Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715747:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.776639Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715747:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.778947Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715738, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: 
+(SR|DS):root@builtin 2025-04-07T13:47:21.779092Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715739, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.779202Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715740, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.779318Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715741, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.780355Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715742, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.780544Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715743, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.780871Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715745, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.781946Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715744, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request 
accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.782175Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715746, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.782377Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715747, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-04-07T13:46:21.859926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567197070231563:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:21.860010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bd2/r3tmp/tmpu91xH2/pdisk_1.dat 2025-04-07T13:46:23.020857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:23.314615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:23.318695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.318815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.333653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29768, node 1 2025-04-07T13:46:23.607591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:23.607613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:23.607619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:23.607687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1278 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:24.387518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:24.409452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:24.456773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:24.484229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:24.492849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:24.492874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:46:24.556286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:24.556529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:46:24.608304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:24.775558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033584748, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:24.775587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:46:24.788374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:46:24.837970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:24.838587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:24.838846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:46:24.839586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:46:24.839613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:46:24.839876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-04-07T13:46:25.002397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:46:25.002451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:46:25.002700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:25.003429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2025-04-07T13:46:25.083313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:46:26.872940Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567197070231563:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:26.872992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:36.900138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567261494742249:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:36.900650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:36.900950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567261494742261:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:46:36.903190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:46:36.903322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:36.903340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T13:46:36.903388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:36.903398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T13:46:36.903440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:36.903489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:46:36.903675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2025-04-07T13:46:36.903847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:36.903860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:46:36.926733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:46:36.966106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:36.968461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:36.968531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2025-04-07T13:46:36.971922Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:36.974129Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:36.974200Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:36.974278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:46:37.015047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2025-04-07T13:46:37.015092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:46:37.015108Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:46:37.016120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:46:37.016133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2 ... lt, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:47:21.708260Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:47:21.708790Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:47:21.708871Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:47:21.709063Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:47:21.709147Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, ProgressState 2025-04-07T13:47:21.709207Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:47:21.712418Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.713680Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.713714Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:47:21.714008Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.714047Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.714058Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:47:21.714209Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.714224Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.714234Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:47:21.714339Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, 
at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.714356Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.714365Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:47:21.714447Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.714462Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.714472Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T13:47:21.714620Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:47:21.719976Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033641763, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:47:21.720018Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033641763, at schemeshard: 72057594046644480 2025-04-07T13:47:21.720135Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T13:47:21.720236Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033641763, at schemeshard: 72057594046644480 2025-04-07T13:47:21.720287Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2025-04-07T13:47:21.720337Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033641763, at schemeshard: 72057594046644480 2025-04-07T13:47:21.720379Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2025-04-07T13:47:21.720468Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1744033641763 2025-04-07T13:47:21.720522Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2025-04-07T13:47:21.722523Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:47:21.723103Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:47:21.723169Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2025-04-07T13:47:21.723236Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2025-04-07T13:47:21.723432Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T13:47:21.723463Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2025-04-07T13:47:21.723521Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 
281474976710659:3 ProgressState 2025-04-07T13:47:21.723549Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2025-04-07T13:47:21.723619Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2025-04-07T13:47:21.723722Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2025-04-07T13:47:21.723746Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:47:21.723779Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2025-04-07T13:47:21.723792Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:47:21.723802Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2025-04-07T13:47:21.723818Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:47:21.726848Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.726902Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.726917Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:47:21.727139Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.727165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.727174Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:47:21.727280Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.727307Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.727316Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:47:21.727415Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.727428Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.727435Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:47:21.727558Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:47:21.727578Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:47:21.727587Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:47:21.727615Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T13:47:21.731330Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490567451596932348:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:47:21.805971Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:47:21.806126Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:47:21.808269Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> TColumnShardTestSchema::OneColdTier [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034167.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144034167.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034167.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124034167.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032967.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124032967.000000s;Name=;Codec=}; 2025-04-07T13:46:21.773985Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:21.918595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:21.939637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:21.939980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:21.940217Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:21.947394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:21.947598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:21.947810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:21.947935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:21.948027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:21.948110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:21.948202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:21.948293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:21.948398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:21.948500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:21.948587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:21.948722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:21.970946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:21.971036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:21.973265Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:21.973393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:21.973461Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-07T13:46:21.973642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:21.973789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:21.973858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:21.973894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:21.973978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:21.974530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:21.974600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:21.974636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:21.974803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:21.974869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:21.974903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:21.974928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:21.975003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:21.975047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:21.975083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:21.975147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:21.975211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:21.975244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:21.975267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:21.975309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:21.975345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:21.975369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:21.975523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-04-07T13:46:21.975614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-07T13:46:21.975683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-07T13:46:21.975751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-07T13:46:21.975882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:21.975943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:21.975986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:21.976184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:21.976230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:21.976258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:21.976474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
BUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:25.981562Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] finished for tablet 9437184 2025-04-07T13:47:25.981601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] send ScanData to [1:683:2686] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:25.981884Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:684:2687] and sent to [1:683:2686] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack"],"t":0.609},{"events":["l_task_result"],"t":1.4},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.402}],"full":{"a":1744033644579091,"name":"_full_task","f":1744033644579091,"d_finished":0,"c":0,"l":1744033645981622,"d":1402531},"events":[{"name":"bootstrap","f":1744033644579253,"d_finished":3850,"c":1,"l":1744033644583103,"d":3850},{"a":1744033645981446,"name":"ack","f":1744033645188847,"d_finished":2072,"c":3,"l":1744033645981392,"d":2248},{"a":1744033645981439,"name":"processing","f":1744033644583829,"d_finished":564923,"c":24,"l":1744033645981393,"d":565106},{"name":"ProduceResults","f":1744033644580944,"d_finished":11587,"c":29,"l":1744033645981555,"d":11587},{"a":1744033645981556,"name":"Finish","f":1744033645981556,"d_finished":0,"c":0,"l":1744033645981622,"d":66},{"name":"task_result","f":1744033644583841,"d_finished":562475,"c":21,"l":1744033645979867,"d":562475}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:684:2687]->[1:683:2686] 2025-04-07T13:47:25.981927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:24.578621Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2025-04-07T13:47:25.981947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:25.982045Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:1.345430s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=1.240365s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.025261s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.199468s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:47:25.982088Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:25.983102Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-07T13:47:25.983218Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-07T13:47:25.983293Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:25.983404Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:25.983450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:25.983741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:700:2703];trace_detailed=; 2025-04-07T13:47:25.983956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:25.984082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:25.984178Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:25.984255Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce 
result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:25.984373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:25.984433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:25.984507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:25.984531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] finished for tablet 9437184 2025-04-07T13:47:25.984576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] send ScanData to [1:699:2702] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:25.984841Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:700:2703] and sent to [1:699:2702] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1744033645983706,"name":"_full_task","f":1744033645983706,"d_finished":0,"c":0,"l":1744033645984603,"d":897},"events":[{"name":"bootstrap","f":1744033645983790,"d_finished":486,"c":1,"l":1744033645984276,"d":486},{"a":1744033645984359,"name":"ack","f":1744033645984359,"d_finished":0,"c":0,"l":1744033645984603,"d":244},{"a":1744033645984350,"name":"processing","f":1744033645984350,"d_finished":0,"c":0,"l":1744033645984603,"d":253},{"name":"ProduceResults","f":1744033645984134,"d_finished":274,"c":2,"l":1744033645984521,"d":274},{"a":1744033645984523,"name":"Finish","f":1744033645984523,"d_finished":0,"c":0,"l":1744033645984603,"d":80}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:700:2703]->[1:699:2702] 2025-04-07T13:47:25.984893Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:25.983484Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:25.984925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:25.984953Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:25.985016Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034159.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144034159.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034159.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124034159.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032959.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124032959.000000s;Name=;Codec=}; 2025-04-07T13:46:22.057772Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.267778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.424508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.424743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.427503Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.518156Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.520061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.530850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.531026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.531108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.531207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.531305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.531405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.531500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.531613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.531720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.532083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:22.667952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:22.668437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:22.688855Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:22.689640Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:22.690368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:22.692674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.694270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:22.694841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:22.695103Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:22.695867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:22.696812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:22.696856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:22.697119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:22.699050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.699300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:22.699336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:22.699361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:22.700075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:22.700361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:22.700396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:22.700420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:22.701413Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:22.701463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:22.701930Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:22.701984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:22.702018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:22.702041Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:22.702804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T13:46:22.702870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-07T13:46:22.704178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=502; 2025-04-07T13:46:22.704923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=490; 2025-04-07T13:46:22.706155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:22.706412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:22.706639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:22.707821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:22.707860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.708082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.709771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer ... 
dy_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:27.316533Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] finished for tablet 9437184 2025-04-07T13:47:27.316583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] send ScanData to [1:683:2686] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:27.330083Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:684:2687] and sent to [1:683:2686] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack"],"t":1.303},{"events":["l_task_result"],"t":2.652},{"events":["l_ProduceResults","f_Finish"],"t":2.676},{"events":["l_ack","l_processing","l_Finish"],"t":2.677}],"full":{"a":1744033644639567,"name":"_full_task","f":1744033644639567,"d_finished":0,"c":0,"l":1744033647316612,"d":2677045},"events":[{"name":"bootstrap","f":1744033644639695,"d_finished":4857,"c":1,"l":1744033644644552,"d":4857},{"a":1744033647316099,"name":"ack","f":1744033645943377,"d_finished":21804,"c":3,"l":1744033647315439,"d":22317},{"a":1744033647316090,"name":"processing","f":1744033644646100,"d_finished":580124,"c":24,"l":1744033647315441,"d":580646},{"name":"ProduceResults","f":1744033644641710,"d_finished":32192,"c":29,"l":1744033647316523,"d":32192},{"a":1744033647316525,"name":"Finish","f":1744033647316525,"d_finished":0,"c":0,"l":1744033647316612,"d":87},{"name":"task_result","f":1744033644646114,"d_finished":557942,"c":21,"l":1744033647292115,"d":557942}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:684:2687]->[1:683:2686] 2025-04-07T13:47:27.330479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:24.639209Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2025-04-07T13:47:27.330509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:27.330964Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:2.294645s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=3.754215s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.201282s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.412699s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:47:27.331025Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:27.584820Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-07T13:47:27.585298Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-07T13:47:27.585390Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:27.585519Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:27.585570Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:27.586792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:700:2703];trace_detailed=; 2025-04-07T13:47:27.587966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:27.588394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:27.608972Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:27.610322Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce 
result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:27.611561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:27.612496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:27.622026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:27.622601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] finished for tablet 9437184 2025-04-07T13:47:27.624091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] send ScanData to [1:699:2702] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:27.653068Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:700:2703] and sent to [1:699:2702] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.024},{"events":["f_ack","f_processing"],"t":0.025},{"events":["l_ProduceResults","f_Finish"],"t":0.036},{"events":["l_ack","l_processing","l_Finish"],"t":0.038}],"full":{"a":1744033647585906,"name":"_full_task","f":1744033647585906,"d_finished":0,"c":0,"l":1744033647624400,"d":38494},"events":[{"name":"bootstrap","f":1744033647587206,"d_finished":23402,"c":1,"l":1744033647610608,"d":23402},{"a":1744033647611536,"name":"ack","f":1744033647611536,"d_finished":0,"c":0,"l":1744033647624400,"d":12864},{"a":1744033647611513,"name":"processing","f":1744033647611513,"d_finished":0,"c":0,"l":1744033647624400,"d":12887},{"name":"ProduceResults","f":1744033647588473,"d_finished":33121,"c":2,"l":1744033647622582,"d":33121},{"a":1744033647622587,"name":"Finish","f":1744033647622587,"d_finished":0,"c":0,"l":1744033647624400,"d":1813}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:700:2703]->[1:699:2702] 2025-04-07T13:47:27.653209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:27.585609Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:27.653246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:27.655192Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:27.669992Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144034161.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124034161.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032961.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124032961.000000s;Name=;Codec=}; 2025-04-07T13:46:21.977560Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.073156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.100268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.100359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.100656Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.118044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.118324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.118607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.118752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.118849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.118972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.119099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.119230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.119376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.119541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.119666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.119797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:22.166757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:22.166842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:22.169068Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:22.169209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:22.169261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:22.169507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.169667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:22.169751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:22.169798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:22.169907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:22.169998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:22.170051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:22.170105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:22.170304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.170378Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:22.170426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:22.170462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:22.170586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:22.170658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:22.170706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:22.170752Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:22.170850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:22.170900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:22.170935Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:22.170990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:22.171033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:22.171065Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:22.171305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-07T13:46:22.171405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T13:46:22.171485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-07T13:46:22.171595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=62; 2025-04-07T13:46:22.171763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:22.171818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:22.171854Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:22.172085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:22.172142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.172178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.172360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... ; 2025-04-07T13:47:29.570173Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=288; 2025-04-07T13:47:29.570797Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:29.570835Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=633; 2025-04-07T13:47:29.571419Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=554; 2025-04-07T13:47:29.572013Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=297; 2025-04-07T13:47:29.573529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1220; 2025-04-07T13:47:29.577387Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3813; 2025-04-07T13:47:29.578025Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=595; 2025-04-07T13:47:29.578084Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=20; 2025-04-07T13:47:29.578116Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T13:47:29.578156Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:47:29.578189Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:47:29.578242Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=25; 2025-04-07T13:47:29.578272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:47:29.579139Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=836; 2025-04-07T13:47:29.579434Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:29.579475Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-04-07T13:47:29.579498Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=80011; 2025-04-07T13:47:29.580212Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:29.580597Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:29.580634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,cold; 2025-04-07T13:47:29.580965Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:29.581022Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:29.581092Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:29.581119Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:29.581774Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:29.582058Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:29.582386Z node 1 
:TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.582711Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:29.583020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:29.583307Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:29.583343Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.583369Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.583400Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:29.583453Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:29.584877Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.584947Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1186:3082];tablet_id=9437184;parent=[1:1149:3052];fline=manager.h:99;event=ask_data;request=request_id=73;1={portions_count=6};; 2025-04-07T13:47:29.587767Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:29.588208Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:29.588513Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:47:29.588534Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:29.588565Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:29.588604Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.588641Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:29.588691Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:29.588722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:29.589323Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.589603Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.589642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:29.589948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:29.593572Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2025-04-07T13:47:29.601518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2025-04-07T13:47:29.608441Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:29.608783Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034164.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144034164.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034164.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124034164.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032964.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124032964.000000s;Name=;Codec=}; 2025-04-07T13:46:22.523181Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.955394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.974826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.974892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.975121Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.982468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.982680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.982874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.982966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.983053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.983182Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.983283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.983381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.983526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.983620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.983706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.983800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.003901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.003972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.013823Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.014022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.014078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.014296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.014440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.014520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.014562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.014662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-07T13:46:23.014751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.014799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.014834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.015047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.015145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.015194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.015238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.015334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.015395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.015451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.015485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.015573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.015615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.015645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.015698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.015741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.015771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.015966Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-07T13:46:23.016064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-07T13:46:23.016149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-07T13:46:23.016254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-07T13:46:23.016402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.016452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.016484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:23.019441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:23.019555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.019598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.019735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
lts:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:28.925487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] finished for tablet 9437184 2025-04-07T13:47:28.925536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:684:2687] send ScanData to [1:683:2686] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:28.926103Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:684:2687] and sent to [1:683:2686] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.039},{"events":["l_bootstrap"],"t":0.1},{"events":["f_processing","f_task_result"],"t":0.129},{"events":["f_ack"],"t":2.605},{"events":["l_task_result"],"t":3.96},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":3.973}],"full":{"a":1744033644952312,"name":"_full_task","f":1744033644952312,"d_finished":0,"c":0,"l":1744033648925562,"d":3973250},"events":[{"name":"bootstrap","f":1744033644953095,"d_finished":99462,"c":1,"l":1744033645052557,"d":99462},{"a":1744033648925025,"name":"ack","f":1744033647558283,"d_finished":11777,"c":3,"l":1744033648924970,"d":12314},{"a":1744033648925018,"name":"processing","f":1744033645081636,"d_finished":1233986,"c":24,"l":1744033648924972,"d":1234530},{"name":"ProduceResults","f":1744033644992138,"d_finished":86092,"c":29,"l":1744033648925478,"d":86092},{"a":1744033648925479,"name":"Finish","f":1744033648925479,"d_finished":0,"c":0,"l":1744033648925562,"d":83},{"name":"task_result","f":1744033645081661,"d_finished":1221836,"c":21,"l":1744033648913058,"d":1221836}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:684:2687]->[1:683:2686] 2025-04-07T13:47:28.926159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:24.951960Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2025-04-07T13:47:28.926185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:28.926534Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:3.475909s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=6.129032s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.137437s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.214213s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.021208s;size=0;details={};};]};; 2025-04-07T13:47:28.926840Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:684:2687];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:29.174635Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-07T13:47:29.175044Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-07T13:47:29.175139Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:29.175270Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:29.175324Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:29.175689Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:700:2703];trace_detailed=; 2025-04-07T13:47:29.197348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:29.197558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:29.197753Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:29.205146Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:29.205896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:29.205977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:29.207592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:29.207914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] finished for tablet 9437184 2025-04-07T13:47:29.208505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:700:2703] send ScanData to [1:699:2702] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:29.227868Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:700:2703] and sent to [1:699:2702] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.022},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.03},{"events":["l_ProduceResults","f_Finish"],"t":0.032},{"events":["l_ack","l_processing","l_Finish"],"t":0.046}],"full":{"a":1744033649175648,"name":"_full_task","f":1744033649175648,"d_finished":0,"c":0,"l":1744033649222299,"d":46651},"events":[{"name":"bootstrap","f":1744033649176004,"d_finished":29714,"c":1,"l":1744033649205718,"d":29714},{"a":1744033649205878,"name":"ack","f":1744033649205878,"d_finished":0,"c":0,"l":1744033649222299,"d":16421},{"a":1744033649205865,"name":"processing","f":1744033649205865,"d_finished":0,"c":0,"l":1744033649222299,"d":16434},{"name":"ProduceResults","f":1744033649197665,"d_finished":10033,"c":2,"l":1744033649207901,"d":10033},{"a":1744033649207904,"name":"Finish","f":1744033649207904,"d_finished":0,"c":0,"l":1744033649222299,"d":14395}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:700:2703]->[1:699:2702] 2025-04-07T13:47:29.228568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:29.175367Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:29.265000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:29.265642Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:29.267447Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:700:2703];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034170.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144034170.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034170.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124034170.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032970.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124032970.000000s;Name=;Codec=}; 2025-04-07T13:46:22.938275Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:23.045750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.067969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.068045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.068267Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.076194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.076427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.076743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.076917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.077021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.077114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.077207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.077316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.077433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.077573Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.077677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.077800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.100352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.100435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.109669Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.109830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.109884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.110083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.110247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.110325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.110367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.110465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.110545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.110616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.110652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.110847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.110929Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.110973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.111003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.111106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.111165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.111215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.111247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.111317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.111353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.111378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.111421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.111457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.111485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.111690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-07T13:46:23.111791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-07T13:46:23.111879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-07T13:46:23.111979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=59; 2025-04-07T13:46:23.112129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.112198Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.112245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:23.112439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:23.112489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.112521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.116936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 25-04-07T13:47:29.554277Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=278; 2025-04-07T13:47:29.554596Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:29.554625Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=324; 2025-04-07T13:47:29.555730Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=591; 2025-04-07T13:47:29.558698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2937; 2025-04-07T13:47:29.560054Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1321; 2025-04-07T13:47:29.563187Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2758; 2025-04-07T13:47:29.564041Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=300; 2025-04-07T13:47:29.564872Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=536; 2025-04-07T13:47:29.564908Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T13:47:29.564937Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T13:47:29.564964Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:47:29.565536Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=549; 2025-04-07T13:47:29.565567Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:47:29.565900Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=309; 2025-04-07T13:47:29.565929Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:29.566524Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=308; 2025-04-07T13:47:29.574421Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=86456; 2025-04-07T13:47:29.574772Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:29.574843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:29.575152Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,cold; 2025-04-07T13:47:29.575455Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:29.575507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:29.575833Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:29.576128Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:29.576491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:29.576522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:29.576563Z node 1 
:TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.576610Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:29.576915Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:29.576948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:29.577339Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.577364Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.577678Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:29.578013Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:29.578806Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.579409Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1186:3082];tablet_id=9437184;parent=[1:1149:3052];fline=manager.h:99;event=ask_data;request=request_id=73;1={portions_count=6};; 2025-04-07T13:47:29.580230Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:29.580708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:29.580999Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:47:29.581016Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:29.581045Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:29.581337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:29.581371Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:29.581407Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:29.582035Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:29.582369Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.582677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:29.582707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:29.583019Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:29.585912Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2025-04-07T13:47:29.587763Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2025-04-07T13:47:29.591601Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:29.591886Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TColumnShardTestSchema::HotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 13044, MsgBus: 2691 2025-04-07T13:46:43.528068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567289245024475:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:43.528187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001153/r3tmp/tmpHBwW3Z/pdisk_1.dat 2025-04-07T13:46:44.522526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:44.522858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:44.572370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13044, node 1 2025-04-07T13:46:46.366597Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:46.554690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:46.554705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:46.554952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:46.556073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:48.525493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567289245024475:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:48.527406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2691 TClient is connected to server localhost:2691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:46:59.330261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:46:59.392352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:46:59.757611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:59.757629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:00.543921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:01.649859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:02.241824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:07.715803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567392324241208:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:07.902343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:08.322929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:08.430831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:08.551538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:08.601857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:08.657647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:08.871194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:09.071061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567400914176323:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:09.071127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:09.072041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567400914176328:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:09.081757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:09.103198Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:47:09.106881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567400914176330:2453], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:47:11.227644Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjU5MjY4Mi04MzVjN2YwLTNiOTljNGE4LTU5Y2Q4Y2Y2, ActorId: [1:7490567405209143938:2484], ActorState: ExecuteState, TraceId: 01jr882jx6egpgjgy67yxee959, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 4368, MsgBus: 9954 2025-04-07T13:47:15.287319Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567429359099423:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:15.287370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001153/r3tmp/tmpIJxi5O/pdisk_1.dat 2025-04-07T13:47:16.961512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:16.962130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:16.976182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4368, node 2 2025-04-07T13:47:18.232613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:18.381858Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:18.381879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:18.381886Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:18.381974Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9954 2025-04-07T13:47:20.322190Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567429359099423:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:20.322254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:9954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:21.361750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:21.429131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:21.661644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:21.805074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:21.891329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:23.539558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567463718839512:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:23.539659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:23.584525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.637530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.709227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.785290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.839572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.879964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:23.974044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567463718840019:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:23.974119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:23.974444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567463718840024:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:23.978013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:23.986120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567463718840026:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:47:30.252854Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWVmY2Y1Yy1hMmI4MTg5NC00MzQ5MGIwMy1mMGIyNQ==, ActorId: [2:7490567476603742263:2469], ActorState: ExecuteState, TraceId: 01jr8834s129nmsfhq4jgan8px, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034185.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034185.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034185.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034185.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034185.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034185.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144034185.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034185.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032985.000000s;Name=;Codec=}; 2025-04-07T13:46:28.553066Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:29.087344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:29.121976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:29.122157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:29.122414Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:29.138491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:29.138702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:29.138919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:29.139005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:29.139088Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:29.139171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:29.139249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:29.139341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:29.139439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:29.139558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:29.139643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:29.139756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:29.192372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:29.192437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:29.194871Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:29.195005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:29.195057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:29.195210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:29.195340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:29.195404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:29.195439Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:29.195516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:29.195567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:29.195637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:29.195672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:29.195828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:29.195889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:29.195924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:29.195951Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:29.196028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:29.196069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:29.196102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:29.196126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:29.196184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:29.196216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:29.196243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:29.196281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:29.196313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:29.196352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:29.196489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-07T13:46:29.196561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-07T13:46:29.196627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-07T13:46:29.196716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-07T13:46:29.196849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:29.196897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:29.196926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:29.197113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... 
578885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:33.578911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:913:2918] finished for tablet 9437184 2025-04-07T13:47:33.578967Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:913:2918] send ScanData to [1:912:2917] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:33.579347Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:913:2918] and sent to [1:912:2917] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.011},{"events":["f_processing","f_task_result"],"t":0.015},{"events":["f_ack","l_task_result"],"t":0.314},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.317}],"full":{"a":1744033653261593,"name":"_full_task","f":1744033653261593,"d_finished":0,"c":0,"l":1744033653578998,"d":317405},"events":[{"name":"bootstrap","f":1744033653261719,"d_finished":11622,"c":1,"l":1744033653273341,"d":11622},{"a":1744033653578757,"name":"ack","f":1744033653575821,"d_finished":2213,"c":3,"l":1744033653578695,"d":2454},{"a":1744033653578748,"name":"processing","f":1744033653276637,"d_finished":192615,"c":24,"l":1744033653578696,"d":192865},{"name":"ProduceResults","f":1744033653264221,"d_finished":13664,"c":29,"l":1744033653578900,"d":13664},{"a":1744033653578901,"name":"Finish","f":1744033653578901,"d_finished":0,"c":0,"l":1744033653578998,"d":97},{"name":"task_result","f":1744033653276648,"d_finished":190046,"c":21,"l":1744033653575648,"d":190046}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:913:2918]->[1:912:2917] 2025-04-07T13:47:33.579418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:33.261361Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:33.579450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:33.579565Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.188626s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.271923s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.013961s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:47:33.579631Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:33.582200Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-07T13:47:33.582380Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-07T13:47:33.582481Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:33.582629Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:33.582692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:33.583104Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:929:2934];trace_detailed=; 2025-04-07T13:47:33.583432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:33.583610Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:33.583746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:33.583852Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:33.584035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:33.584114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:33.584214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:33.584255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:929:2934] finished for tablet 9437184 2025-04-07T13:47:33.584333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:929:2934] send ScanData to [1:928:2933] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:33.584702Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:929:2934] and sent to [1:928:2933] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744033653583053,"name":"_full_task","f":1744033653583053,"d_finished":0,"c":0,"l":1744033653584373,"d":1320},"events":[{"name":"bootstrap","f":1744033653583199,"d_finished":681,"c":1,"l":1744033653583880,"d":681},{"a":1744033653584018,"name":"ack","f":1744033653584018,"d_finished":0,"c":0,"l":1744033653584373,"d":355},{"a":1744033653584005,"name":"processing","f":1744033653584005,"d_finished":0,"c":0,"l":1744033653584373,"d":368},{"name":"ProduceResults","f":1744033653583685,"d_finished":379,"c":2,"l":1744033653584241,"d":379},{"a":1744033653584243,"name":"Finish","f":1744033653584243,"d_finished":0,"c":0,"l":1744033653584373,"d":130}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:929:2934]->[1:928:2933] 2025-04-07T13:47:33.584796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:33.582738Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:33.584839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:33.584894Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:33.584982Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] Test command err: 2025-04-07T13:46:22.497657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567201822842768:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:22.497685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf3/r3tmp/tmpKluT25/pdisk_1.dat 2025-04-07T13:46:23.525228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:23.624946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65503, node 1 2025-04-07T13:46:23.745892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.760816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.812958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:46:23.836573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:23.836601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:23.836610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:23.836734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:27.331191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.350291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:27.394202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.410903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:27.411079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:27.411098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:46:27.439005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:27.439024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:27.442324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:27.452759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.486871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033587520, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:27.486913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:27.487237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:27.505208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567201822842768:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:27.505264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:46:27.519027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:27.519786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:27.519850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:27.533076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:46:27.533306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:27.533554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:27.553402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:27.553445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:27.553657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:27.554596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:28.906190Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.169887s 2025-04-07T13:46:28.906231Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.184848s 2025-04-07T13:46:38.617463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:38.617486Z node 1 :IMPORT WARN: Table profiles were not loaded 
GRpc shutdown warning: left infly: 45, spent: 3.70458 sec 2025-04-07T13:46:42.249241Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881bey6bq77d68gedeqv26", Request deadline has expired for 8.685301s seconds 2025-04-07T13:46:42.254088Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881bgh5qwad22ex7bybyjt", Request deadline has expired for 8.020405s seconds 2025-04-07T13:46:42.254131Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881bhjdjwebk77dq71x00r", Request deadline has expired for 7.310462s seconds 2025-04-07T13:46:42.254157Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881bsd44zbtcyrg3n85ek9", Request deadline has expired for 8.020487s seconds 2025-04-07T13:46:42.254182Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c582s23a96deh7qhatv", Request deadline has expired for 8.840515s seconds 2025-04-07T13:46:42.254206Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c6saz3qpjr4c12axtfv", Request deadline has expired for 8.660539s seconds 2025-04-07T13:46:42.254228Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c6t7a6we7awf9pxnaje", Request deadline has expired for 8.260560s seconds 2025-04-07T13:46:42.254259Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c6vejz3w189q727n34s", Request deadline has expired for 8.630592s seconds 2025-04-07T13:46:42.254293Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c755ve40z4de1j9ypxv", Request deadline has expired for 7.830614s seconds 2025-04-07T13:46:42.254326Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c77fpmncjs2f6y6acek", Request deadline has expired for 8.690658s seconds 2025-04-07T13:46:42.254350Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c849qcfrmakr1bda37h", Request deadline has expired for 7.830680s seconds 2025-04-07T13:46:42.254375Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c86c463me80srpqsykc", Request deadline has expired for 8.210708s seconds 2025-04-07T13:46:42.254397Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c870tf8qg9fkmp1v4sf", Request deadline has expired for 8.660731s seconds 2025-04-07T13:46:42.254421Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c8h85kjdrh4weycj19g", Request deadline has expired for 9.350754s seconds 2025-04-07T13:46:42.254444Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c8k53sgf6wbe50jyk16", Request deadline has expired for 8.660775s seconds 2025-04-07T13:46:42.254467Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881c8zaryepbxg7nm54q03", Request deadline has expired for 8.660800s seconds 2025-04-07T13:46:42.254502Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cc4fmn4hhd5vjgvampy", Request deadline has expired for 9.010821s seconds 2025-04-07T13:46:42.254530Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cc7amnpk2qpx8t6jfp4", Request deadline has expired for 7.970863s seconds 2025-04-07T13:46:42.254552Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cc80764jx1tez001veg", Request deadline has expired for 9.570883s seconds 2025-04-07T13:46:42.254584Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cc1ct310x4s73cv0wrs", Request deadline has expired for 7.970909s seconds 2025-04-07T13:46:42.254609Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cc36wjjzgz5s937jywn", Request deadline has expired for 8.840942s seconds 2025-04-07T13:46:42.254631Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881ccp0g8f6zzyw5mxgp0b", Request deadline has expired for 8.170964s seconds 2025-04-07T13:46:42.254655Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cea9s7yaga1mdrz2ze7", Request deadline has expired for 8.840985s seconds 2025-04-07T13:46:42.254677Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cec2wehwb5edgfv2hsd", Request deadline has expired for 9.571011s seconds 2025-04-07T13:46:42.254696Z node 1 :KQP_PROXY ERROR: TraceId: 
"01jr881cetc22tbjb12w3fp65q", Request deadline has expired for 8.451030s seconds 2025-04-07T13:46:42.254717Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cev95w1r92y4zpkjq34", Request deadline has expired for 7.971050s seconds 2025-04-07T13:46:42.254744Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cfm6jj5t0nebdf4s4b5", Request deadline has expired for 8.161071s seconds 2025-04-07T13:46:42.254767Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cfn3gjg9392e3fthqec", Request deadline has expired for 8.431100s seconds 2025-04-07T13:46:42.254786Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cg98k9s96gxsqvd2yrj", Request deadline has expired for 9.011120s seconds 2025-04-07T13:46:42.254807Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cgy8xq34c0fyymatwp7", Request deadline has expired for 9.571142s seconds 2025-04-07T13:46:42.254844Z node 1 :KQP_PROXY ERROR: TraceId: "01jr881cgyaykzxm4gasaacnmf", Request deadline has expired for 9.921173s seconds 2025-04-07T13:46:42.254873Z node 1 :KQP_PROXY ERROR: ... uestId: 495 2025-04-07T13:47:21.448246Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 377 2025-04-07T13:47:21.448286Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 385 2025-04-07T13:47:21.448322Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 420 2025-04-07T13:47:21.448357Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 440 2025-04-07T13:47:21.448394Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 443 2025-04-07T13:47:21.448427Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 449 2025-04-07T13:47:21.448458Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzNhY2Q5NTUtYmI3YmQ3ZDctY2UzMTU0ZWYtODdkNTM0ZTQ=, ActorId: [4:7490567404103427197:2325], ActorState: ExecuteState, TraceId: 01jr882wf749ke9mzefv57c93p, Reply query error, msg: Pending previous query completion proxyRequestId: 470 2025-04-07T13:47:21.448579Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending 
previous query completion proxyRequestId: 381 2025-04-07T13:47:21.448614Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 383 2025-04-07T13:47:21.448679Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 388 2025-04-07T13:47:21.448719Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 411 2025-04-07T13:47:21.448769Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 414 2025-04-07T13:47:21.448801Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 422 2025-04-07T13:47:21.448833Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 423 2025-04-07T13:47:21.448867Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 433 2025-04-07T13:47:21.448907Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 441 2025-04-07T13:47:21.448939Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 442 2025-04-07T13:47:21.448993Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 460 2025-04-07T13:47:21.449026Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, 
msg: Pending previous query completion proxyRequestId: 472 2025-04-07T13:47:21.449055Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 474 2025-04-07T13:47:21.449091Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2025-04-07T13:47:21.449122Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-04-07T13:47:21.449151Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2025-04-07T13:47:21.449186Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 508 2025-04-07T13:47:21.449218Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc1NDNlMGItMzY5ZWE2MzQtY2E3MmQ2MmEtZjliNGFjN2I=, ActorId: [4:7490567404103427195:2323], ActorState: ExecuteState, TraceId: 01jr882wf5bym6ydtycy3dh4zs, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2025-04-07T13:47:21.449267Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZlOTJkNzYtOTM2Yjk1YTgtM2YwNTAwYmYtY2JjZTg1Yjg=, ActorId: [4:7490567404103427193:2321], ActorState: ExecuteState, TraceId: 01jr882wf6febvvn4ak000xxem, Reply query error, msg: Pending previous query completion proxyRequestId: 481 2025-04-07T13:47:21.449305Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZlOTJkNzYtOTM2Yjk1YTgtM2YwNTAwYmYtY2JjZTg1Yjg=, ActorId: [4:7490567404103427193:2321], ActorState: ExecuteState, TraceId: 01jr882wf6febvvn4ak000xxem, Reply query error, msg: Pending previous query completion proxyRequestId: 488 2025-04-07T13:47:21.449356Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZlOTJkNzYtOTM2Yjk1YTgtM2YwNTAwYmYtY2JjZTg1Yjg=, ActorId: [4:7490567404103427193:2321], ActorState: ExecuteState, TraceId: 01jr882wf6febvvn4ak000xxem, Reply query error, msg: Pending previous query completion proxyRequestId: 502 2025-04-07T13:47:21.449394Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZlOTJkNzYtOTM2Yjk1YTgtM2YwNTAwYmYtY2JjZTg1Yjg=, ActorId: [4:7490567404103427193:2321], ActorState: ExecuteState, TraceId: 01jr882wf6febvvn4ak000xxem, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2025-04-07T13:47:21.449425Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZlOTJkNzYtOTM2Yjk1YTgtM2YwNTAwYmYtY2JjZTg1Yjg=, ActorId: [4:7490567404103427193:2321], ActorState: ExecuteState, TraceId: 01jr882wf6febvvn4ak000xxem, Reply 
query error, msg: Pending previous query completion proxyRequestId: 510 2025-04-07T13:47:21.449576Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDM1ZTg0NzMtYTZmMTg0MTctMTE3N2I4OTYtYmEyYzlhODI=, ActorId: [4:7490567404103427189:2318], ActorState: ExecuteState, TraceId: 01jr882wf69cxw33zjnmsj9m1v, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2025-04-07T13:47:21.449645Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDM1ZTg0NzMtYTZmMTg0MTctMTE3N2I4OTYtYmEyYzlhODI=, ActorId: [4:7490567404103427189:2318], ActorState: ExecuteState, TraceId: 01jr882wf69cxw33zjnmsj9m1v, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2025-04-07T13:47:21.449676Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDM1ZTg0NzMtYTZmMTg0MTctMTE3N2I4OTYtYmEyYzlhODI=, ActorId: [4:7490567404103427189:2318], ActorState: ExecuteState, TraceId: 01jr882wf69cxw33zjnmsj9m1v, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2025-04-07T13:47:21.449741Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDM1ZTg0NzMtYTZmMTg0MTctMTE3N2I4OTYtYmEyYzlhODI=, ActorId: [4:7490567404103427189:2318], ActorState: ExecuteState, TraceId: 01jr882wf69cxw33zjnmsj9m1v, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2025-04-07T13:47:21.449877Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTZlNmE3ZWItNTgwYjljMTQtYjcxN2MwNjUtOGVmMzMwMWM=, ActorId: [4:7490567404103427191:2319], ActorState: ExecuteState, TraceId: 01jr882wkh7pza42w4phf03abm, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2025-04-07T13:47:21.449920Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTZlNmE3ZWItNTgwYjljMTQtYjcxN2MwNjUtOGVmMzMwMWM=, ActorId: [4:7490567404103427191:2319], ActorState: ExecuteState, TraceId: 01jr882wkh7pza42w4phf03abm, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2025-04-07T13:47:21.449953Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTZlNmE3ZWItNTgwYjljMTQtYjcxN2MwNjUtOGVmMzMwMWM=, ActorId: [4:7490567404103427191:2319], ActorState: ExecuteState, TraceId: 01jr882wkh7pza42w4phf03abm, Reply query error, msg: Pending previous query completion proxyRequestId: 511 >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> Viewer::JsonAutocompleteEmpty |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] >> TStorageTenantTest::Empty [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> Viewer::PDiskMerging >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> Viewer::PDiskMerging [GOOD] >> Viewer::LevenshteinDistance [GOOD] >> Viewer::QueryExecuteScript >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-04-07T13:47:32.697934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:32.698210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:32.698428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a81/r3tmp/tmpaFYTha/pdisk_1.dat 2025-04-07T13:47:33.634416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:47:33.684006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:33.734166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:33.734325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:33.745712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:33.869944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:26.765399Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:27.594541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:27.772720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:27.773265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:27.775995Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:28.030628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:28.030834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:28.031033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:28.031120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:28.031200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:28.031285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:28.031376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:28.031464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:28.031551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:28.031648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:28.031730Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:28.031809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:28.149441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:28.149961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:28.157755Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:28.157891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:28.157933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:28.158093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:28.158231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:28.158300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:28.158333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:28.158401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:28.158446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:28.158496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:28.158522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:28.158653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:28.158703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:28.158735Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:28.158758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:28.158829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:28.158873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:28.158909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:28.158947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:28.159018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:28.159054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:28.159086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:28.159128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:28.159164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:28.159189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:28.159364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-07T13:46:28.159446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T13:46:28.159523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-07T13:46:28.159611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-07T13:46:28.159776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:28.159834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:28.159867Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:28.160038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... ad_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=45; 2025-04-07T13:47:39.229194Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:39.229246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=81; 2025-04-07T13:47:39.229345Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=71; 2025-04-07T13:47:39.229413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=43; 2025-04-07T13:47:39.229570Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=130; 2025-04-07T13:47:39.229996Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=395; 2025-04-07T13:47:39.230046Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=19; 2025-04-07T13:47:39.230093Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T13:47:39.230130Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-07T13:47:39.230174Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:47:39.230208Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:47:39.230263Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-07T13:47:39.230294Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-04-07T13:47:39.230348Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=30; 2025-04-07T13:47:39.230385Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:39.230431Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-04-07T13:47:39.230457Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=10212; 2025-04-07T13:47:39.230561Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:39.230631Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:39.230670Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:39.230696Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:39.230748Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:39.230813Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:39.230839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:39.230896Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:39.230927Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:39.230965Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:39.231020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:39.231061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:39.231099Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:39.231134Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:39.231162Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:39.231200Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:39.231256Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:39.231749Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:39.231804Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1488:3386];tablet_id=9437184;parent=[1:1439:3344];fline=manager.h:99;event=ask_data;request=request_id=98;1={portions_count=15};; 2025-04-07T13:47:39.232289Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:39.237595Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:39.237913Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:39.237933Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:39.237962Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:39.238001Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:39.238053Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:39.238370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:39.238950Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:39.239243Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:39.239537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:39.239568Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:39.239620Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:39.245114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2025-04-07T13:47:39.248172Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2025-04-07T13:47:39.258298Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:39.258338Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; 
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:25.625691Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:26.562157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:26.734071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:26.734593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:26.740977Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:26.955079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:26.956582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:26.958570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:26.958912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:26.959459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:26.959562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:26.959639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:26.960233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:26.960552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:26.960920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:26.960999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:26.961300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:27.032861Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:27.033089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:27.048252Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:27.057597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:27.058321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:27.059605Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:27.060869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:27.061359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:27.061619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:27.062147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:27.062434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:27.062695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:27.062959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:27.070650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:27.070925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:27.070961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:27.070987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:27.071703Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:27.071949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:27.072175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:27.072195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:27.076680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:27.076733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:27.076760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:27.076805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:27.076835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:27.076857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:27.078067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=251; 2025-04-07T13:46:27.079304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=746; 2025-04-07T13:46:27.080314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=484; 2025-04-07T13:46:27.083636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=3074; 2025-04-07T13:46:27.084951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:27.085214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:27.088918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:27.090476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... 
XECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=276; 2025-04-07T13:47:38.568975Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:38.569049Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12517; 2025-04-07T13:47:38.569716Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=622; 2025-04-07T13:47:38.570376Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=335; 2025-04-07T13:47:38.571827Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1422; 2025-04-07T13:47:38.584245Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=12104; 2025-04-07T13:47:38.584334Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=27; 2025-04-07T13:47:38.596784Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=12133; 2025-04-07T13:47:38.596853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-07T13:47:38.599039Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-07T13:47:38.599070Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:47:38.599129Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-04-07T13:47:38.599168Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-04-07T13:47:38.600026Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=827; 2025-04-07T13:47:38.600348Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:47:38.614807Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2025-04-07T13:47:38.614862Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=287635; 2025-04-07T13:47:38.614971Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:38.615354Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:38.615689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:38.616032Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:38.616442Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:38.616811Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:38.616842Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:38.616895Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:38.616925Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:38.617524Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:38.617588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:38.617628Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:38.617666Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:38.617708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:38.617736Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:38.618288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 
2025-04-07T13:47:38.618625Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:38.640969Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1486:3384];tablet_id=9437184;parent=[1:1437:3342];fline=manager.h:99;event=ask_data;request=request_id=98;1={portions_count=15};; 2025-04-07T13:47:38.643459Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:38.653264Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:38.653673Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:38.653695Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:38.653715Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:38.653746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:38.654027Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:38.654577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:38.654877Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:38.654906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:38.655194Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:38.655721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:38.655752Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:38.655804Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:38.671210Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2025-04-07T13:47:38.688235Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2025-04-07T13:47:38.711013Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:38.711416Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::Cluster10000Tablets >> KqpImmediateEffects::UpsertAfterInsert >> TColumnShardTestSchema::InternalTTL_Types [GOOD] >> TColumnShardTestSchema::HotTiersWithStat [GOOD] >> Viewer::JsonStorageListingV2 >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] >> TColumnShardTestSchema::HotTiers [GOOD] >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2025-04-07T13:45:59.961552Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:00.821344Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:01.287096Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:01.301580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:01.745523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:01.746357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:01.748440Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:01.965768Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:01.974427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:01.976917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:01.977062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:01.977556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:01.977830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:01.978305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:01.978387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:01.985750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:01.986058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:01.987753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:01.988882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:02.225502Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:02.228229Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:02.228624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:02.228884Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:02.260053Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:02.261074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:02.261455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:02.262791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:02.264443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:02.268600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:02.268833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:02.269637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:02.269876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:02.276993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:02.277177Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:02.279307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:02.279377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:02.279419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:02.279448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:02.279922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:02.280168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:02.280398Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:02.280421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:02.280894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:02.281114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:02.281137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:02.281172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:02.281199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:02.281446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:02.283199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=685; 2025-04-07T13:46:02.284633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=596; 2025-04-07T13:46:02.288003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=215; 2025-04-07T13:46:02.290314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=206; 2025-04-07T13:46:02.292353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:02.292801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:02.293002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:02.294471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:02.294692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:02.294717Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:02.295607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:02.296024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V ... t;interval_idx=0;interval_id=16; 2025-04-07T13:47:41.234403Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:47:41.234546Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.234591Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:47:41.234629Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:47:41.235112Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:47:41.235170Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:47:41.235209Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=16; 2025-04-07T13:47:41.235256Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2025-04-07T13:47:41.235296Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:47:41.235402Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.235445Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T13:47:41.235487Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:47:41.235691Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:41.235826Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.235867Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:47:41.235959Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2025-04-07T13:47:41.236007Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2025-04-07T13:47:41.236109Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2025-04-07T13:47:41.236242Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.236398Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.236483Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.236611Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:41.236939Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.237040Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:41.237076Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2025-04-07T13:47:41.237143Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:41.237540Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.104},{"events":["l_ProduceResults","f_Finish"],"t":0.105},{"events":["l_ack","l_processing","l_Finish"],"t":0.106}],"full":{"a":1744033661131108,"name":"_full_task","f":1744033661131108,"d_finished":0,"c":0,"l":1744033661237180,"d":106072},"events":[{"name":"bootstrap","f":1744033661131698,"d_finished":2411,"c":1,"l":1744033661134109,"d":2411},{"a":1744033661236593,"name":"ack","f":1744033661235667,"d_finished":837,"c":1,"l":1744033661236504,"d":1424},{"a":1744033661236582,"name":"processing","f":1744033661134310,"d_finished":3072,"c":5,"l":1744033661236508,"d":3670},{"name":"ProduceResults","f":1744033661132985,"d_finished":1975,"c":8,"l":1744033661237062,"d":1975},{"a":1744033661237064,"name":"Finish","f":1744033661237064,"d_finished":0,"c":0,"l":1744033661237180,"d":116},{"name":"task_result","f":1744033661134324,"d_finished":2137,"c":4,"l":1744033661235540,"d":2137}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:47:41.237614Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:41.130036Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T13:47:41.237664Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:41.237760Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.100252s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.094386s;size=0.0063152;details={columns=9;};};]};; 2025-04-07T13:47:41.237829Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> Viewer::TabletMerging >> KqpImmediateEffects::UpsertAfterInsertWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034183.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=124034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=124032983.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:23.719660Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:23.829548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.852805Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.852877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.853087Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.860279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.860498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.860801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.860921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.861020Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.861128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.861241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.861382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.861504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.861600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.861698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.861819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.884089Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.884166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.886426Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.886574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.886618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.886813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.886966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.887044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.887085Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.887176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.887235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.887277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.887322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.887472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.887528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.887565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.887591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.887669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.887716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.887757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.887784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.887854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.887892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.887919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.887969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.888007Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.888036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.888188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:46:23.888263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-07T13:46:23.888336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T13:46:23.888424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-07T13:46:23.888553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.888603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.888633Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.666024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] finished for tablet 9437184 2025-04-07T13:47:45.666080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] send ScanData to [1:938:2938] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:45.666379Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:939:2939] and sent to [1:938:2938] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.01},{"events":["l_bootstrap"],"t":0.012},{"events":["f_processing","f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":3.044},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":3.051}],"full":{"a":1744033662614382,"name":"_full_task","f":1744033662614382,"d_finished":0,"c":0,"l":1744033665666112,"d":3051730},"events":[{"name":"bootstrap","f":1744033662614486,"d_finished":12546,"c":1,"l":1744033662627032,"d":12546},{"a":1744033665665854,"name":"ack","f":1744033665659058,"d_finished":6116,"c":3,"l":1744033665665790,"d":6374},{"a":1744033665665846,"name":"processing","f":1744033662628471,"d_finished":1227657,"c":24,"l":1744033665665791,"d":1227923},{"name":"ProduceResults","f":1744033662625065,"d_finished":80364,"c":29,"l":1744033665666013,"d":80364},{"a":1744033665666014,"name":"Finish","f":1744033665666014,"d_finished":0,"c":0,"l":1744033665666112,"d":98},{"name":"task_result","f":1744033662628484,"d_finished":1221211,"c":21,"l":1744033665658927,"d":1221211}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:939:2939]->[1:938:2938] 2025-04-07T13:47:45.666434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:42.614114Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:45.666458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:45.666598Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:1.754987s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=2.449028s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.040898s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.306568s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.028348s;size=0;details={};};]};; 2025-04-07T13:47:45.666654Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:45.667755Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 
2025-04-07T13:47:45.667890Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T13:47:45.667958Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:45.668075Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:45.668123Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:45.668447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:946:2946];trace_detailed=; 2025-04-07T13:47:45.676719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:45.676895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:45.677020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.677119Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.677275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:45.677338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.677420Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.677457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2025-04-07T13:47:45.677513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:45.677814Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":1744033665668410,"name":"_full_task","f":1744033665668410,"d_finished":0,"c":0,"l":1744033665677551,"d":9141},"events":[{"name":"bootstrap","f":1744033665668492,"d_finished":8651,"c":1,"l":1744033665677143,"d":8651},{"a":1744033665677262,"name":"ack","f":1744033665677262,"d_finished":0,"c":0,"l":1744033665677551,"d":289},{"a":1744033665677252,"name":"processing","f":1744033665677252,"d_finished":0,"c":0,"l":1744033665677551,"d":299},{"name":"ProduceResults","f":1744033665676970,"d_finished":326,"c":2,"l":1744033665677445,"d":326},{"a":1744033665677446,"name":"Finish","f":1744033665677446,"d_finished":0,"c":0,"l":1744033665677551,"d":105}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2025-04-07T13:47:45.677878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:45.668160Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:45.677910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:45.677946Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:45.678009Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032980.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032980.000000s;Name=;Codec=}; 2025-04-07T13:46:22.304030Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.971891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.007208Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.007286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.007542Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.015878Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.016137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.016377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.016493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.016600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.016734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.016878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.016999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.017112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.017214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.017318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.017427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.043678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.043751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.049974Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.050137Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.050201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.050383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.050555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.050674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.050718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.050819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.050890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.050942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.050977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.051155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.051236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.051283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.051314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.051403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.051462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.051505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.051535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.051606Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.051648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.051676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.051724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.051762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.051790Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.051963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T13:46:23.052053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-07T13:46:23.052132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T13:46:23.052227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-07T13:46:23.052386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.052456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.052497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
ds_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:43.981644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] finished for tablet 9437184 2025-04-07T13:47:43.981701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] send ScanData to [1:938:2938] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:43.982021Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:939:2939] and sent to [1:938:2938] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.006},{"events":["l_bootstrap"],"t":0.008},{"events":["f_processing","f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":1.585},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.606}],"full":{"a":1744033662374813,"name":"_full_task","f":1744033662374813,"d_finished":0,"c":0,"l":1744033663981731,"d":1606918},"events":[{"name":"bootstrap","f":1744033662374956,"d_finished":8815,"c":1,"l":1744033662383771,"d":8815},{"a":1744033663981202,"name":"ack","f":1744033663960214,"d_finished":19416,"c":3,"l":1744033663981126,"d":19945},{"a":1744033663981194,"name":"processing","f":1744033662389089,"d_finished":693738,"c":24,"l":1744033663981129,"d":694275},{"name":"ProduceResults","f":1744033662381084,"d_finished":23948,"c":29,"l":1744033663981627,"d":23948},{"a":1744033663981628,"name":"Finish","f":1744033663981628,"d_finished":0,"c":0,"l":1744033663981731,"d":103},{"name":"task_result","f":1744033662389106,"d_finished":665977,"c":21,"l":1744033663960088,"d":665977}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:939:2939]->[1:938:2938] 2025-04-07T13:47:43.982074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:42.374448Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:43.982100Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:43.983804Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.858007s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.560319s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.148757s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.352923s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.017280s;size=0;details={};};]};; 2025-04-07T13:47:43.984216Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:44.190225Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T13:47:44.190398Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T13:47:44.190491Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:44.190625Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:44.190684Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:44.191109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:946:2946];trace_detailed=; 2025-04-07T13:47:44.191404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:44.191587Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:44.191725Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:44.191845Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:44.192038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:44.192125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:44.192229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:44.192268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2025-04-07T13:47:44.192338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:44.200883Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744033664191057,"name":"_full_task","f":1744033664191057,"d_finished":0,"c":0,"l":1744033664192379,"d":1322},"events":[{"name":"bootstrap","f":1744033664191175,"d_finished":697,"c":1,"l":1744033664191872,"d":697},{"a":1744033664192018,"name":"ack","f":1744033664192018,"d_finished":0,"c":0,"l":1744033664192379,"d":361},{"a":1744033664192005,"name":"processing","f":1744033664192005,"d_finished":0,"c":0,"l":1744033664192379,"d":374},{"name":"ProduceResults","f":1744033664191658,"d_finished":407,"c":2,"l":1744033664192255,"d":407},{"a":1744033664192257,"name":"Finish","f":1744033664192257,"d_finished":0,"c":0,"l":1744033664192379,"d":122}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got 
TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2025-04-07T13:47:44.201009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:44.190730Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:44.201056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:44.201108Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:44.201209Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034179.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034179.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034179.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034179.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034179.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034179.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034179.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034179.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032979.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032979.000000s;Name=;Codec=}; 2025-04-07T13:46:22.112148Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.530439Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.642793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.642876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.644628Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.828335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.829768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.830032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.830146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.830249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.830346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.830441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.830559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.830682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.830783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.830877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.830999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:22.876242Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:22.876418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:22.907468Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:22.907649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:22.907718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:22.907910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.908045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:22.908116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:22.908158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:22.908247Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:22.908300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:22.908336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:22.908369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:22.908534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.908597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:22.908637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:22.908674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:22.908751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:22.908799Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:22.908847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:22.908878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:22.908948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:22.908979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:22.909000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:22.909035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:22.909066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:22.909103Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:22.909251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-07T13:46:22.909983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=234; 2025-04-07T13:46:22.910802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=563; 2025-04-07T13:46:22.911545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=260; 2025-04-07T13:46:22.939091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:22.939254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:22.939306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:22.939589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:22.939646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_swi ... 
count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.509322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:796:2796] finished for tablet 9437184 2025-04-07T13:47:45.509370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:796:2796] send ScanData to [1:795:2795] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:45.509952Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:796:2796] and sent to [1:795:2795] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.035},{"events":["f_processing","f_task_result"],"t":0.046},{"events":["f_ack","l_task_result"],"t":3.298},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":3.302}],"full":{"a":1744033662206759,"name":"_full_task","f":1744033662206759,"d_finished":0,"c":0,"l":1744033665509680,"d":3302921},"events":[{"name":"bootstrap","f":1744033662207265,"d_finished":35241,"c":1,"l":1744033662242506,"d":35241},{"a":1744033665509192,"name":"ack","f":1744033665505650,"d_finished":2931,"c":3,"l":1744033665509137,"d":3419},{"a":1744033665509185,"name":"processing","f":1744033662253328,"d_finished":1157192,"c":24,"l":1744033665509139,"d":1157687},{"name":"ProduceResults","f":1744033662210516,"d_finished":23100,"c":29,"l":1744033665509314,"d":23100},{"a":1744033665509315,"name":"Finish","f":1744033665509315,"d_finished":0,"c":0,"l":1744033665509680,"d":365},{"name":"task_result","f":1744033662253347,"d_finished":1153941,"c":21,"l":1744033665505524,"d":1153941}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:796:2796]->[1:795:2795] 2025-04-07T13:47:45.510009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:42.206358Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2025-04-07T13:47:45.510280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:45.510690Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:2.046007s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=1.742526s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.153635s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.614494s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.034549s;size=0;details={};};]};; 2025-04-07T13:47:45.510750Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:45.526044Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T13:47:45.526213Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-07T13:47:45.526301Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:45.526447Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:45.526506Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:45.526916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:803:2803];trace_detailed=; 2025-04-07T13:47:45.527217Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:45.527377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:45.527516Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.527616Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.527782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:45.527852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.527942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:45.527987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:803:2803] finished for tablet 9437184 2025-04-07T13:47:45.528060Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:803:2803] send ScanData to [1:802:2802] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:45.528376Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:803:2803] and sent to [1:802:2802] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744033665526870,"name":"_full_task","f":1744033665526870,"d_finished":0,"c":0,"l":1744033665528098,"d":1228},"events":[{"name":"bootstrap","f":1744033665526982,"d_finished":657,"c":1,"l":1744033665527639,"d":657},{"a":1744033665527767,"name":"ack","f":1744033665527767,"d_finished":0,"c":0,"l":1744033665528098,"d":331},{"a":1744033665527756,"name":"processing","f":1744033665527756,"d_finished":0,"c":0,"l":1744033665528098,"d":342},{"name":"ProduceResults","f":1744033665527449,"d_finished":359,"c":2,"l":1744033665527971,"d":359},{"a":1744033665527974,"name":"Finish","f":1744033665527974,"d_finished":0,"c":0,"l":1744033665528098,"d":124}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got 
TEvKqpCompute::TEvScanData [1:803:2803]->[1:802:2802] 2025-04-07T13:47:45.528458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:45.526567Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:45.528499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:45.528534Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:45.528617Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 >> Viewer::SelectStringWithBase64Encoding >> Viewer::JsonAutocompleteStartOfDatabaseName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034180.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032980.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032980.000000s;Name=;Codec=}; 2025-04-07T13:46:22.654629Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.960527Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.975940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.976011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.976339Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.982911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.983098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.983362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.983468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.983564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.983671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.983780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:22.983936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.984049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.984166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.984255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.984357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.001624Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.001684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.003477Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.003607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.003658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.003875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.004092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.004167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.004206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.004305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.004372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.004411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.004438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.004599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.004676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.004719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.004746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.004839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.004897Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.004947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.004979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.005042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.005074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.005097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.005141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.005176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.005203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.005373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T13:46:23.005479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T13:46:23.005560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-07T13:46:23.005651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-04-07T13:46:23.005804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.005860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.005894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:23.006090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:23.006145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switch ... 
CUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=64; 2025-04-07T13:47:46.221149Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:46.221203Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=114; 2025-04-07T13:47:46.221315Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=71; 2025-04-07T13:47:46.221425Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=68; 2025-04-07T13:47:46.221623Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=143; 2025-04-07T13:47:46.222168Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=473; 2025-04-07T13:47:46.222246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=31; 2025-04-07T13:47:46.222307Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25; 2025-04-07T13:47:46.222349Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-07T13:47:46.222389Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:47:46.222437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:47:46.222517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2025-04-07T13:47:46.222561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-07T13:47:46.222635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-04-07T13:47:46.222672Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:47:46.222726Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 
2025-04-07T13:47:46.222759Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15243; 2025-04-07T13:47:46.222902Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=12;blobs=18;rows=320000;bytes=18990624;raw_bytes=32169236; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:46.222999Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:46.223063Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:46.223095Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:46.223169Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:46.223265Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:46.223299Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:46.223370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:46.223406Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:46.223457Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:46.223522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=3; 2025-04-07T13:47:46.223577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:46.223615Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:46.223660Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:46.223692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:46.223731Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:46.223824Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:46.224511Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:46.224734Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1411:3280];tablet_id=9437184;parent=[1:1372:3248];fline=manager.h:99;event=ask_data;request=request_id=102;1={portions_count=12};; 2025-04-07T13:47:46.229469Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:46.229762Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:46.229796Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:46.229819Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:46.229863Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:46.229924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:46.230009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=3; 2025-04-07T13:47:46.230084Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:46.230140Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:46.230185Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:46.230219Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:46.230258Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:46.230353Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:46.230960Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2025-04-07T13:47:46.231417Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2025-04-07T13:47:46.232325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:46.232371Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034183.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032983.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:23.396394Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2025-04-07T13:46:23.505659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.529045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.529125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.529388Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.538877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.539111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.539487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.539614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.539724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.539823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.539916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.540023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.540128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.540239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.540326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.540423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.566666Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.566742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.570184Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.570349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.570404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.570594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.570760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.570840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.570881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.570980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.571044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.571085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.571122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.571284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.571347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.571390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.571423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.571512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.571560Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.571599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.571639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.571726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.571764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.571791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.571837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.571873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.571905Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.572070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-07T13:46:23.572151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-07T13:46:23.572229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-07T13:46:23.572314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-07T13:46:23.572487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.572554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.572592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.362815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] finished for tablet 9437184 2025-04-07T13:47:46.362864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] send ScanData to [1:938:2938] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:46.363155Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:939:2939] and sent to [1:938:2938] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.357},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.367}],"full":{"a":1744033665994943,"name":"_full_task","f":1744033665994943,"d_finished":0,"c":0,"l":1744033666362893,"d":367950},"events":[{"name":"bootstrap","f":1744033665995058,"d_finished":3477,"c":1,"l":1744033665998535,"d":3477},{"a":1744033666362620,"name":"ack","f":1744033666352225,"d_finished":9684,"c":3,"l":1744033666362570,"d":9957},{"a":1744033666362613,"name":"processing","f":1744033665999842,"d_finished":164449,"c":24,"l":1744033666362571,"d":164729},{"name":"ProduceResults","f":1744033665996461,"d_finished":12914,"c":29,"l":1744033666362805,"d":12914},{"a":1744033666362807,"name":"Finish","f":1744033666362807,"d_finished":0,"c":0,"l":1744033666362893,"d":86},{"name":"task_result","f":1744033665999854,"d_finished":154480,"c":21,"l":1744033666352100,"d":154480}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:939:2939]->[1:938:2938] 2025-04-07T13:47:46.363210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:45.994651Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:46.363239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:46.363378Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.198536s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.191500s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.036846s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.016531s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T13:47:46.363432Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:46.364615Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T13:47:46.364764Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T13:47:46.364832Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:46.364966Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:46.365018Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:46.365357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:946:2946];trace_detailed=; 2025-04-07T13:47:46.365613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:46.365763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:46.365902Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.365996Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce 
result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.366136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:46.366216Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.366303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.366334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2025-04-07T13:47:46.366386Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:46.366664Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744033666365321,"name":"_full_task","f":1744033666365321,"d_finished":0,"c":0,"l":1744033666366417,"d":1096},"events":[{"name":"bootstrap","f":1744033666365409,"d_finished":608,"c":1,"l":1744033666366017,"d":608},{"a":1744033666366123,"name":"ack","f":1744033666366123,"d_finished":0,"c":0,"l":1744033666366417,"d":294},{"a":1744033666366113,"name":"processing","f":1744033666366113,"d_finished":0,"c":0,"l":1744033666366417,"d":304},{"name":"ProduceResults","f":1744033666365833,"d_finished":352,"c":2,"l":1744033666366322,"d":352},{"a":1744033666366323,"name":"Finish","f":1744033666366323,"d_finished":0,"c":0,"l":1744033666366417,"d":94}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 
2025-04-07T13:47:46.366734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:46.365058Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:46.366771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:46.366818Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:46.366902Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseName >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144034184.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034184.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034184.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034184.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034184.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034184.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034184.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032984.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034184.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034184.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032984.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032984.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032984.000000s;Name=;Codec=}; 2025-04-07T13:46:27.343219Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:28.007865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:28.106185Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:28.106463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:28.108864Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:28.122258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:28.122467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:28.122689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:28.122797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:28.122893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:28.123025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:28.123154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:28.123265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:28.123383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:28.123473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:28.123559Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:28.123663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:28.152339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:28.152388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:28.160521Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:28.160874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:28.161178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:28.161711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:28.162588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:28.162862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:28.163015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:28.163296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:28.163535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:28.163567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:28.163702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:28.164581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:28.164870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:28.164906Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:28.164941Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:28.165111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:28.165255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:28.165508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:28.165531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:28.165675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:28.165707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:28.165838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:28.165995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:28.166029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:28.166051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:28.166878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=179; 2025-04-07T13:46:28.167705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=590; 2025-04-07T13:46:28.169202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=452; 2025-04-07T13:46:28.170211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=517; 2025-04-07T13:46:28.171068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:28.171681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:28.171714Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184 ... ,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.352329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2915] finished for tablet 9437184 2025-04-07T13:47:46.352656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2915] send ScanData to [1:914:2914] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:46.354107Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:915:2915] and sent to [1:914:2914] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.01},{"events":["l_task_result"],"t":0.654},{"events":["f_ack"],"t":0.655},{"events":["l_ProduceResults","f_Finish"],"t":0.662},{"events":["l_ack","l_processing","l_Finish"],"t":0.663}],"full":{"a":1744033665689760,"name":"_full_task","f":1744033665689760,"d_finished":0,"c":0,"l":1744033666352961,"d":663201},"events":[{"name":"bootstrap","f":1744033665689876,"d_finished":10055,"c":1,"l":1744033665699931,"d":10055},{"a":1744033666351898,"name":"ack","f":1744033666344956,"d_finished":3721,"c":3,"l":1744033666351134,"d":4784},{"a":1744033666351886,"name":"processing","f":1744033665699982,"d_finished":243933,"c":24,"l":1744033666351136,"d":245008},{"name":"ProduceResults","f":1744033665692532,"d_finished":11807,"c":29,"l":1744033666352316,"d":11807},{"a":1744033666352318,"name":"Finish","f":1744033666352318,"d_finished":0,"c":0,"l":1744033666352961,"d":643},{"name":"task_result","f":1744033665699994,"d_finished":239894,"c":21,"l":1744033666344028,"d":239894}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:915:2915]->[1:914:2914] 2025-04-07T13:47:46.354437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:45.689428Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:46.354724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:46.354852Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.402848s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.218329s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.064502s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.146515s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.022005s;size=0;details={};};]};; 2025-04-07T13:47:46.354912Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:46.407499Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T13:47:46.407664Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T13:47:46.407754Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:46.408119Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:46.408174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:46.408545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:922:2922];trace_detailed=; 2025-04-07T13:47:46.409089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:46.409759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:46.410115Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.411118Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.411568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:46.411645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.412305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:46.412760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:922:2922] finished for tablet 9437184 2025-04-07T13:47:46.413412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:922:2922] send ScanData to [1:921:2921] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:46.415602Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:922:2922] and sent to [1:921:2921] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing"],"t":0.002},{"events":["f_ack"],"t":0.003},{"events":["l_ProduceResults","f_Finish"],"t":0.004},{"events":["l_ack","l_processing","l_Finish"],"t":0.005}],"full":{"a":1744033666408504,"name":"_full_task","f":1744033666408504,"d_finished":0,"c":0,"l":1744033666413658,"d":5154},"events":[{"name":"bootstrap","f":1744033666408880,"d_finished":2266,"c":1,"l":1744033666411146,"d":2266},{"a":1744033666411551,"name":"ack","f":1744033666411551,"d_finished":0,"c":0,"l":1744033666413658,"d":2107},{"a":1744033666411280,"name":"processing","f":1744033666411280,"d_finished":0,"c":0,"l":1744033666413658,"d":2378},{"name":"ProduceResults","f":1744033666409832,"d_finished":2261,"c":2,"l":1744033666412538,"d":2261},{"a":1744033666412540,"name":"Finish","f":1744033666412540,"d_finished":0,"c":0,"l":1744033666413658,"d":1118}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:922:2922]->[1:921:2921] 2025-04-07T13:47:46.415982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:46.408215Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:46.416262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:46.416523Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:46.417079Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpImmediateEffects::InsertExistingKey >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> KqpInplaceUpdate::SingleRowStr >> KqpImmediateEffects::UpdateAfterUpsert >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032961.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-07T13:46:17.304091Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:18.848853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:18.927591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:18.927655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:18.930357Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:19.006748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:19.006970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:19.007187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:19.007282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:19.007374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:19.007502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:19.007596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:19.008006Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:19.008337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:19.008780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:19.008870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:19.009465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:19.071551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:19.071987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:19.085030Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:19.085684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:19.086097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:19.087427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:19.093378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:19.093822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:19.094023Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:19.094449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:19.094924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:19.095142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:19.095164Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:19.097083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:19.097551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:19.097795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:19.097819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:19.098281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:19.098326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:19.098364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:19.098390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:19.098439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:19.098464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:19.098667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:19.098701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:19.098730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:19.098751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:19.100132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=245; 2025-04-07T13:46:19.101076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=675; 2025-04-07T13:46:19.102149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=652; 2025-04-07T13:46:19.102798Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=233; 2025-04-07T13:46:19.103758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:19.103973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:19.104211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:19.105499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:19.105530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:19.105551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:19.106794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... =actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26668; 2025-04-07T13:47:48.016798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=timestamp; 2025-04-07T13:47:48.016954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:776:2774] send ScanData to [1:775:2773] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:776:2774]->[1:775:2773] 2025-04-07T13:47:48.017042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017124Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017149Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:47:48.017176Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:47:48.017268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:48.017339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:47:48.017426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26664; 2025-04-07T13:47:48.017461Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=timestamp; 2025-04-07T13:47:48.017614Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:776:2774] send ScanData to [1:775:2773] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:776:2774]->[1:775:2773] 2025-04-07T13:47:48.017703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.017930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:48.017993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.018049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:48.018074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:776:2774] finished for tablet 9437184 2025-04-07T13:47:48.018132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:776:2774] send ScanData to [1:775:2773] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:48.018464Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:776:2774] and sent to [1:775:2773] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack","l_task_result"],"t":0.652},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.657}],"full":{"a":1744033667360723,"name":"_full_task","f":1744033667360723,"d_finished":0,"c":0,"l":1744033668018162,"d":657439},"events":[{"name":"bootstrap","f":1744033667360825,"d_finished":6460,"c":1,"l":1744033667367285,"d":6460},{"a":1744033668017918,"name":"ack","f":1744033668013695,"d_finished":3836,"c":6,"l":1744033668017863,"d":4080},{"a":1744033668017910,"name":"processing","f":1744033667369339,"d_finished":361948,"c":48,"l":1744033668017865,"d":362200},{"name":"ProduceResults","f":1744033667363282,"d_finished":9912,"c":56,"l":1744033668018063,"d":9912},{"a":1744033668018065,"name":"Finish","f":1744033668018065,"d_finished":0,"c":0,"l":1744033668018162,"d":97},{"name":"task_result","f":1744033667369352,"d_finished":357515,"c":42,"l":1744033668013554,"d":357515}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:776:2774]->[1:775:2773] 2025-04-07T13:47:48.018528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:47.360415Z;index_granules=0;index_portions=6;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9495312;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9495312;selected_rows=0; 2025-04-07T13:47:48.018555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:48.018685Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.288077s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.502415s;size=0.004421752;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.032988s;size=0.00256;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.108198s;size=0.00638667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.013229s;size=0;details={};};]};; 2025-04-07T13:47:48.018744Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:776:2774];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 
160000/9495312 0/0 160000/9495312 |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |83.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |83.9%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-04-07T13:46:22.995759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567198915538883:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:22.995877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbf/r3tmp/tmpzV4yPk/pdisk_1.dat 2025-04-07T13:46:23.775863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.776086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.777184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:23.785836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20063, node 1 2025-04-07T13:46:25.691794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:25.691817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:25.691822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:25.692124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:28.001523Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567198915538883:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:28.001580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:29268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:29.809357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:29.835337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:29.841917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:29.856555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-04-07T13:46:29.867161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:29.867194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:46:29.883333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:46:29.890780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:29.891050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:46:29.907101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:29.947404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033589970, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:29.947443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:46:29.947825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:46:29.954940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:29.955213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:29.955836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:46:29.956355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2025-04-07T13:46:29.956602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:46:29.956642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:46:29.983133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:46:29.983470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:46:29.983489Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:29.984253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:46:38.779660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:38.779682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:11.014311Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567411263284242:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:11.014961Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbf/r3tmp/tmpl6sRxs/pdisk_1.dat 2025-04-07T13:47:11.137918Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:11.174743Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:11.174829Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:11.179150Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29526, node 4 2025-04-07T13:47:11.247966Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:11.247981Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:11.247989Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:11.248068Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:12.139519Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:12.139836Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:47:12.139865Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:12.142589Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:47:12.142740Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:47:12.142751Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:47:12.145560Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:47:12.145582Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:47:12.147955Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:12.148306Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:47:12.160535Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033632201, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:47:12.160570Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:47:12.161679Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:47:12.164771Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:47:12.164909Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:47:12.164953Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:47:12.165007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 
2025-04-07T13:47:12.165041Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:47:12.165074Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:47:12.166478Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:47:12.166507Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:47:12.166520Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:47:12.166571Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:47:16.019713Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490567411263284242:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:16.019773Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:26.130781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:47:26.130808Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034180.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034180.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034180.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034180.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032980.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032980.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032980.000000s;Name=;Codec=}; 2025-04-07T13:46:22.859562Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.987485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.007842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.007915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.008142Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.016140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.016351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.016557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.016695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.016798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.016894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.016993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.017237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.017371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.017489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.017585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.017694Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.040538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.040596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.042633Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.042765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.042814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.043000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.043184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.043259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.043297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.043397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.043456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.043498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.043529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.043710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.043770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.043807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.043836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.043931Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.043981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.044023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.044052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.044133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.044174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.044202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.044245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.044279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.044306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.044472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:46:23.044555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-07T13:46:23.044625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-07T13:46:23.044744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=72; 2025-04-07T13:46:23.044889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.044971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.045006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
mposite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=59; 2025-04-07T13:47:49.979018Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:49.979062Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=141; 2025-04-07T13:47:49.979205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=103; 2025-04-07T13:47:49.979387Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=145; 2025-04-07T13:47:49.979812Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=381; 2025-04-07T13:47:49.981482Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1586; 2025-04-07T13:47:49.981954Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=109; 2025-04-07T13:47:49.982008Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=20; 2025-04-07T13:47:49.982181Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:47:49.982219Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:47:49.982383Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:47:49.982698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=203; 2025-04-07T13:47:49.982737Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:47:49.982990Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=142; 2025-04-07T13:47:49.983024Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:49.983223Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=174; 
2025-04-07T13:47:49.983250Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=24444; 2025-04-07T13:47:49.983364Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:49.983543Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:49.983780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:49.983922Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:49.984089Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:49.984177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:49.984209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:49.984371Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:49.984491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:49.984537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:49.984709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:49.984758Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:49.984793Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:49.984834Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:49.984862Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:49.984898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:49.985044Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:49.986318Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:49.986454Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1573:3442];tablet_id=9437184;parent=[1:1532:3408];fline=manager.h:99;event=ask_data;request=request_id=128;1={portions_count=21};; 2025-04-07T13:47:49.989839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:49.990059Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:49.990093Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:49.990117Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:49.990156Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:49.990218Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:49.990292Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:49.990358Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:49.990398Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:49.990450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:49.990491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:49.990543Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:49.990628Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:49.990982Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:47:49.991755Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:47:49.995259Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:49.995327Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpImmediateEffects::Upsert >> KqpWrite::Insert >> KqpImmediateEffects::ReplaceDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] Test command err: 2025-04-07T13:46:09.456517Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:10.914181Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:46:11.126960Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:46:11.129595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:11.326463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:11.327512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:11.336020Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:11.346375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:11.346594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:11.346787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:11.346894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-07T13:46:11.347003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:11.347114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:11.347217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:11.347323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:11.347421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:11.347553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.347654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:11.347774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:11.410854Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:46:11.411149Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T13:46:11.411225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:11.411264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:11.416514Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:11.416981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:11.417242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:11.417931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.418698Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:11.418929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:11.419049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:11.419618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:11.419768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:11.420157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:11.420272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:11.421246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:11.421404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:11.421441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:11.421467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:11.421991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:11.422208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:11.422409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:11.422437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:11.422575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:11.422693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:11.422718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-07T13:46:11.422752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:11.422872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:11.422976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:11.423646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=211; 2025-04-07T13:46:11.424036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=277; 2025-04-07T13:46:11.424423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=106; 2025-04-07T13:46:11.426538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=1981; 2025-04-07T13:46:11.427292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:11.427432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:11.427459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:11.429006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:11.429047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.429157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:11.429605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:11.429729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_ ... 
unks=0);; 2025-04-07T13:47:50.249048Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:47:50.249091Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:47:50.249129Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=16; 2025-04-07T13:47:50.249173Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2025-04-07T13:47:50.249216Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:47:50.249308Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.249344Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T13:47:50.249381Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:47:50.249903Z node 4 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [4:275:2287], Recipient [4:106:2138]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-07T13:47:50.249936Z node 4 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T13:47:50.250201Z node 4 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T13:47:50.252800Z node 4 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2025-04-07T13:47:50.252914Z node 4 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9781736;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:50.252954Z node 4 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[4:106:2138];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=e40fd61a-13b611f0-86db1877-bc442a4;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:50.253315Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:50.253455Z node 
4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.253501Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:47:50.253587Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2025-04-07T13:47:50.253651Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2025-04-07T13:47:50.253738Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2025-04-07T13:47:50.253843Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.253945Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.254019Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.254139Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:50.254216Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.254288Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-07T13:47:50.254316Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2025-04-07T13:47:50.254372Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:50.254722Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.102},{"events":["f_ack"],"t":0.106},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.107}],"full":{"a":1744033670147184,"name":"_full_task","f":1744033670147184,"d_finished":0,"c":0,"l":1744033670254405,"d":107221},"events":[{"name":"bootstrap","f":1744033670147721,"d_finished":2129,"c":1,"l":1744033670149850,"d":2129},{"a":1744033670254123,"name":"ack","f":1744033670253293,"d_finished":744,"c":1,"l":1744033670254037,"d":1026},{"a":1744033670254112,"name":"processing","f":1744033670150185,"d_finished":2840,"c":5,"l":1744033670254040,"d":3133},{"name":"ProduceResults","f":1744033670148857,"d_finished":1578,"c":8,"l":1744033670254306,"d":1578},{"a":1744033670254308,"name":"Finish","f":1744033670254308,"d_finished":0,"c":0,"l":1744033670254405,"d":97},{"name":"task_result","f":1744033670150199,"d_finished":2010,"c":4,"l":1744033670249431,"d":2010}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2025-04-07T13:47:50.254782Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:50.146218Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T13:47:50.254813Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:50.254904Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.098354s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.091703s;size=0.0063152;details={columns=9;};};]};; 2025-04-07T13:47:50.254966Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-04-07T13:47:36.854772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:36.869524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:36.871622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a58/r3tmp/tmpVLeWat/pdisk_1.dat 2025-04-07T13:47:39.101320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:47:39.191841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:39.328284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:39.331947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:39.383559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:39.850519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:44.907812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2740], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:44.917127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:904:2745], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:44.917948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:44.959369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:47:45.251057Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T13:47:45.553725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:908:2748], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:47:47.113129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr883ktd00cf4sfjyxx5e0xr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMxY2ExMGEtZjJiZDg3NjktMmM1M2E5NWMtNTliOGI3Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:47:47.252765Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr883p24ajs7j6bp36mvygct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE5ZTg5YTItZDM5MjM2OWItZDA0OTk5MWQtNDJiODkyMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:47:49.855706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr883pn9bkw6pmrh2fh3zdbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ5MjI0YTMtZTYyZTVjNy1jZWU2OTY2Yi02ZjkwOGE5ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpImmediateEffects::TxWithReadAtTheEnd >> KqpImmediateEffects::Replace >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034183.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=124034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=124032983.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:25.078804Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:26.380066Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:26.400444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:26.400507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:26.402408Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:26.448052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:26.448274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:26.448483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:26.448580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:26.448785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:26.448913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:26.449010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:26.449117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:26.449228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:26.449320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:26.449424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:26.449546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:26.499431Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:26.499512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:26.501698Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:26.501835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:26.501884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:26.502027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.502146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:26.502217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:26.502252Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:26.502338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:26.502397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:26.502439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:26.502474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:26.502609Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.502656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:26.502688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:26.502710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:26.502775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:26.502813Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:26.502848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:26.502870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:26.502924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:26.502957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:26.502995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:26.503040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:26.503070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:26.503090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:26.503226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-07T13:46:26.503301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T13:46:26.503359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-07T13:46:26.503965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=392; 2025-04-07T13:46:26.505151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:26.505626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:26.505872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
ECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=49; 2025-04-07T13:47:50.290811Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:50.290848Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2025-04-07T13:47:50.290956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=74; 2025-04-07T13:47:50.291072Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=78; 2025-04-07T13:47:50.291263Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=155; 2025-04-07T13:47:50.291758Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=455; 2025-04-07T13:47:50.291829Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=23; 2025-04-07T13:47:50.291881Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T13:47:50.291915Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T13:47:50.291947Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T13:47:50.291981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:47:50.292046Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-04-07T13:47:50.292082Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:47:50.292150Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=41; 2025-04-07T13:47:50.292194Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:50.292267Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 
2025-04-07T13:47:50.292299Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13861; 2025-04-07T13:47:50.292411Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:50.292489Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:50.292533Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:50.292565Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.292637Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.292766Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:50.292801Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:50.292861Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:50.292898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.292947Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.293003Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:50.293052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.293088Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.293127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.293178Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.293214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.293281Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.293989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.294142Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1573:3442];tablet_id=9437184;parent=[1:1532:3408];fline=manager.h:99;event=ask_data;request=request_id=128;1={portions_count=21};; 2025-04-07T13:47:50.295440Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:50.295535Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:50.295560Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:50.295582Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:50.295620Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.295672Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.295733Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:50.295790Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.295828Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.295871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.295905Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.295952Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.296025Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.296444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:47:50.297189Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:47:50.298735Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:50.298780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032983.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:23.973507Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2025-04-07T13:46:25.865892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:26.080883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:26.081392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:26.083854Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:26.280179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:26.283106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:26.284995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:26.285746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:26.285833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:26.286407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:26.286972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:26.287552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:26.287882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:26.287962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:26.288262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:26.288576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:26.367122Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:26.367601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:26.395055Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:26.395691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:26.395764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:26.395980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.396127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:26.396204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:26.396244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:26.396342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:26.396413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:26.396456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:26.396485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:26.396678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.396734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:26.396771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:26.396812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:26.396899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:26.396949Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:26.396998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:26.397027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:26.397102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:26.397142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:26.397171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:26.397218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:26.397252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:26.397277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:26.397445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-07T13:46:26.397530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-07T13:46:26.397590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-07T13:46:26.397665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-07T13:46:26.398354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:26.398575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:26.398607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Ex ... 
CUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=56; 2025-04-07T13:47:50.748068Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:50.748108Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=107; 2025-04-07T13:47:50.748243Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=84; 2025-04-07T13:47:50.748345Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 2025-04-07T13:47:50.748545Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=167; 2025-04-07T13:47:50.749308Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=722; 2025-04-07T13:47:50.749383Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=25; 2025-04-07T13:47:50.749468Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 2025-04-07T13:47:50.749508Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T13:47:50.749547Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T13:47:50.749581Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:47:50.749662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-04-07T13:47:50.749717Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:47:50.749826Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-04-07T13:47:50.749866Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:50.749917Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 
2025-04-07T13:47:50.749960Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17212; 2025-04-07T13:47:50.750080Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:50.750178Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:50.750229Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:50.750258Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.750326Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.750424Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:50.750467Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:50.750537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:50.750704Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.750755Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.750815Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:50.750878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.750935Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.750982Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.751011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.751046Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.751127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.751934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.752297Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1573:3442];tablet_id=9437184;parent=[1:1532:3408];fline=manager.h:99;event=ask_data;request=request_id=128;1={portions_count=21};; 2025-04-07T13:47:50.752649Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:50.753342Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:50.753373Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:50.753396Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:50.753442Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.753510Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.753586Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:50.753654Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.753696Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.753741Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.753774Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.753842Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.753927Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.754571Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:47:50.755506Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:47:50.757272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:50.757317Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034183.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034183.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034183.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032983.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032983.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032983.000000s;Name=;Codec=}; 2025-04-07T13:46:25.268801Z node 1 :BLOB_CACHE NOTICE: 
MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:26.455063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:26.474861Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:26.474924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:26.475140Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:26.501155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:26.501386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:26.501598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:26.501701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:26.501806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:26.501901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:26.501989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:26.502094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:26.502210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:26.502297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:26.502390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:26.502661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:26.560903Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:26.561185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:26.574636Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:26.575640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:26.576264Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:26.578171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.578958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:26.579423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:26.579626Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:26.580350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:26.581064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:26.581109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:26.581532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:26.583020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:26.583305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:26.583345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:26.583375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:26.583778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:26.584025Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:26.584421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:26.584450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:26.584716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:26.584759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:26.584787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:26.585032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:26.585065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:26.585314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:26.586512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=225; 2025-04-07T13:46:26.587371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=418; 2025-04-07T13:46:26.588398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=611; 2025-04-07T13:46:26.589293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=457; 2025-04-07T13:46:26.590165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:26.590856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:26.590890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpda ... 
XECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=56; 2025-04-07T13:47:50.787571Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:50.787609Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=95; 2025-04-07T13:47:50.787710Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=69; 2025-04-07T13:47:50.787806Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 2025-04-07T13:47:50.788017Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=172; 2025-04-07T13:47:50.788526Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=466; 2025-04-07T13:47:50.788592Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=23; 2025-04-07T13:47:50.788646Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-07T13:47:50.788699Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:47:50.788739Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:47:50.788776Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:47:50.788837Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-07T13:47:50.788876Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-07T13:47:50.788948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=40; 2025-04-07T13:47:50.788989Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:47:50.789043Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 
2025-04-07T13:47:50.789076Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15091; 2025-04-07T13:47:50.789198Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:50.789290Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:50.789333Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:50.789364Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.789451Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:50.789547Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:50.789579Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:50.789653Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:50.789691Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.789741Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.789803Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:50.789856Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.789895Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.789941Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.789973Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.790009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.790079Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.790446Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.790708Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1529:3398];tablet_id=9437184;parent=[1:1490:3366];fline=manager.h:99;event=ask_data;request=request_id=122;1={portions_count=21};; 2025-04-07T13:47:50.790973Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:50.791279Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:50.791310Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:50.791331Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:50.791366Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:50.791412Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:50.791456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:47:50.791501Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:50.791537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:50.791573Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.791603Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:50.791642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:50.791703Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:50.797488Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:47:50.798198Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:47:50.799685Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:50.799728Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034182.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034182.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034182.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034182.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034182.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034182.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032982.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034182.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034182.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032982.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032982.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032982.000000s;Name=;Codec=}; 2025-04-07T13:46:23.262891Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:23.412267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:23.436955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:23.437137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:23.438193Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:23.481389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:23.481631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:23.481866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:23.481957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:23.482051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:23.482136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:23.482228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:23.482333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:23.482441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:23.482526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:23.482607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:23.482695Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:23.505754Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:23.505855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:23.509621Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:23.509765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:23.509819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:23.509984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.510139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:23.510219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:23.510261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:23.510353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:23.510410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:23.510448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:23.510481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:23.510627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:23.510686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:23.510723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:23.510754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:23.510830Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:23.510872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:23.510908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:23.510939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:23.511026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:23.511061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:23.511088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:23.511128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:23.511161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:23.511189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:23.511352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T13:46:23.511434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-07T13:46:23.511523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-07T13:46:23.511600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-07T13:46:23.511759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:23.511830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:23.511864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
CUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=50; 2025-04-07T13:47:52.314490Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:52.314541Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=103; 2025-04-07T13:47:52.314664Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=87; 2025-04-07T13:47:52.314760Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2025-04-07T13:47:52.314966Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=172; 2025-04-07T13:47:52.315458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=449; 2025-04-07T13:47:52.315526Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=25; 2025-04-07T13:47:52.315581Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T13:47:52.315615Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:47:52.315652Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:47:52.315695Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:47:52.315771Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-04-07T13:47:52.315818Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:47:52.315903Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-04-07T13:47:52.315940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:47:52.315988Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 
2025-04-07T13:47:52.316016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=14524; 2025-04-07T13:47:52.316162Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:52.316256Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:52.316306Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:47:52.316338Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:52.316417Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:52.316510Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:52.316544Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:52.316605Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:52.316653Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:52.316734Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:52.316795Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:52.316859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:52.316904Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:52.316959Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:52.316995Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:52.317030Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:52.317100Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:52.317536Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:52.317815Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1573:3442];tablet_id=9437184;parent=[1:1532:3408];fline=manager.h:99;event=ask_data;request=request_id=128;1={portions_count=21};; 2025-04-07T13:47:52.318690Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:52.324187Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:52.324248Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:47:52.324276Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:52.324346Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:52.324418Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:52.324505Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T13:47:52.324575Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:47:52.324619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:52.324700Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:52.324745Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:52.324788Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:52.324875Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:47:52.325309Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:47:52.326082Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:47:52.327766Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:52.327808Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1532:3408];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2025-04-07T13:46:21.975303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567194839554190:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:21.975367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc0/r3tmp/tmp6mat63/pdisk_1.dat 2025-04-07T13:46:23.150227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:46:23.397820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:23.431695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:23.431813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:23.435443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5392, node 1 2025-04-07T13:46:23.600610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:23.600644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:23.600651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:23.600748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21121 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-04-07T13:46:27.007130Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567194839554190:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:27.007186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:27.243063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.313821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:46:27.336650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.375947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:46:27.378435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:46:27.379842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:46:27.416730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:46:27.416961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:46:27.429963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:46:27.435540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:46:27.482171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033587506, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:46:27.482200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:46:27.482452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:46:27.513739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:46:27.513914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:46:27.513958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:46:27.514752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:46:27.515763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:46:27.515896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:46:27.538295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:46:27.538351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:46:27.538367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:46:27.538432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:46:38.344826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:38.344856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:21.617090Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567453187421590:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:21.617137Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc0/r3tmp/tmpDDKpMk/pdisk_1.dat 2025-04-07T13:47:21.861817Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9479, node 4 2025-04-07T13:47:21.958895Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:21.959012Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:21.993663Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:22.003080Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:22.003108Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:22.003115Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:22.003223Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:22.279940Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:22.280399Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:47:22.280433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:22.282956Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:47:22.283133Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:47:22.283155Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:47:22.285294Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:47:22.285321Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:47:22.286587Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:22.287584Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:22.291191Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033642337, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:47:22.291225Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:47:22.291495Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:47:22.293607Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:47:22.293817Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:47:22.293862Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:47:22.293936Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:47:22.293977Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:47:22.294021Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:47:22.295811Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:47:22.295850Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:47:22.295865Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:47:22.295947Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:47:26.631520Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490567453187421590:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:26.632188Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:36.861372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:47:36.861411Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 15835, MsgBus: 10594 2025-04-07T13:47:02.472391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:02.472646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:02.481872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001156/r3tmp/tmpM8txZS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15835, node 1 2025-04-07T13:47:06.545766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:06.572125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:06.572172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:06.572196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:06.572456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:06.630588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:06.630714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:06.649902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10594 TClient is connected to server localhost:10594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:09.565737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:09.696704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:09.907861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:10.411129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:10.769466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:12.443404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1774:3396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:12.444049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:12.572191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:12.963420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:13.391312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:13.875046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:14.372703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:15.087138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:15.913287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2323:3817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:15.913756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:15.914038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2328:3822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:15.968516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:16.218326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2330:3824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:47:20.557048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:47:21.310879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:47:21.780760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12693, MsgBus: 22421 2025-04-07T13:47:34.605621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:34.605950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:34.606022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001156/r3tmp/tmpACVcQD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12693, node 2 2025-04-07T13:47:36.762439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:36.765402Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:36.765664Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:36.765919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:36.767620Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:36.838731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:36.838904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:36.855280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22421 TClient is connected to server localhost:22421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:39.284839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:39.457229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:40.124112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:40.588305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:40.999089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:42.559247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1774:3393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:42.559389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:42.592043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:42.707970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:43.395953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:43.910358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:44.484719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:45.355305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:46.191654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2312:3803], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:46.191759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:46.192015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2317:3808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:46.197006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:46.457527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2319:3810], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:47:48.054256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:47:48.902736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:47:49.944260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] Test command err: 2025-04-07T13:47:40.882324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:40.882618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:40.882845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a53/r3tmp/tmpfW2a7p/pdisk_1.dat 2025-04-07T13:47:46.305376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:47:46.463439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:46.518462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:46.518627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:46.535375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:46.969900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:50.845468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2740], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:50.845602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:904:2745], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:50.845704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:50.850474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:47:50.877624Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T13:47:51.165227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:908:2748], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:47:52.694759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr883snveyv3hhhm119pv4c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNiZDM2MGEtZjViODY5NmQtZTRjOGE3MDctYzc4YTNhMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpInplaceUpdate::BigRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-04-07T13:47:45.749942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:45.750181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:45.750392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a44/r3tmp/tmpEdw4oS/pdisk_1.dat 2025-04-07T13:47:47.283958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:47:47.318850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:47.378743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:47.382107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:47.402639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:47.572082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:47:52.270608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2740], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:52.270735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:904:2745], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:52.270809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:52.275537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:47:52.304282Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T13:47:52.616010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:908:2748], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:47:54.643706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr883v2c6ag9gwa393ezjj2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RiZDQwMTktNWJkOGJlNWItOWZhM2Y5YTYtYWU4NzI0ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:47:55.210816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr883xfx1csp8ge8cwzp7v0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGYyYzIxZmYtZmM4NGZkMGUtZmE1MGRhMWQtM2Q2ZjRlZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:47:56.452599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr883y1g3p2rx0wkzm9fmg03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNhNDdhZTItNzYwZjY1ZmMtZmY1YWIzMWItNzVhZmU5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpImmediateEffects::DeleteAfterUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144034187.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034187.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034187.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034187.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034187.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034187.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144034187.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032987.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034187.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124034187.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032987.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032987.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124032987.000000s;Name=;Codec=}; 2025-04-07T13:46:30.354149Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:31.361396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:31.426676Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:31.427120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:31.429507Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:31.528339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:31.529933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:31.532001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:31.532332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:31.532639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:31.532785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:31.533096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:31.533923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:31.534512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:31.534589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:31.534657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:31.535204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:31.707557Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:31.707613Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:31.723982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:31.725974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:31.726021Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:31.726890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:31.728425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:31.728918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:31.729173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:31.729708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:31.729981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:31.730010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:31.730537Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:31.734245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:31.734546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:31.734867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:31.734894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:31.735202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:31.735464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:31.735492Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:31.735754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:31.736061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:31.736090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:31.736109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:31.736140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:31.736398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:31.736419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:31.741975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=283; 2025-04-07T13:46:31.743106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=850; 2025-04-07T13:46:31.743949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=310; 2025-04-07T13:46:31.747202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=273; 2025-04-07T13:46:31.748140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:31.748415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:31.748654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpda ... 
column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:56.758890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2915] finished for tablet 9437184 2025-04-07T13:47:56.758945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2915] send ScanData to [1:914:2914] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:56.759557Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:915:2915] and sent to [1:914:2914] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.02},{"events":["f_processing","f_task_result"],"t":0.028},{"events":["l_task_result"],"t":0.855},{"events":["f_ack"],"t":0.856},{"events":["l_ProduceResults","f_Finish"],"t":0.873},{"events":["l_ack","l_processing","l_Finish"],"t":0.874}],"full":{"a":1744033675884903,"name":"_full_task","f":1744033675884903,"d_finished":0,"c":0,"l":1744033676758975,"d":874072},"events":[{"name":"bootstrap","f":1744033675885030,"d_finished":20207,"c":1,"l":1744033675905237,"d":20207},{"a":1744033676758428,"name":"ack","f":1744033676741490,"d_finished":7357,"c":3,"l":1744033676758366,"d":7904},{"a":1744033676758421,"name":"processing","f":1744033675913195,"d_finished":296243,"c":24,"l":1744033676758368,"d":296797},{"name":"ProduceResults","f":1744033675890677,"d_finished":12496,"c":29,"l":1744033676758878,"d":12496},{"a":1744033676758880,"name":"Finish","f":1744033676758880,"d_finished":0,"c":0,"l":1744033676758975,"d":95},{"name":"task_result","f":1744033675913213,"d_finished":288617,"c":21,"l":1744033676740828,"d":288617}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:915:2915]->[1:914:2914] 2025-04-07T13:47:56.759913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:55.884566Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T13:47:56.759939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:56.760360Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.546942s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.571259s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.073975s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.133844s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.010980s;size=0;details={};};]};; 2025-04-07T13:47:56.760416Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:915:2915];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:56.821855Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T13:47:56.822037Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T13:47:56.822132Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:47:56.822266Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T13:47:56.822329Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T13:47:56.822724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:922:2922];trace_detailed=; 2025-04-07T13:47:56.823048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T13:47:56.824847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T13:47:56.824996Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:56.826660Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:56.827211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:47:56.827297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:56.840851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T13:47:56.841591Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:922:2922] finished for tablet 9437184 2025-04-07T13:47:56.842232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:922:2922] send ScanData to [1:921:2921] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:47:56.849790Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:922:2922] and sent to [1:921:2921] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.004},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1744033676822678,"name":"_full_task","f":1744033676822678,"d_finished":0,"c":0,"l":1744033676842856,"d":20178},"events":[{"name":"bootstrap","f":1744033676822804,"d_finished":4229,"c":1,"l":1744033676827033,"d":4229},{"a":1744033676827190,"name":"ack","f":1744033676827190,"d_finished":0,"c":0,"l":1744033676842856,"d":15666},{"a":1744033676827176,"name":"processing","f":1744033676827176,"d_finished":0,"c":0,"l":1744033676842856,"d":15680},{"name":"ProduceResults","f":1744033676824929,"d_finished":16441,"c":2,"l":1744033676841573,"d":16441},{"a":1744033676841577,"name":"Finish","f":1744033676841577,"d_finished":0,"c":0,"l":1744033676842856,"d":1279}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:922:2922]->[1:921:2921] 2025-04-07T13:47:56.850442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:47:56.822376Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:47:56.851085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:47:56.851701Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:47:56.864254Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:922:2922];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 |83.9%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Viewer::QueryExecuteScript [FAIL] >> Viewer::Plan2SvgOK >> KqpWrite::CastValuesOptional |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.9%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] |84.0%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::SingleRowSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 27305, MsgBus: 14550 2025-04-07T13:47:02.551393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:47:02.551659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:02.551904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001151/r3tmp/tmpqwFHh4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27305, node 1 2025-04-07T13:47:07.879076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:07.951685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:07.952004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:07.952029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:07.958569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:08.047464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:08.048400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:08.076975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14550 TClient is connected to server localhost:14550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:09.807493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:09.880259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:10.127642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:10.989088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:11.437107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:14.950111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1779:3400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:14.951398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:15.089399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:15.190433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:15.660170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:16.362189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:17.122567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:17.864238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:18.840389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2322:3814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:18.842558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:18.845274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2327:3819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:18.868360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:19.262993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2329:3821], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:47:21.493289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:47:21.810049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:47:22.239155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64731, MsgBus: 15288 2025-04-07T13:47:31.335536Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567496588243695:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:31.335579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001151/r3tmp/tmpwQ0G9A/pdisk_1.dat 2025-04-07T13:47:33.049862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:33.133173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:33.133868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:33.342598Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:33.353426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64731, node 2 2025-04-07T13:47:33.578553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:33.578682Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:33.578693Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:33.578971Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15288 TClient is connected to server localhost:15288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:34.748744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:34.773695Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:36.343479Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567496588243695:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:36.343556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:40.653009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567535242950005:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:40.653093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:40.653488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567535242950032:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:40.657708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:47:40.670072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567535242950034:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:47:40.815016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:47:40.876504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:47:47.397777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:48.209349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:47:48.209374Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:50.817132Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jr883a42bpq6s7ks345eznc2", SessionId: ydb://session/3?node_id=2&id=YmMxNjg0YjUtZGI1MzVlODItNTgwYjJhNTEtMmVmYmI2YTg=, Slow query, duration: 10.166644s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |84.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.0%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034161.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032961.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-07T13:46:22.249712Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:22.867840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:22.902915Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:22.902993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:22.903260Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:22.937060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:22.937226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:22.937375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:22.937445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:22.937538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:22.937603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:22.937665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-07T13:46:22.937732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:22.937839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:22.937945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.938040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:22.938138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:22.962490Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:22.962560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:22.964563Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:22.964715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:22.964764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:22.964946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.965093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:22.965160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:22.965199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:22.965293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:22.965368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:22.965413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-04-07T13:46:22.965441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:22.965672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:22.965744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:22.965781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:22.965809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:22.965900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:22.965948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:22.965999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:22.966029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:22.966110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:22.966151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:22.966184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:22.966230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:22.966266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:22.966292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:22.966448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:46:22.966522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-07T13:46:22.966597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 
2025-04-07T13:46:22.966701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=60; 2025-04-07T13:46:22.966847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:22.966906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:22.966945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:22.967116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:22.967157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.967213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:22.967363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLAS ... 5-04-07T13:47:59.507124Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=49; 2025-04-07T13:47:59.507203Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:47:59.507241Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=82; 2025-04-07T13:47:59.507337Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=62; 2025-04-07T13:47:59.507423Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=52; 2025-04-07T13:47:59.507594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=135; 2025-04-07T13:47:59.507956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=329; 2025-04-07T13:47:59.508017Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=23; 2025-04-07T13:47:59.508070Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T13:47:59.508108Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:47:59.508147Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:47:59.508187Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:47:59.508246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-07T13:47:59.508286Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:47:59.508359Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-04-07T13:47:59.508397Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T13:47:59.508450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2025-04-07T13:47:59.508480Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12198; 2025-04-07T13:47:59.508597Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=6;blobs=6;rows=160000;bytes=9495312;raw_bytes=16084618; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:47:59.508902Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:47:59.508950Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=cold; 2025-04-07T13:47:59.508987Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:47:59.509054Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:47:59.509224Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:47:59.509260Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:47:59.509323Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:47:59.509361Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:59.509414Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:59.509481Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:59.509533Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:59.509573Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:59.509615Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:59.509685Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:59.509722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:59.509787Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:59.510143Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:59.510213Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1283:3173];tablet_id=9437184;parent=[1:1246:3143];fline=manager.h:99;event=ask_data;request=request_id=82;1={portions_count=12};; 2025-04-07T13:47:59.511143Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:47:59.511238Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:47:59.511261Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:47:59.511284Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:47:59.511322Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:47:59.511373Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:47:59.511422Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T13:47:59.511472Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T13:47:59.511511Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:47:59.511553Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:59.511588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:47:59.511628Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:47:59.511692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:47:59.517824Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2025-04-07T13:47:59.518301Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2025-04-07T13:47:59.519189Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:47:59.519260Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1246:3143];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 0/0 160000/9495312 |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |84.0%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-04-07T13:48:00.749645Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.750844Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2025-04-07T13:48:00.751149Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.751359Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.751473Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.751563Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-07T13:48:00.751691Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:38:2056], server id = [3:38:2056], tablet id = 3, status = OK 2025-04-07T13:48:00.751751Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:38:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.751800Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-07T13:48:00.751823Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:00.751858Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-07T13:48:00.751897Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:00.752063Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-07T13:48:00.752147Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:38:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-07T13:48:00.752165Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:00.752222Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2025-04-07T13:48:00.752267Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.752294Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-07T13:48:00.752312Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:48:00.752408Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-07T13:48:00.752456Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-07T13:48:00.752483Z node 4 :STATISTICS DEBUG: Skip 
EvClientConnected 2025-04-07T13:48:00.752553Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-07T13:48:00.763616Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.763678Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.763724Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.763742Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.775795Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-07T13:48:00.775859Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-04-07T13:48:00.775892Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:00.775979Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.776003Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-07T13:48:00.776113Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.776135Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.776173Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.776217Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-04-07T13:48:00.912761Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.917157Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2025-04-07T13:48:00.917495Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.917732Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.917917Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:00.918029Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-07T13:48:00.918174Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:38:2056], server id = [3:38:2056], tablet id = 3, status = OK 2025-04-07T13:48:00.918243Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:38:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.918300Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-07T13:48:00.918319Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:00.918362Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-07T13:48:00.918398Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:00.918540Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-07T13:48:00.918622Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:38:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-07T13:48:00.918641Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:00.918694Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2025-04-07T13:48:00.918738Z node 4 
:STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:00.918764Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-07T13:48:00.918784Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:48:00.919226Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-07T13:48:00.919512Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-07T13:48:00.919760Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:00.920352Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-07T13:48:00.933387Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.933697Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.933738Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.933758Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.947280Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-07T13:48:00.947645Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-04-07T13:48:00.947978Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:00.948547Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.948760Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-07T13:48:00.957129Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.957169Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:48:00.957224Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:00.957247Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpEffects::InsertAbort_Literal_Success >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpImmediateEffects::UpsertAfterInsert [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict [GOOD] >> KqpEffects::DeletePkPrefixWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 26044, MsgBus: 62013 2025-04-07T13:47:47.011701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567559890854488:2112];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:47.014104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb6/r3tmp/tmpXAQYjY/pdisk_1.dat 2025-04-07T13:47:47.739261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:47.739395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:47.753378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T13:47:47.814715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26044, node 1 2025-04-07T13:47:49.184347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:49.184628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:49.184635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:49.185261Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62013 TClient is connected to server localhost:62013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:50.378007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:50.394870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:50.516828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:50.784404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:50.853364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:51.986286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567559890854488:2112];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:51.986660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:57.686627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567607135496463:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:57.687048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:58.086906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.118802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.193538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.227455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.280851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.330582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.419234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567611430464267:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:58.419311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:58.419646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567611430464272:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:58.423372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:47:58.454305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567611430464274:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:00.863161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.813625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:02.813953Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpInplaceUpdate::SingleRowStr [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144034192.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144034192.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034192.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034192.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124034192.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144034192.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144034192.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032992.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124034192.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124034192.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124032992.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124032992.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124032992.000000s;Name=;Codec=}; 2025-04-07T13:46:35.256985Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:36.244427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:36.266387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:36.266466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:36.266693Z node 
1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:36.273920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:36.274133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:36.274364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:36.274473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:36.274578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:36.274673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:36.274760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:36.274894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:36.274997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:36.275116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:36.275216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:36.275326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:36.297090Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:36.297151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:46:36.299080Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:36.299216Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:36.299258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:36.299403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:36.299535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:36.299603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:36.299636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:36.299717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:36.299768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:36.299802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:36.299837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:36.299979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:36.300036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:36.300071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:36.300097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:36.300172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:36.300213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:36.300250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:36.300274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:36.300332Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:36.300364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:36.300389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:36.300434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:36.300466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:36.300490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:36.300622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-07T13:46:36.300707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-07T13:46:36.300778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-07T13:46:36.300865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-07T13:46:36.300993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:36.301036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:36.301062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pro ... 
XECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=41; 2025-04-07T13:48:06.811337Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:48:06.811367Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2025-04-07T13:48:06.811448Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=54; 2025-04-07T13:48:06.811515Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2025-04-07T13:48:06.811662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=123; 2025-04-07T13:48:06.811994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=306; 2025-04-07T13:48:06.812042Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=20; 2025-04-07T13:48:06.812083Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=15; 2025-04-07T13:48:06.812111Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T13:48:06.812141Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:48:06.812170Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:48:06.812219Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=26; 2025-04-07T13:48:06.812250Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:48:06.812306Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=31; 2025-04-07T13:48:06.812337Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:48:06.812379Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=17; 
2025-04-07T13:48:06.812401Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=54831; 2025-04-07T13:48:06.812492Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:48:06.812555Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:48:06.812591Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T13:48:06.812618Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T13:48:06.833161Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:48:06.833272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:48:06.833301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:48:06.833354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:48:06.833386Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:48:06.833430Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:48:06.833481Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:48:06.833524Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:48:06.833556Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:48:06.833592Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:48:06.833617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:48:06.833654Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:48:06.833710Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:48:06.834033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:48:06.836068Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1529:3398];tablet_id=9437184;parent=[1:1490:3366];fline=manager.h:99;event=ask_data;request=request_id=122;1={portions_count=21};; 2025-04-07T13:48:06.836457Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:48:06.836699Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:48:06.836720Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:48:06.836738Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:48:06.836768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:48:06.836806Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:48:06.836841Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T13:48:06.836880Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2025-04-07T13:48:06.836911Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:48:06.836940Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:48:06.836967Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:48:06.837000Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:48:06.837052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T13:48:06.841221Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2025-04-07T13:48:06.842038Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2025-04-07T13:48:06.843240Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:48:06.843272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1490:3366];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::ShouldBePings >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-04-07T13:48:13.325750Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.326601Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2025-04-07T13:48:13.326905Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.327034Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [1:35:2058], tablet id = 2, status = OK 2025-04-07T13:48:13.327082Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:35:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.327173Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-07T13:48:13.327328Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.327402Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.327460Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [1:36:2059], tablet id = 3, status = OK 2025-04-07T13:48:13.327486Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:36:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.327656Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:40:2056], server id = [3:40:2056], tablet id = 5, status = OK 2025-04-07T13:48:13.327690Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = 
[3:40:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.327712Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:42:2056], server id = [2:42:2056], tablet id = 4, status = OK 2025-04-07T13:48:13.327729Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:42:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.327771Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-07T13:48:13.327831Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-07T13:48:13.327841Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.327858Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-07T13:48:13.327889Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:13.327955Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-07T13:48:13.327992Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-07T13:48:13.328066Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-07T13:48:13.328111Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:40:2056], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-07T13:48:13.328120Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.328167Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:42:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-07T13:48:13.328184Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.328198Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-07T13:48:13.328206Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.328218Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [4:45:2056], tablet id = 6, status = OK 2025-04-07T13:48:13.328244Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:45:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:13.328286Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-07T13:48:13.328297Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.328306Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-07T13:48:13.328324Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:48:13.328491Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-07T13:48:13.328549Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-07T13:48:13.328558Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.328643Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-07T13:48:13.328701Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:13.328796Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-07T13:48:13.328826Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> 
KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 17214, MsgBus: 24024 2025-04-07T13:46:43.092124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567289270995959:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:43.092318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00115a/r3tmp/tmpDffWUa/pdisk_1.dat 2025-04-07T13:46:43.837224Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:46:43.855096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:46:43.855190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:46:43.856453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17214, node 1 2025-04-07T13:46:44.161257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:46:44.161289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:46:44.161297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:46:44.161412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:46:48.017288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567289270995959:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:46:48.017335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:24024 TClient is connected to server localhost:24024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:46:55.205280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:46:58.834422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:46:58.834442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:03.997218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567375170342444:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:03.998425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:04.008844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567375170342459:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:04.033616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:47:04.211194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567379465309757:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:47:06.527653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:47:07.247359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:47:10.986201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:16.831972Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr882473302bad45agamf095", SessionId: ydb://session/3?node_id=1&id=MWI4NjBmNDgtNjg0OWYwZmEtOTQ2OGRiNGQtMTNmYmQ1MDE=, Slow query, duration: 12.896121s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-04-07T13:47:25.942167Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567469659632653:3215], TxId: 281474976710669, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjNlZGYwMGQtNzNjZWUzMWEtZWU2ODU1ZmEtZWUwYmJmOQ==. CustomerSuppliedId : . TraceId : 01jr88313s79fwng9f6tcsdp05. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744033640000/18446744073709551615 shard 72075186224037889 with lowWatermark v1744033641000/18446744073709551615 (node# 1 state# Ready) } } 2025-04-07T13:47:25.943553Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567469659632653:3215], TxId: 281474976710669, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjNlZGYwMGQtNzNjZWUzMWEtZWU2ODU1ZmEtZWUwYmJmOQ==. CustomerSuppliedId : . TraceId : 01jr88313s79fwng9f6tcsdp05. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744033640000/18446744073709551615 shard 72075186224037889 with lowWatermark v1744033641000/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-07T13:47:25.961211Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567469659632655:3217], TxId: 281474976710669, task: 3. Ctx: { TraceId : 01jr88313s79fwng9f6tcsdp05. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjNlZGYwMGQtNzNjZWUzMWEtZWU2ODU1ZmEtZWUwYmJmOQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490567469659632648:2968], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T13:47:25.961966Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567469659632654:3216], TxId: 281474976710669, task: 2. Ctx: { TraceId : 01jr88313s79fwng9f6tcsdp05. SessionId : ydb://session/3?node_id=1&id=MjNlZGYwMGQtNzNjZWUzMWEtZWU2ODU1ZmEtZWUwYmJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490567469659632648:2968], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T13:47:26.097162Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjNlZGYwMGQtNzNjZWUzMWEtZWU2ODU1ZmEtZWUwYmJmOQ==, ActorId: [1:7490567439594860845:2968], ActorState: ExecuteState, TraceId: 01jr88313s79fwng9f6tcsdp05, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 11045, MsgBus: 32380 2025-04-07T13:47:33.633955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567506965979155:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:33.634001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00115a/r3tmp/tmpOiHcfT/pdisk_1.dat 2025-04-07T13:47:33.909203Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11045, node 2 2025-04-07T13:47:34.001616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:34.008652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:34.105002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:34.169221Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:34.169245Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:34.169252Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:34.169352Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32380 TClient is connected to server localhost:32380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:36.335637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:38.636179Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567506965979155:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:38.636581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:42.204768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567545620685464:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:42.204848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:42.205213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567545620685476:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:42.208059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:47:42.265749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567545620685478:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:47:44.384509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:47:45.866040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:47:48.894139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:47:48.894161Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:49.886013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:52.946058Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jr883c1fcywv0qd8jmvjk7bw", SessionId: ydb://session/3?node_id=2&id=MTc5M2ExNDgtZTc3MTY1M2QtNjc3MDE5MmItYTE4MWU3MjU=, Slow query, duration: 10.789701s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = ROW,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-04-07T13:47:58.193202Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY2ZmRkNTQtNmYyN2QwNGMtZjAxMzY5ZWItYTYxZmZjZTM=, ActorId: [2:7490567597160301750:2957], ActorState: ExecuteState, TraceId: 01jr8840t59dap1zxddqq9gpg2, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-04-07T13:48:12.455402Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:12.456271Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = OK 2025-04-07T13:48:12.456581Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:7:2054], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.456700Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2, status = OK 2025-04-07T13:48:12.456750Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.456783Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-07T13:48:12.456905Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = 
[1:9:2056], server id = [1:9:2056], tablet id = 3, status = OK 2025-04-07T13:48:12.456945Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.456984Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = OK 2025-04-07T13:48:12.457024Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.457066Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-07T13:48:12.457082Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.457116Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5, status = OK 2025-04-07T13:48:12.457154Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.457187Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 6, status = OK 2025-04-07T13:48:12.457232Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.457265Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-07T13:48:12.457341Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-07T13:48:12.457386Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-07T13:48:12.457400Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.457421Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = OK 2025-04-07T13:48:12.457451Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-04-07T13:48:12.457495Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-07T13:48:12.457514Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.457541Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-04-07T13:48:12.457582Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 7, status = ERROR 2025-04-07T13:48:12.457599Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.468615Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-04-07T13:48:12.468700Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-04-07T13:48:12.468726Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-07T13:48:12.468808Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-04-07T13:48:12.468830Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-04-07T13:48:12.468847Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-07T13:48:12.468890Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-04-07T13:48:12.468908Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-04-07T13:48:12.468921Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2025-04-07T13:48:12.468952Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:12.469057Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-07T13:48:12.469084Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-07T13:48:12.469130Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-07T13:48:12.469152Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.469179Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-07T13:48:12.469199Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:12.469220Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-07T13:48:12.469234Z node 1 :STATISTICS DEBUG: Skip EvClientConnected |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr [GOOD] Test command err: Trying to start YDB, gRPC: 25976, MsgBus: 25790 2025-04-07T13:47:50.768826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567576304841275:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.770113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f6a/r3tmp/tmpIOccS2/pdisk_1.dat 2025-04-07T13:47:52.163415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:52.188802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:52.192724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:52.192808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:52.195281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25976, node 1 2025-04-07T13:47:52.310290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:52.310308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:52.311705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:52.311820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25790 2025-04-07T13:47:55.769052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567576304841275:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:55.769386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25790 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:57.849118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.868361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:57.880119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.118123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.287447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:47:58.363215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.878082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567614959548470:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:59.894091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.118455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.174098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.259164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.339264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.431819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.512689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.693953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567619254516281:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.694017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.695303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567619254516286:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.709780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:00.754964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567619254516288:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:07.185014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:07.185060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:07.872274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 10742, MsgBus: 22229 2025-04-07T13:47:47.209023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567565348452105:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:47.209194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fab/r3tmp/tmpFI9q8x/pdisk_1.dat 2025-04-07T13:47:48.169127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:48.171235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:48.171303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:48.192347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10742, node 1 2025-04-07T13:47:50.093143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:50.093162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:50.093168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:50.093238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22229 2025-04-07T13:47:52.204779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567565348452105:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:52.204832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:22229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:52.705646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:52.752383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:54.913617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.808925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:47:57.931111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.745520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567616888061249:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:59.745608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.488740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.529717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.564309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.618416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.659839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.716066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.786024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567621183029049:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.786094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.786428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567621183029054:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.790143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:00.804823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:00.805422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567621183029056:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:03.145392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:03.145416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:07.669341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-04-07T13:48:13.082806Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.085730Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-07T13:48:13.094733Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-07T13:48:13.095118Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.096175Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = ERROR 2025-04-07T13:48:13.096521Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-04-07T13:48:13.122108Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2 2025-04-07T13:48:13.122454Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-07T13:48:13.123558Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = ERROR 2025-04-07T13:48:13.123580Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-07T13:48:13.123617Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5 2025-04-07T13:48:13.123630Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2025-04-07T13:48:13.145383Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-07T13:48:13.146092Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = ERROR 2025-04-07T13:48:13.146116Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2025-04-07T13:48:13.146143Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-07T13:48:13.146157Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.146176Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 8 2025-04-07T13:48:13.146190Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 
2025-04-07T13:48:13.146510Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> KqpInplaceUpdate::SingleRowArithm [GOOD] >> Viewer::Plan2SvgOK [FAIL] >> Viewer::Plan2SvgBad >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpImmediateEffects::TxWithReadAtTheEnd [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd >> KqpImmediateEffects::Insert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 7147, MsgBus: 19187 2025-04-07T13:47:50.824819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567579763456110:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.825016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f7f/r3tmp/tmpoX346G/pdisk_1.dat 2025-04-07T13:47:52.220754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:52.292009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:52.295728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:52.295799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:52.300225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7147, node 1 2025-04-07T13:47:52.719579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:52.719605Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:52.719611Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:52.719680Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19187 2025-04-07T13:47:57.678591Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567579763456110:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.678669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19187 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:58.289888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.322939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.510928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:47:58.737778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:47:58.814287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:01.547177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567622713130671:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:01.861289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:02.366967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.699684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.864414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:03.118621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:03.473427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:03.869128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:04.797255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567639893000422:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:04.797323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:04.798957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567639893000427:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:04.839089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:04.928816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567639893000430:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:07.285528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:07.285828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.470431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-04-07T13:48:13.494708Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.632995Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-07T13:48:13.756496Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-07T13:48:13.756564Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-07T13:48:13.756621Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-07T13:48:13.758158Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17:2054], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-07T13:48:13.758267Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.758660Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-07T13:48:13.758675Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-07T13:48:13.758709Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-07T13:48:13.758851Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Upsert [GOOD] Test command err: Trying to start YDB, gRPC: 27153, MsgBus: 29000 2025-04-07T13:47:52.694298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567585785247852:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:52.694382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f5f/r3tmp/tmpfJOjW6/pdisk_1.dat 2025-04-07T13:47:57.696115Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567585785247852:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.696154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:57.720808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:57.764254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:57.764320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:57.769709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27153, node 1 2025-04-07T13:47:57.836693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:58.061520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.061537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.061542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.061613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29000 TClient is connected to server localhost:29000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:59.071865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.089062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:59.098900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.240273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.386640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.515337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:07.228686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567650209758834:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:07.230360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:09.576237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:10.166326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:10.680866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.197503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.326529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.425673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.512857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567667389628598:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.512981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.513089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567667389628603:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.519499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:11.532595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567667389628605:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:12.789751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:12.789770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:13.382861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm [GOOD] Test command err: Trying to start YDB, gRPC: 16240, MsgBus: 23972 2025-04-07T13:47:57.709466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567608289943935:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.713915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f44/r3tmp/tmp6zjWwa/pdisk_1.dat 2025-04-07T13:47:58.227056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:58.234689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:58.234766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:58.239068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16240, node 1 2025-04-07T13:47:58.433186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.433205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.433211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.433288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23972 TClient is connected to server localhost:23972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:59.208523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:47:59.231073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.379688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.532027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.601138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:02.709119Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567608289943935:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:02.709161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:11.392474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567664124520574:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.429120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.461345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.512741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.538782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.563907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.645298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.703187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.760652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567668419488394:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.760730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.760882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567668419488399:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.764423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:11.772107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567668419488401:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:13.225056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:13.225079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:13.437629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestLoadExternalTable |84.1%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::InsertExistingKey [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution [GOOD] >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey >> KqpEffects::InsertAbort_Select_Success >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestListPath >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 13637, MsgBus: 7315 2025-04-07T13:47:59.286820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567617130950277:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:59.286857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f23/r3tmp/tmpFA6mO4/pdisk_1.dat 2025-04-07T13:47:59.751693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:59.771743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:59.771831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:59.812187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13637, node 1 2025-04-07T13:47:59.872231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:59.872252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:59.872257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:59.872321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:7315 TClient is connected to server localhost:7315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:02.014066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:02.402743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:04.320157Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567617130950277:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:04.320202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:07.253241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:10.372979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:11.311208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:13.642174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567677260493948:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.642242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.863140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.893749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.928050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.992298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.083166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.252070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.439809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567681555461763:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:14.439898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:14.440371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567681555461768:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:14.446183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:14.485092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:14.485376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567681555461771:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:14.747208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:14.747235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:15.706531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpWrite::Insert [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> ColumnBuildTest::BaseCase >> KikimrIcGateway::TestLoadTableMetadata >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |84.1%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |84.1%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 4536, MsgBus: 10850 2025-04-07T13:47:52.133382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567588575378953:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:52.133710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f71/r3tmp/tmplkSl3l/pdisk_1.dat 2025-04-07T13:47:52.816498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:52.816593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4536, node 1 2025-04-07T13:47:52.865247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:52.871720Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:54.905649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:54.905672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:54.905677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:54.905732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:57.159483Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567588575378953:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.160744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:10850 TClient is connected to 
server localhost:10850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:59.058724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.575027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.898554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.325103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.419602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:07.865433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:07.865451Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.191706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567670179759246:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.192193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.273936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.356684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.396572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.443315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.482486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.596183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.889295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567674474727067:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.889453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.896820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567674474727073:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.925323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:12.981624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567674474727076:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:15.159704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::Insert [GOOD] Test command err: Trying to start YDB, gRPC: 29944, MsgBus: 17833 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f56/r3tmp/tmpzkS9p2/pdisk_1.dat 2025-04-07T13:47:57.831116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:57.897421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:57.922757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:57.922835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:57.929098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29944, node 1 2025-04-07T13:47:58.141158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.141174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.141182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.141262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17833 TClient is connected to server localhost:17833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:00.343562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.467916Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:00.479861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.763579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:03.900563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:06.866988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:12.526209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567673747647170:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.526294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.897707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:12.897727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:13.417545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.437498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.459853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.484003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.512595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.541543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.634353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567678042614982:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.634413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.634813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567678042614987:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.647375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:13.656478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567678042614989:2450], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:14.749630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:17.611153Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567695222484699:2506], TxId: 281474976710673, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=. TraceId : 01jr884h258z5w4gn89mcht7dt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:48:17.633269Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567695222484700:2507], TxId: 281474976710673, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=. TraceId : 01jr884h258z5w4gn89mcht7dt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490567695222484696:2479], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:17.737710Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=, ActorId: [1:7490567682337582603:2479], ActorState: ExecuteState, TraceId: 01jr884h258z5w4gn89mcht7dt, Create QueryResponse for error on request, msg:
: Error: Conflict with existing key., code: 2012 2025-04-07T13:48:18.906288Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567699517452100:2530], TxId: 281474976710676, task: 1. Ctx: { TraceId : 01jr884m00ekjj6hrkqystwkvf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T13:48:18.907339Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567699517452101:2531], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=. CustomerSuppliedId : . TraceId : 01jr884m00ekjj6hrkqystwkvf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490567699517452097:2479], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:18.908582Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDBjOTcyMWUtNDM0YzUxY2QtYzZjOWQ5ZTctYjE2MWI2N2E=, ActorId: [1:7490567682337582603:2479], ActorState: ExecuteState, TraceId: 01jr884m00ekjj6hrkqystwkvf, Create QueryResponse for error on request, msg:
: Error: Duplicated keys found., code: 2012 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 25682, MsgBus: 21275 2025-04-07T13:47:50.833075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567578028447337:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.833261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f85/r3tmp/tmplkh0IN/pdisk_1.dat 2025-04-07T13:47:52.188799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:52.395415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:52.413204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:52.413285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:52.418379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25682, node 1 2025-04-07T13:47:52.745089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:52.745109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:52.745117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:52.745199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:57.124149Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567578028447337:2190];send_to=[0:7307199536658146131:7762515]; TClient is connected to server localhost:21275 2025-04-07T13:47:57.699579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:21275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:58.234411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.276049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.472128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.653145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.767333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:07.377311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:07.377641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:10.425894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567659632827594:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:10.486512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.322774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.464687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.534017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.568450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.604557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.629788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.735796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567668222762717:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.735860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.736157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567668222762723:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.740054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:11.756381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:11.756629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567668222762725:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:14.407468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:18.382494Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567698287534548:2501] TxId: 281474976710675. Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-07T13:48:18.382543Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-07T13:48:18.382661Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key sets: 1 2025-04-07T13:48:18.382776Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T13:48:18.382807Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567698287534548:2501] TxId: 281474976710675. Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-07T13:48:18.382942Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T13:48:18.383064Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710675. Shard resolve complete, resolved shards: 1 2025-04-07T13:48:18.383086Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567698287534548:2501] TxId: 281474976710675. Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-07T13:48:18.383109Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567698287534548:2501] TxId: 281474976710675. Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-04-07T13:48:18.383144Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {18446744073709551615, 1744033698001} 2025-04-07T13:48:18.383309Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7490567698287534548:2501] TxId: 281474976710675. 
Ctx: { TraceId: 01jr884kspcn4knzrwdd94n04d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07 ... 594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T13:48:19.167191Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-07T13:48:19.167314Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T13:48:19.172871Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710677. Shard resolve complete, resolved shards: 1 2025-04-07T13:48:19.172925Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-07T13:48:19.172952Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-04-07T13:48:19.172996Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {18446744073709551615, 1744033698001} 2025-04-07T13:48:19.173208Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T13:48:19.173245Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7490567702582501889:2501], 2025-04-07T13:48:19.173272Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. 
Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7490567702582501889:2501], 2025-04-07T13:48:19.173290Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T13:48:19.173605Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490567702582501889:2501], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T13:48:19.173637Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7490567702582501889:2501], 2025-04-07T13:48:19.173660Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7490567702582501889:2501], 2025-04-07T13:48:19.175497Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490567702582501889:2501], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 619 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 137 FinishTimeMs: 1744033699175 OutputRows: 1 OutputBytes: 22 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 22 ComputeCpuTimeUs: 61 BuildCpuTimeUs: 76 WaitInputTimeUs: 1192 HostName: "ghrun-lqtfcktism" NodeId: 1 StartTimeMs: 1744033699173 } MaxMemoryUsage: 1048576 } 2025-04-07T13:48:19.175532Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7490567702582501889:2501] 2025-04-07T13:48:19.175679Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. 
Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T13:48:19.175703Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501885:2501] TxId: 281474976710677. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000619s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2025-04-07T13:48:19.176774Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710678. Resolved key sets: 0 2025-04-07T13:48:19.176890Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {18446744073709551615, 1744033698001} 2025-04-07T13:48:19.177025Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037919, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976710675 DataShard: 72075186224037919 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 16 HasWrites: true } SendingShards: 72075186224037919 ReceivingShards: 72075186224037919 Op: Commit, immediate: 1 2025-04-07T13:48:19.177057Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-07T13:48:19.177089Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T13:48:19.177105Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037919 not finished yet: Executing 2025-04-07T13:48:19.177126Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. 
Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037919 (Executing), 2025-04-07T13:48:19.177139Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T13:48:19.179085Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037919, status: COMPLETE, error: 2025-04-07T13:48:19.179147Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T13:48:19.179170Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490567702582501892:2501] TxId: 281474976710678. Ctx: { TraceId: 01jr884n3w1q567j4jxsrrfa7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjOGM0Y2EtYTJkNDkwOWYtNWQ0Yjc5NTYtYTJiMGYxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 6800, MsgBus: 4025 2025-04-07T13:47:47.705909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567564722358955:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:47.705960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa2/r3tmp/tmpACHPZ6/pdisk_1.dat 2025-04-07T13:47:49.896780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:50.010163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:50.023525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:50.023603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:50.036221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6800, node 1 2025-04-07T13:47:50.330599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:50.330619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:50.330624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:50.330706Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4025 TClient is connected to server localhost:4025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:52.708777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567564722358955:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:52.708836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:52.724456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:52.801887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:54.634336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.751699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.875609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.937436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567616261968045:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:59.937530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.279616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.348032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.376963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.413500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.504317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.555706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.662136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567620556935843:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.662210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.663621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567620556935848:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.667603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:00.688858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567620556935850:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:04.998002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:04.998346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:08.478565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.612005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.702230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |84.1%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> Viewer::Plan2SvgBad [GOOD] >> KqpWrite::CastValuesOptional [GOOD] >> KqpEffects::DeletePkPrefixWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 27375, MsgBus: 29415 2025-04-07T13:47:50.271795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567577413804335:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.271896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f90/r3tmp/tmpk5oMJj/pdisk_1.dat 2025-04-07T13:47:50.782981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:50.783062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:50.788140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:50.815037Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27375, node 1 2025-04-07T13:47:50.918251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:50.918275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:50.918282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:50.918380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29415 TClient is connected to server localhost:29415 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:52.916629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:52.987052Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:53.001321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:54.991142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:56.724074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567577413804335:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:56.730224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:57.784161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.881840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.569420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567616068511508:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:59.591836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:47:59.801448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.837933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.910127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.982355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.057364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.099666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:00.135999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567620363479311:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.136060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.136395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567620363479316:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.140148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:00.151130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567620363479318:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:05.765217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:05.765240Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:08.024784Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567650428250941:2504], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjQxNzU2Yy1hOTk2YmI5LTIxZmM1NTAwLWMyMDZiYjBh. TraceId : 01jr8847zv9r2cnpmrwzapmchm. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:48:08.082573Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567650428250942:2505], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr8847zv9r2cnpmrwzapmchm. SessionId : ydb://session/3?node_id=1&id=NjQxNzU2Yy1hOTk2YmI5LTIxZmM1NTAwLWMyMDZiYjBh. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490567650428250923:2475], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:08.586836Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQxNzU2Yy1hOTk2YmI5LTIxZmM1NTAwLWMyMDZiYjBh, ActorId: [1:7490567641838316226:2475], ActorState: ExecuteState, TraceId: 01jr8847zv9r2cnpmrwzapmchm, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8334, MsgBus: 25895 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f90/r3tmp/tmpWeDYLm/pdisk_1.dat 2025-04-07T13:48:12.400899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:12.419968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:12.427376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:12.427457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:12.428816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8334, node 2 2025-04-07T13:48:12.783023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:12.783041Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:12.783379Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:12.783771Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25895 TClient is connected to server localhost:25895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:13.617867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:13.622925Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:13.642059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:13.713225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:13.925499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.006906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.017898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567699878667241:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.018010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.050545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.129962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.203560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.259727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.304224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.376950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.496843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567704173635051:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.496940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.496950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567704173635056:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.509690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:19.550052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567704173635058:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:21.196525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 63339, MsgBus: 19608 2025-04-07T13:47:59.973828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567615642377953:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:59.973995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eec/r3tmp/tmp1Nqbkh/pdisk_1.dat 2025-04-07T13:48:00.792324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:00.802221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:00.802300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:00.805989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63339, node 1 2025-04-07T13:48:01.235638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:01.236319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:01.236327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:01.237034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:04.974573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567615642377953:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:04.975317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19608 TClient is connected to server localhost:19608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:13.646686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:13.679357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:13.856453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.022908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.136907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:15.753413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:15.753574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:18.129372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567697246758122:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:18.461406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:18.951549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.028126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.095060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.177067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.236873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.324160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.392112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567701541725943:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.392177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.392357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567701541725948:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:19.396921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:19.407002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567701541725950:2452], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST >> KqpImmediateEffects::TxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 14914, MsgBus: 61173 2025-04-07T13:47:56.020007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567599234305798:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:56.167971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f53/r3tmp/tmpgA69N3/pdisk_1.dat 2025-04-07T13:47:58.039879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14914, node 1 2025-04-07T13:47:58.121145Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:58.280647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.280683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.280691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.280793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61173 2025-04-07T13:47:58.743755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:58.743842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:58.751081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:59.127230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.148422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:59.182438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.316840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.470058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.561162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.969784Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567599234305798:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:00.969845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:05.562021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567642183980388:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:05.562807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:08.062551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.547223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.691617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.795533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:08.890718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:09.291668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:09.981017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567659363850176:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:09.981105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:09.981366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567659363850183:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:10.019412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:10.107332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567659363850185:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:12.537278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.121103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:13.121136Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 18636, MsgBus: 28228 2025-04-07T13:48:15.025833Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567684015569816:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:15.025888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f53/r3tmp/tmpTaXj8C/pdisk_1.dat 2025-04-07T13:48:15.276069Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:15.295402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:15.295503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:15.296954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18636, node 2 2025-04-07T13:48:15.399082Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:15.399106Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:15.399112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:15.399213Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28228 TClient is connected to server localhost:28228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:15.871362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:15.904317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:17.370272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:18.116015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:18.512353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.029035Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567684015569816:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.029094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:21.171350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567709785375294:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.171459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.214580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.259628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.298046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.353738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.402013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.483241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.593982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567709785375796:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.594066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.594261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567709785375801:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.600027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:21.614930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567709785375803:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:26.912272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata >> TCacheTest::List >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpInplaceUpdate::SingleRowSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 11659, MsgBus: 5995 2025-04-07T13:47:50.476638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567580008547865:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.476830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f73/r3tmp/tmpIVby1V/pdisk_1.dat 2025-04-07T13:47:50.970549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:51.031164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:51.031275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11659, node 1 2025-04-07T13:47:51.033366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:52.123560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:52.123583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:52.123907Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:52.123986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5995 TClient is connected to server localhost:5995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:55.394315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.256396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567580008547865:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.257168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:57.258544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:57.381860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:57.840003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.042188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.124206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.835611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567622958222417:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:00.835696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:01.757708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:01.843620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:01.947893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.026318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.160604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:02.432550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:03.315187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567635843124873:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:03.315288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:03.315831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567635843124879:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:03.382128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:03.509177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567635843124881:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:05.969776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:05.970161Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.041194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17554, MsgBus: 9139 2025-04-07T13:48:14.215141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567679239143910:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:14.215207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f73/r3tmp/tmpa6cKDy/pdisk_1.dat 2025-04-07T13:48:14.481906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:14.481986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:14.488044Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:14.489401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17554, node 2 2025-04-07T13:48:14.629244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:14.629262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:14.629269Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:14.629364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9139 TClient is connected to server localhost:9139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:16.592627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:16.767948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:18.076369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.024711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.115143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.349526Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567679239143910:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:19.351221Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:21.941827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567709303916679:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.942246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:22.092481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:22.181433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:22.352315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:22.761879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:23.351763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:23.813590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.623191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567726483786439:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:25.623376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:25.623647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567726483786448:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:25.627265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:26.084150Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:26.096137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567726483786450:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:29.500460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:29.500476Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:30.851152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.508913Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmMwMDBjZGQtM2RmMWYwMWYtYjg4NjQ3ZWItNDBjZDY0NGY=, ActorId: [2:7490567747958623355:2506], ActorState: ExecuteState, TraceId: 01jr88544f71dac3zpwv59c69x, Create QueryResponse for error on request, msg: Error while locks merge >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd [GOOD] >> TCacheTest::List [GOOD] >> TCacheTest::CheckSystemViewAccess >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> TCacheTest::CheckSystemViewAccess [GOOD] >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 31451, MsgBus: 11063 2025-04-07T13:47:53.274823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567590966378658:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:53.305206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f65/r3tmp/tmpFpHHTe/pdisk_1.dat 2025-04-07T13:47:57.824463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:58.003151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:58.003238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:58.013279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:58.078016Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31451, node 1 2025-04-07T13:47:58.283888Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567590966378658:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:58.283950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:58.366189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.366206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T13:47:58.366211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.366316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11063 TClient is connected to server localhost:11063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:59.703689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.738147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.946666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.136322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.366224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:11.671928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567668275791619:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.672043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.407817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.491693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.530742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.572133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.728452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.944901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:12.944924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:13.046018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:13.237761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567676865726727:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.237833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.238130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567676865726732:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:13.241864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:13.252985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567676865726734:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:14.617512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23377, MsgBus: 7599 2025-04-07T13:48:19.767934Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567703504397787:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f65/r3tmp/tmptW97a6/pdisk_1.dat 2025-04-07T13:48:19.907171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:20.026815Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.061670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:20.061754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:20.069921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23377, node 2 2025-04-07T13:48:20.233178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:20.233200Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:20.233207Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:20.233307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7599 TClient is connected to server localhost:7599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:20.926873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.934369Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:20.957289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:48:21.058975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.264259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.332651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:24.734993Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567703504397787:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:24.735379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:30.276956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567746454072326:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:30.277451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:30.446369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.595044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.764630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.824997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.935786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.142845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.549377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567755044007472:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.549898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.551394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567755044007484:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.563030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:31.641709Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:31.641847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567755044007487:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:34.989345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:34.989364Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:35.410617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2025-04-07T13:48:37.481326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:37.481381Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:48:37.734776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-04-07T13:48:37.871230Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:48:37.871503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:48:37.871644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-07T13:48:38.667843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:38.667891Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:48:38.711373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-07T13:48:38.716416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:48:38.722013Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:48:38.722596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-07T13:48:38.726709Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:202:2193], for# user1@builtin, access# DescribeSchema 2025-04-07T13:48:38.727358Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:208:2199], for# user1@builtin, access# >> TCacheTestWithDrops::LookupErrorUponEviction >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 16726, MsgBus: 3646 2025-04-07T13:47:58.013118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567611955978952:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:58.013159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f35/r3tmp/tmpgYScVg/pdisk_1.dat 2025-04-07T13:47:58.522738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:58.560582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:58.560810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:58.567509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16726, node 1 2025-04-07T13:47:58.716627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-07T13:47:58.716647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.716654Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.716758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3646 TClient is connected to server localhost:3646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:59.496182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.518320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:59.522924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:47:59.714928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.975796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.040395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:03.016868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567611955978952:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:03.016927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:11.677416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567667790555397:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.693637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.719240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.759974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.790227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.839788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.910909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.072025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.390889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567672085523207:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.390963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.392557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567672085523212:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.395867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:12.437198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567672085523214:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:13.515839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:13.515866Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:15.209963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15944, MsgBus: 63579 2025-04-07T13:48:19.556960Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567700874724384:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:19.675001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f35/r3tmp/tmpKa2Xj9/pdisk_1.dat 2025-04-07T13:48:19.903072Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:19.954419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:19.954491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:19.955502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15944, node 2 2025-04-07T13:48:20.173200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:20.173219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:20.173228Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:20.173308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63579 TClient is connected to server localhost:63579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:20.687981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.719335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:20.811246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.108451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.219684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:24.557400Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567700874724384:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:24.557450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:33.788955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567761004268293:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:33.789177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:33.853207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:33.932871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:33.989848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.111301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.236454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.355388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.555598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567765299236098:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.555661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.557523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567765299236103:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.560798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:34.634305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567765299236105:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:34.841999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:34.842016Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.317204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5088, MsgBus: 26270 2025-04-07T13:47:55.196497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567598637409567:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:55.196532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:47:57.785116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567607227344416:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:57.785293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f4b/r3tmp/tmpA0ne6Z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5088, node 1 TClient is connected to server localhost:26270 2025-04-07T13:47:58.771224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:58.771311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:58.775801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:58.854826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:58.854846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:58.854864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:58.854942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:47:59.340539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.361680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:59.374539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:47:59.511489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:47:59.678310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.776406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.200037Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567598637409567:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:00.200075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:02.793056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567607227344416:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:02.793097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:08.647721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567645882051593:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:08.648658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:09.310237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:09.462119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:10.341925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.127710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.156962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.282253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.431008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567667356888647:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.431081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.432896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567667356888652:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.436924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:11.450335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567667356888654:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:13.082072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:13.082094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:13.286661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6719, MsgBus: 5325 2025-04-07T13:48:15.055488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567684255535780:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:15.062099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f4b/r3tmp/tmpZrsTAW/pdisk_1.dat 2025-04-07T13:48:15.153951Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:15.171497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:15.171570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:15.172948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6719, node 2 2025-04-07T13:48:15.249346Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:15.249365Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:15.249375Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:15.249471Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5325 TClient is connected to server localhost:5325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:15.749804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:15.761486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:15.775683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:15.856844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:16.960876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:17.582173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.054805Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567684255535780:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.054860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:21.196833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567710025341249:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.196932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.255632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.295643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.381833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.437598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.507465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.565524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.736782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567710025341762:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.736868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.743004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567710025341767:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.756389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:21.781806Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:48:21.782472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567710025341770:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:25.325878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.816091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.095275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.146255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:30.146272Z node 2 :IMPORT WARN: Table profiles were not loaded >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 32042, MsgBus: 25576 2025-04-07T13:47:50.949095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567576504182305:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:50.949255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8b/r3tmp/tmps2mZ5Y/pdisk_1.dat 2025-04-07T13:47:52.752976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:52.845839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:47:52.850137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:52.850211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:52.857836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32042, node 1 2025-04-07T13:47:54.605825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:54.605846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:54.605852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:54.606099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:47:55.957412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567576504182305:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:55.958060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25576 TClient is connected to server localhost:25576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:58.433564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:58.470281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:47:58.477659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:59.618470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.290154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:00.432534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:07.844130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:07.844160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.519777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567666698497140:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.519896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:11.780044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.809051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.881741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.917914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.943794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:11.978631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:12.115248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567670993464935:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.115314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.115765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567670993464940:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:12.119751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:12.158067Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:12.159297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567670993464942:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:13.949238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:15.618926Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567683878367415:2536], TxId: 281474976710676, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjAxZjEyNmItNzc0NzE1OWUtMTc3YTQyYWYtYTBlMmU4NA==. TraceId : 01jr884gv2cec10zf812tkzrx9. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:48:15.624528Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567683878367416:2537], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jr884gv2cec10zf812tkzrx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjAxZjEyNmItNzc0NzE1OWUtMTc3YTQyYWYtYTBlMmU4NA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490567683878367412:2502], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:15.628014Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjAxZjEyNmItNzc0NzE1OWUtMTc3YTQyYWYtYTBlMmU4NA==, ActorId: [1:7490567675288432554:2502], ActorState: ExecuteState, TraceId: 01jr884gv2cec10zf812tkzrx9, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29455, MsgBus: 1137 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8b/r3tmp/tmpftQZFO/pdisk_1.dat 2025-04-07T13:48:19.344795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:19.410057Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:19.425031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:19.425109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:19.426737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29455, node 2 2025-04-07T13:48:19.604970Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:19.604996Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:19.605002Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:19.605122Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1137 TClient is connected to server localhost:1137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:20.448153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.474857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.583252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:20.810776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.938799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:30.119178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567745416249151:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:30.119884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:30.291365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.531038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.624184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.689717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.805020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.018921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.293552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567754006184305:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.293634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.296137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567754006184310:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.321681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:31.407694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567754006184312:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:34.369398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:34.369423Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.116104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 1624, MsgBus: 11957 2025-04-07T13:47:59.357736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567618095823419:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:59.361005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f11/r3tmp/tmp80ElzX/pdisk_1.dat 2025-04-07T13:47:59.896828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:59.896908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:59.899234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:59.900961Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1624, node 1 2025-04-07T13:48:00.009160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:00.009193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:00.009208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:00.009290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11957 TClient is connected to server localhost:11957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-07T13:48:05.172193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567618095823419:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:05.608099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:07.132623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:07.715995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:07.785392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:10.365048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:11.952796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:48:12.405769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.387941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567682520334545:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:14.388026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:14.642470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.706777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.756497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.785146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:14.920545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:14.932709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:14.982278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:15.028306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:15.114697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567686815302351:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:15.114772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:15.114930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567686815302356:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:15.118627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:15.130134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567686815302358:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:17.600054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2544, MsgBus: 18297 2025-04-07T13:48:20.199918Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567708563556604:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.199951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f11/r3tmp/tmpCl8VUB/pdisk_1.dat 2025-04-07T13:48:20.328912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.341602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:20.341692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:20.343872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2544, node 2 2025-04-07T13:48:20.581268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:20.581289Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:20.581295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:20.581380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18297 TClient is connected to server localhost:18297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:21.256446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.265501Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:21.275585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:21.339404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.510826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.611777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:25.204957Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567708563556604:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:25.205003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:34.265407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567768693100559:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.265701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.306250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.392376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.500861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.623518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.682925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.796659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:34.982691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567768693101070:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.982748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.983008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567768693101075:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.989453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:35.001423Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:48:35.002272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567768693101077:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:35.321064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:35.321091Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.715542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> ColumnBuildTest::ValidDefaultValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple [GOOD] Test command err: Trying to start YDB, gRPC: 23367, MsgBus: 23699 2025-04-07T13:48:00.850228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567621910794385:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:00.850438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f07/r3tmp/tmpC10j03/pdisk_1.dat 2025-04-07T13:48:05.838320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567621910794385:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:05.838380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:08.881181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:08.882500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:10.025779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:10.025801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:11.575852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:11.575881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:11.604745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.607000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:11.607064Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:11.623810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23367, node 1 2025-04-07T13:48:12.493027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:12.493052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:12.493057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:12.493120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23699 TClient is connected to server localhost:23699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:18.877304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:18.936305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:18.985037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.206862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.412280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:48:19.508082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.322476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567712105109206:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.322562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.689114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.792099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.843097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.886940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.929253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:22.001129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:23.080088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567720695044321:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:23.080306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:23.081080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567720695044335:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:23.138855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:23.386234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567720695044337:2449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:26.549728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:26.549750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:31.058854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> ColumnBuildTest::BaseCase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] Test command err: Trying to start YDB, gRPC: 2486, MsgBus: 3325 2025-04-07T13:48:05.044310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567644230816434:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:05.045028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:10.155655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567644230816434:2052];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f10/r3tmp/tmpIaVeY5/pdisk_1.dat 2025-04-07T13:48:11.422917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:11.606960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:12.464202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:12.884604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:12.956069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:12.958775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:12.976778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:12.986087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2486, node 1 2025-04-07T13:48:13.777120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:13.777147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:13.777155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:13.777232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3325 TClient is connected to server localhost:3325 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:19.277741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.317620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:19.328953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.559288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.867255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.028203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:24.572171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567721540229505:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:24.632386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:25.220947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.160639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.231346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.401220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.785031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.984873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:27.255520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567738720099244:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.255580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.255993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567738720099249:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.262330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:27.377762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567738720099251:2452], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:27.485264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:27.485283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:32.573788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteEmptyColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: 2025-04-07T13:47:40.768882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567534113652713:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:40.768951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:47:45.790246Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567534113652713:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:45.790446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:47:45.815156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:47:46.234804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:47:46.234888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:47:46.242076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:47:46.273269Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27663, node 1 2025-04-07T13:47:47.040699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:47:47.048378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:47:47.048406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:47:47.048583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:47:47.312264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:47:47.375786Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-07T13:47:59.741017Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567617597877089:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:59.741072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:48:00.262058Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:00.292639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:00.300926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:00.308893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20338, node 2 2025-04-07T13:48:00.526188Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:00.526214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:00.526225Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:00.526383Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:04.745309Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567617597877089:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:04.745660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3817 2025-04-07T13:48:08.302505Z node 2 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 
2025-04-07T13:48:14.724908Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567682148995889:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:14.724986Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:48:15.118684Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12758, node 3 2025-04-07T13:48:15.133184Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:15.136519Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:15.140076Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:15.180703Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:15.180725Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:15.180735Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:15.180888Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9855 2025-04-07T13:48:15.497732Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-07T13:48:19.725165Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567682148995889:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:19.725237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCacheTest::TableSchemaVersion >> ClientStatsCollector::PrepareQuery >> YdbTableBulkUpsert::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:21.330890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:21.331005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:21.331052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:21.331081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:21.331120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:21.331144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-04-07T13:48:21.331193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:21.331514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:21.457197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:21.457254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:21.467765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:21.467925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:21.468083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:21.473683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:21.473873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:21.474487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:21.474690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:21.483992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:21.485207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:21.485269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:21.485385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:21.485434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:21.485475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:21.485626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.493140Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:21.643863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:21.644030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.644178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:21.644311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:21.644345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T13:48:21.650876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:21.651018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:21.651193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.651255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:21.651291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:21.651348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:21.658945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.659014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:21.659357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:21.669519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.669585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.669649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:21.669714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:21.677087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:21.685600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:21.685850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:21.686905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:21.687056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:21.687098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:21.688008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:21.688061Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:21.688217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:21.688295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:21.697925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:21.698012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:21.698192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:21.698245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:21.698498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:21.698547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:21.698639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:21.698674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:21.698721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:21.698771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:21.698806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:21.698841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:21.698918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:21.698974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:21.699007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:21.700998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:21.701110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:21.701144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:21.701182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:21.701239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:21.701357Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... G: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-07T13:48:40.679515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-07T13:48:40.679567Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-07T13:48:40.679609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:40.679706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-07T13:48:40.687254Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:40.687344Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:40.687800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-07T13:48:40.687897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 
4294967295, tablet: 72075186233409550 2025-04-07T13:48:40.688054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-07T13:48:40.688084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-07T13:48:40.688114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-07T13:48:40.701252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1811:3678], Recipient [1:744:2636]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:48:40.701307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:48:40.778261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-07T13:48:40.778387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 670 RawX2: 4294969875 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-07T13:48:40.778436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-07T13:48:40.778476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-07T13:48:40.786082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-07T13:48:40.786140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-07T13:48:40.786225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-07T13:48:40.786252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:40.786286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-07T13:48:40.786349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:561:2501] message: TxId: 281474976725761 2025-04-07T13:48:40.786384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:40.786434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-07T13:48:40.786462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-07T13:48:40.786537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-07T13:48:40.794146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-07T13:48:40.794230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-07T13:48:40.794288Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-07T13:48:40.794367Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , 
DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:40.800227Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:40.800322Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:40.800382Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T13:48:40.810087Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:40.810197Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:40.810247Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo 
SendNotifications: : id# 106, subscribers count# 1 2025-04-07T13:48:40.810393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T13:48:40.810439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1156:3031] TestWaitNotification: OK eventTxId 106 2025-04-07T13:48:40.813167Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-07T13:48:40.813459Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |84.2%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> YdbOlapStore::ManyTables >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids >> YdbLogStore::LogTable >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::WatchRoot >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> KqpEffects::InsertAbort_Literal_Duplicates [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> TUserAccountServiceTest::Get >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> TCacheTest::WatchRoot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-04-07T13:48:39.833846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:39.833915Z node 1 :IMPORT WARN: Table 
profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:48:39.987729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TGRpcYdbTest::ExecuteQueryImplicitSession >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool >> YdbOlapStore::LogPagingBefore >> FolderServiceTest::TFolderServiceTransitional ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates [GOOD] Test command err: Trying to start YDB, gRPC: 2436, MsgBus: 11144 2025-04-07T13:48:11.597466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567668162466164:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:11.597506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000efb/r3tmp/tmp5AA5s5/pdisk_1.dat 2025-04-07T13:48:14.145327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:15.080086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:15.153143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:15.353839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:15.353944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:15.355768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2436, node 1 2025-04-07T13:48:15.437162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:15.437186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:15.437192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:15.437291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11144 2025-04-07T13:48:16.601080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567668162466164:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:16.601130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11144 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:18.629644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.013069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:19.037969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.274867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.562785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.694812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:23.987716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567711112140631:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:23.997922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:24.843973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.154703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.550805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.029425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.270702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.648043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:27.169771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567736881944981:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.169841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.170160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567736881944986:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.174172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:27.246159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567736881944988:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:29.949639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:29.949663Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 16792, MsgBus: 11425 2025-04-07T13:48:36.532401Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567777150397371:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:36.532444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000efb/r3tmp/tmpbzpwYa/pdisk_1.dat 2025-04-07T13:48:36.796285Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.810280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:36.810360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:36.817785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16792, node 2 2025-04-07T13:48:37.020291Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:37.020313Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:37.020320Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:37.020414Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11425 TClient is connected to server localhost:11425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:37.948755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.091195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.599361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:38.785784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.856229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.169821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567798625235562:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.169907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.217492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.254804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.300100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.343036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.447585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.556442Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567777150397371:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:41.556494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:41.703791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.762103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567798625236069:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.762191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.762388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567798625236074:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.766323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:41.779272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567798625236076:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:44.050698Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490567807215171101:2479], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzE1M2IyYWEtNjU3YjFmLTUzMTBkZTllLTMwNjVlNTVl. TraceId : 01jr885crhd240xpqks6zdkhkn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T13:48:44.153569Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490567807215171102:2480], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzE1M2IyYWEtNjU3YjFmLTUzMTBkZTllLTMwNjVlNTVl. TraceId : 01jr885crhd240xpqks6zdkhkn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490567807215171096:2463], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:44.197237Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzE1M2IyYWEtNjU3YjFmLTUzMTBkZTllLTMwNjVlNTVl, ActorId: [2:7490567807215170984:2463], ActorState: ExecuteState, TraceId: 01jr885crhd240xpqks6zdkhkn, Create QueryResponse for error on request, msg: |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::WatchRoot [GOOD] Test command err: 2025-04-07T13:48:42.333313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:42.333375Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:48:42.544294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-07T13:48:42.591543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:48:42.885626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 2025-04-07T13:48:47.161935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:47.162024Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:48:47.292198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got 
TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:48:47.337828Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:48:47.339681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:48:47.363433Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> YdbYqlClient::TestReadTableMultiShard >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonStorageListingV1 >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> TAccessServiceTest::PassRequestId >> KikimrIcGateway::TestDropExternalDataSource [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |84.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 25874, MsgBus: 12954 2025-04-07T13:48:11.563648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567667936728208:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:11.568902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef1/r3tmp/tmpwuf4il/pdisk_1.dat 2025-04-07T13:48:13.681826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:14.502948Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:14.503319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:14.529627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:14.622271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:14.700234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25874, node 1 2025-04-07T13:48:16.465115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:16.465132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:16.465137Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:16.465206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:16.561631Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567667936728208:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:16.562037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:12954 TClient is connected to server localhost:12954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:19.898280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.928972Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:19.947636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.219459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:48:20.504704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:48:20.612151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:27.227252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567736656206572:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:27.227356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:29.210075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:29.388241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:29.588725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:29.588751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:29.668820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:29.735845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:29.799795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.067856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.369272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567753836076352:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.369338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.369878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567753836076357:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.439740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:31.648115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567753836076359:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:36.052822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1308, MsgBus: 25088 2025-04-07T13:48:38.854118Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567783969068805:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:38.854766Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef1/r3tmp/tmpGciJB7/pdisk_1.dat 2025-04-07T13:48:39.034424Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:39.050867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:39.050945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:39.055103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1308, node 2 2025-04-07T13:48:39.197397Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:39.197414Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:39.197423Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:39.197517Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25088 TClient is connected to server localhost:25088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:39.854791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:39.873290Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:39.883107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:40.020258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.346988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.524372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:43.075772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567801148939672:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:43.076186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:43.106534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:43.244398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:43.394215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:43.509944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:43.696230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:43.866759Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567783969068805:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:43.867414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:43.961660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:44.616914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567809738874794:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:44.617065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:44.618445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567809738874799:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:44.623962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:44.723241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567809738874803:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:47.187680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.693000Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTdkZTQyMDMtYzQ0MDZhYi1iZTAzMmYwMi1lZDY4YjE0Yw==, ActorId: [2:7490567822623777012:2470], ActorState: ExecuteState, TraceId: 01jr885hsg8xkg0tqdn79gmgj5, Create QueryResponse for error on request, msg: Error while locks merge |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KikimrIcGateway::TestALterResourcePool [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 26771, MsgBus: 24121 2025-04-07T13:48:21.588841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567710122845341:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:21.623639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc6/r3tmp/tmpBeN2Mh/pdisk_1.dat 2025-04-07T13:48:26.941721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567710122845341:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:26.941755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:27.318225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:28.017103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:28.370856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:28.529230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:28.537383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:28.537449Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:28.560470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26771, node 1 2025-04-07T13:48:30.249968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:30.249985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:30.249991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:30.250051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24121 TClient is connected to server localhost:24121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:36.234151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:36.250397Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:36.266566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:48:36.283774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:48:36.287630Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) 2025-04-07T13:48:36.295966Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 30044, MsgBus: 8052 2025-04-07T13:48:39.261704Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567789538435093:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:39.261748Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc6/r3tmp/tmpwPTamY/pdisk_1.dat 2025-04-07T13:48:39.512444Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30044, node 2 2025-04-07T13:48:39.546588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:39.546640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:39.580930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:39.661201Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:39.661218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:39.661224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:39.661300Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8052 TClient is connected to server localhost:8052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.130891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:40.179131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:48:40.194150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28556, MsgBus: 26524 2025-04-07T13:48:46.375325Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567817025799674:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:46.375363Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc6/r3tmp/tmpNh5Jlz/pdisk_1.dat 2025-04-07T13:48:46.876444Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:46.922922Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:46.923000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:46.924875Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28556, node 3 2025-04-07T13:48:47.033174Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:47.033199Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:47.033206Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:47.033303Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26524 TClient is connected to server localhost:26524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:47.746333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:47.761688Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:47.773753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:41.894085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:41.894206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:41.894248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:41.894280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:41.894316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:41.894342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:41.894403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:41.894697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:42.029622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:42.029678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:42.049501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:42.049681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:42.049845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:42.057504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:42.057697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:42.058227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:42.058426Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:42.060290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:42.061441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:42.061490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:42.061620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:42.061670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:42.061708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:42.061854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.068448Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:42.234425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:42.234662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.234849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:42.235061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:42.235112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.237751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:42.237894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:42.238104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.238164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:42.238205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:42.238235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:42.240222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.240276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 
ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:42.240316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:42.244010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.244058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.244106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:42.244172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:42.247742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:42.249732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:42.249909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:42.250836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:42.250983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:42.251038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:42.251287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:42.251333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:42.251479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:42.251546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:42.253438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:42.253503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:42.253685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:42.253720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-04-07T13:48:42.253932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:42.253970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:42.254058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:42.254087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:42.254138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:42.254178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:42.254207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:42.254236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:42.254298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:42.254352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:42.254382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:42.256186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:42.256288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:42.256331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:42.256365Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:42.256409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:42.256494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-07T13:48:50.986649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-07T13:48:50.986702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-07T13:48:50.986766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:50.986873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-07T13:48:51.006116Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:51.006223Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:51.006854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-07T13:48:51.006974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-07T13:48:51.007132Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-07T13:48:51.007160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-07T13:48:51.007189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-07T13:48:51.023937Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1811:3678], Recipient [1:744:2636]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:48:51.023995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:48:51.097323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-07T13:48:51.097457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 670 RawX2: 4294969875 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-07T13:48:51.097528Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-07T13:48:51.097586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-07T13:48:51.116262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-07T13:48:51.116327Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-07T13:48:51.116406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-07T13:48:51.116431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:51.116477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-07T13:48:51.116555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:561:2501] message: TxId: 281474976725761 2025-04-07T13:48:51.116594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-07T13:48:51.116637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-07T13:48:51.116678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-07T13:48:51.116755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-07T13:48:51.138376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-07T13:48:51.138480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-07T13:48:51.138545Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-07T13:48:51.138632Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: 
[OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:51.140941Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:51.141026Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:51.141079Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T13:48:51.153405Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T13:48:51.153515Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-07T13:48:51.153571Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-04-07T13:48:51.153721Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T13:48:51.153766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1156:3031] TestWaitNotification: OK eventTxId 106 2025-04-07T13:48:51.156021Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-07T13:48:51.156306Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 63667, MsgBus: 13777 2025-04-07T13:48:20.369055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567708802059132:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.369093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cac/r3tmp/tmpi8vCGZ/pdisk_1.dat 2025-04-07T13:48:21.049682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:21.049749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:21.054179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:21.069083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63667, node 1 2025-04-07T13:48:21.277132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:21.277150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:21.277163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:21.277244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13777 2025-04-07T13:48:25.369419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567708802059132:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:25.369755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:13777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:48:28.353010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:28.428001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:48:28.458235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62995, MsgBus: 65141 2025-04-07T13:48:37.645874Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567780482741584:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:37.645918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cac/r3tmp/tmpmSRtq0/pdisk_1.dat 2025-04-07T13:48:38.381448Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:38.440870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:38.440949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:38.442701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62995, node 2 2025-04-07T13:48:38.745129Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:38.745148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:38.745153Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:38.745228Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65141 TClient is connected to server localhost:65141 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.023431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.212110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2305, MsgBus: 15766 2025-04-07T13:48:47.300736Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567824013471838:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:47.384069Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cac/r3tmp/tmpft2BfY/pdisk_1.dat 2025-04-07T13:48:47.539330Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:47.544369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:47.544701Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:47.547431Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2305, node 3 2025-04-07T13:48:47.705162Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:47.705183Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:47.705190Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:47.705275Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15766 TClient is connected to server localhost:15766 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:48.429924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:48.494249Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:48.552155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:48:48.625287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710659:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 24015, MsgBus: 12246 2025-04-07T13:48:19.411989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567702673196785:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:19.412025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000edc/r3tmp/tmpcwmxJO/pdisk_1.dat 2025-04-07T13:48:20.085368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.086957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:20.087031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:20.095755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24015, node 1 2025-04-07T13:48:20.252724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:20.252742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:20.252747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:20.252836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12246 TClient is connected to server 
localhost:12246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:21.171578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.221156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.411566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.811489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:23.286873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:25.198069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567702673196785:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:26.932888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:34.795938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567767097708033:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:34.802200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:35.009593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.057789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:35.057811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:35.166154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.298342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.418478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.573222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.701745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:36.008788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567771392675848:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:36.008942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:36.009314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567775687643149:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:36.012951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:36.041381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:36.042046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567775687643152:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:37.280786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26137, MsgBus: 27808 2025-04-07T13:48:39.521201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567789653254824:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000edc/r3tmp/tmpx98SP5/pdisk_1.dat 2025-04-07T13:48:39.663535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:39.741622Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:39.763536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:39.768786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:39.770065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26137, node 2 2025-04-07T13:48:39.854160Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:39.854176Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:39.854182Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:39.854265Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27808 TClient is connected to server localhost:27808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.353286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.365197Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:40.379793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:40.545981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.746455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.843578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:44.515742Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567789653254824:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:44.515803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:47.794681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567824012994797:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.794827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.928733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.012048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.081833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.195475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.287471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.432566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.625625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567828307962604:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.625694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.625916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567828307962609:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.629525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:48.646329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567828307962611:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:49.729445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TUserAccountServiceTest::Get [GOOD] >> FolderServiceTest::TFolderService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 3272, MsgBus: 18155 2025-04-07T13:48:20.430830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567707143449226:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000edd/r3tmp/tmpue8Gzh/pdisk_1.dat 2025-04-07T13:48:20.690306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:21.021041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:21.055600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:21.055684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:21.057548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3272, node 1 2025-04-07T13:48:21.345282Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:21.345304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:21.345310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:21.345447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18155 2025-04-07T13:48:25.386560Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567707143449226:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:25.393071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:18155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:28.553103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:28.889437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:32.254971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:34.492572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:34.821091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:36.031038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:36.031072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.149503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567780157894820:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.149618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.418242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.464783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.520394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.581992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.656745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.909619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.205375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567784452862639:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.205457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.212768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567784452862644:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.224539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:38.299704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567784452862646:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:39.435218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62226, MsgBus: 2266 2025-04-07T13:48:41.206624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567798850870467:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:41.206669Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000edd/r3tmp/tmpmIgodY/pdisk_1.dat 2025-04-07T13:48:41.626356Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:41.681498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:41.681612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:41.687116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62226, node 2 2025-04-07T13:48:41.801232Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:41.801254Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:41.801262Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:41.801393Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2266 TClient is connected to server localhost:2266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:42.398786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:42.418705Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:42.424228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:42.529186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:42.702641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:42.847745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:46.213087Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567798850870467:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:46.213134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:47.987824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567824620675948:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.987896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.082534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.146662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.213892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.283934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.406675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.522804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.681141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567828915643761:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.681260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.688846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567828915643766:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.693741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:48.707524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567828915643768:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:50.076051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.328000Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQ1MmNkOTMtYzRlY2JiNzgtYmYwMWE0ZmQtYjZkYjU4ZGQ=, ActorId: [2:7490567833210611386:2470], ActorState: ExecuteState, TraceId: 01jr885mkt2qp74tr8sdyj2zcd, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 18098, MsgBus: 18716 2025-04-07T13:48:11.474289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567669738784060:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:11.474409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef8/r3tmp/tmpu8pBQL/pdisk_1.dat 2025-04-07T13:48:11.914535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:11.941094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:11.941188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:11.942168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18098, node 1 2025-04-07T13:48:12.273748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:12.273766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:12.273772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:12.274062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18716 TClient is connected to server localhost:18716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:16.239222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:16.477808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567669738784060:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:16.478238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:17.553940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:48:18.994928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:48:19.272831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.363169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:23.693643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567716983426047:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:23.956308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:24.482162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:24.809193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.057264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.335265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:25.682289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.258119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:26.627303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567734163295797:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:26.627356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:26.630187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567734163295802:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:26.651545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:26.797545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567734163295804:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:26.898611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:26.898630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:31.470073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:35.765364Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ViYWZmMzYtNDAwYThmMWUtNzJkYjcyNTAtNzc0MTEwMDY=, ActorId: [1:7490567768523034940:2546], ActorState: ExecuteState, TraceId: 01jr8855cfbq89hqg2r3xfwewd, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T13:48:35.904114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ViYWZmMzYtNDAwYThmMWUtNzJkYjcyNTAtNzc0MTEwMDY=, ActorId: [1:7490567768523034940:2546], ActorState: ReadyState, TraceId: 01jr8855nvdpn6jayqe8e76zby, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20087, MsgBus: 22340 2025-04-07T13:48:37.041469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567780293335957:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef8/r3tmp/tmp9HOeHD/pdisk_1.dat 2025-04-07T13:48:37.168240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:37.276940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.328536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:37.328610Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:37.329522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20087, node 2 2025-04-07T13:48:38.716123Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:38.716143Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:38.716150Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:38.716248Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22340 TClient is connected to server localhost:22340 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.887978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.900522Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:40.926002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.132708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.672810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.791956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:42.044199Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567780293335957:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:42.083782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:47.785537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567823243010490:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.786260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.855807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.911037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.957607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.033124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.079944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.169341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.285714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567827537978295:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.285798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.285998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567827537978301:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:48.289734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:48.310930Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:48:48.316838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567827537978303:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:49.846324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 11866, MsgBus: 15558 2025-04-07T13:48:15.447386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567686041334476:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:15.447842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed0/r3tmp/tmp1NWONo/pdisk_1.dat 2025-04-07T13:48:15.839012Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11866, node 1 2025-04-07T13:48:15.856920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:15.857000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:15.869171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:17.511488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:17.511504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:17.511509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:17.511580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15558 TClient is connected to server localhost:15558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:19.876046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:19.927220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:20.283041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.454124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567686041334476:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.456531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:20.518624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.625024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:28.001340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567737580943785:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:28.001431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:29.667338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.000959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.161539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.732359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:30.818095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:30.818447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:30.993723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:31.455409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:32.044800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567759055780867:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:32.044852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:32.047929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567759055780872:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:32.061432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:32.091487Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:32.093442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567759055780874:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:36.138683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3919, MsgBus: 25382 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed0/r3tmp/tmpoqyhIj/pdisk_1.dat 2025-04-07T13:48:39.376957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:39.389557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:39.389648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:39.391301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:39.394520Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3919, node 2 2025-04-07T13:48:39.528165Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:39.528195Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:39.528202Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:39.528312Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25382 TClient is connected to server localhost:25382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.013357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.021470Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:40.043184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.134611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:40.359866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.514703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:47.010281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567821064599793:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.010628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.086131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.201876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.279037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.367254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.464648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.579702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.729572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567821064600296:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.729660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.730029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567821064600301:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.734415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:47.785180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567821064600303:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:48:49.675311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumns >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TStorageTenantTest::CreateTableInsideSubDomain >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::Overload >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TServiceAccountServiceTest::Get [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TServiceAccountServiceTest::IssueToken ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-04-07T13:48:47.617026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567821167706747:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:47.617321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e52/r3tmp/tmph3hd7V/pdisk_1.dat 2025-04-07T13:48:48.525107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:48.525199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:48.530965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:48.533500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:48.697594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:13974 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:49.161461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> TServiceAccountServiceTest::IssueToken [GOOD] >> ObjectStorageListingTest::FilterListing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-04-07T13:46:07.210944Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:46:08.523687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:46:08.545255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:46:08.545334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:46:08.545576Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:46:08.553381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:46:08.553585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:46:08.553776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:46:08.553873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:46:08.553987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:46:08.554096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:46:08.554198Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:46:08.554310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:46:08.554402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:46:08.554500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.554632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:46:08.554737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:46:08.592735Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:46:08.608320Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:46:08.609131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:46:08.609350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:46:08.610991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:08.613115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:46:08.613587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:46:08.613823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:46:08.615126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:46:08.615360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:46:08.615392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:46:08.615604Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:46:08.616626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:46:08.617139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:46:08.617175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:46:08.617199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:46:08.617929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:46:08.618173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:46:08.618576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:46:08.619001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:46:08.619259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:46:08.619286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:46:08.619493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:46:08.619705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:46:08.619895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:46:08.620092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:46:08.621226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=437; 2025-04-07T13:46:08.622074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=234; 2025-04-07T13:46:08.623106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=429; 2025-04-07T13:46:08.624105Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=588; 2025-04-07T13:46:08.624774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:46:08.625160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:46:08.625425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:46:08.628969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:46:08.629010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.629446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:46:08.630939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:46:08.630967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:46:08.631174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:46:08.632478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:46:08.633126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:46:08.633357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:46:08.634255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event= ... 
ne=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=a1fd6f2-13b711f0-87350e6d-8771e5bc,;}; 2025-04-07T13:48:51.829663Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=61;external_task_id=9829bc6-13b711f0-84a5a2e9-ffc3e037;mem=19104780;cpu=0; 2025-04-07T13:48:51.830160Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[204] complete at tablet 9437184 2025-04-07T13:48:51.830233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=abstract.h:88;progress_tx_id=141;lock_id=1;broken=0; 2025-04-07T13:48:51.830328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=a1fd6f2-13b711f0-87350e6d-8771e5bc,;}; 2025-04-07T13:48:51.830442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=6364460;external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;type=CS::INDEXATION;priority=0;; 2025-04-07T13:48:51.830695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19996862644); 2025-04-07T13:48:51.830780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=optimizer.h:893;stop_instant=NO_VALUE_OPTIONAL;size=6274712;next=;count=2;info={bytes=3137356;count=1;records=53332};event=start_optimization;stop_point=;main_portion=88; 2025-04-07T13:48:51.830958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:9;event=lock;process_id=CS::GENERAL::a2028b4-13b711f0-9ea2a792-f7b29b09; 2025-04-07T13:48:51.831006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=ro_controller.cpp:45;event=CS::GENERAL;tablet_id=9437184; 2025-04-07T13:48:51.831123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=19104780;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;type=CS::GENERAL;priority=0;; 2025-04-07T13:48:51.831515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=62;task=cpu=0;mem=6364460;external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;type=CS::INDEXATION;priority=0;; 2025-04-07T13:48:51.831556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;mem=6364460;cpu=0; 2025-04-07T13:48:51.831590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;task_id=62;mem=6364460;cpu=0; 2025-04-07T13:48:51.831682Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc; 2025-04-07T13:48:52.590160Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=a1fd6f2-13b711f0-87350e6d-8771e5bc;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-07T13:48:52.590405Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-04-07T13:48:52.590811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=63;task=cpu=0;mem=19104780;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;type=CS::GENERAL;priority=0;; 2025-04-07T13:48:52.590847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;mem=19104780;cpu=0; 2025-04-07T13:48:52.590884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;task_id=63;mem=19104780;cpu=0; 2025-04-07T13:48:52.590966Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=manager.h:99;event=ask_data;request=request_id=149;1={portions_count=2};; 2025-04-07T13:48:52.591391Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:852;event=compaction;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09; 2025-04-07T13:48:52.591459Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:615;event=start_changes;type=CS::GENERAL;task_id=a2028b4-13b711f0-9ea2a792-f7b29b09; 2025-04-07T13:48:52.591677Z node 1 :S3_WRAPPER DEBUG: external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:61:255:1:823712:0];range=bytes=0-823711;object_exists=1; 2025-04-07T13:48:52.593933Z node 1 :S3_WRAPPER DEBUG: external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:60:255:2:823712:0];range=bytes=0-823711;object_exists=1; 2025-04-07T13:48:52.596225Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2025-04-07T13:48:52.630059Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=a2028b4-13b711f0-9ea2a792-f7b29b09; 2025-04-07T13:48:53.558712Z node 1 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:203;event=blobs_created_diff;appended=0;;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:0:103496];;column_id:4;chunk_idx:1;blob_range:[NO_BLOB:103496:103456];;column_id:4;chunk_idx:2;blob_range:[NO_BLOB:206952:103392];;column_id:4;chunk_idx:3;blob_range:[NO_BLOB:310344:101512];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:411856:103496];;column_id:6;chunk_idx:1;blob_range:[NO_BLOB:515352:103456];;column_id:6;chunk_idx:2;blob_range:[NO_BLOB:618808:103392];;column_id:6;chunk_idx:3;blob_range:[NO_BLOB:722200:101512];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:0:156904];;column_id:3;chunk_idx:1;blob_range:[NO_BLOB:156904:156864];;column_id:3;chunk_idx:2;blob_range:[NO_BLOB:313768:156800];;column_id:3;chunk_idx:3;blob_range:[NO_BLOB:470568:154904];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:625472:103496];;column_id:2;chunk_idx:1;blob_range:[NO_BLOB:728968:103456];;column_id:2;chunk_idx:2;blob_range:[NO_BLOB:832424:103392];;column_id:2;chunk_idx:3;blob_range:[NO_BLOB:935816:101512];;column_id:10;chunk_idx:0;blob_range:[NO_BLOB:1037328:103496];;column_id:10;chunk_idx:1;blob_range:[NO_BLOB:1140824:103456];;column_id:10;chunk_idx:2;blob_range:[NO_BLOB:1244280:103392];;column_id:10;chunk_idx:3;blob_range:[NO_BLOB:1347672:101512];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:1449184:53688];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:1502872:53688];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:1556560:53688];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:1610248:53672];;column_id:8;chunk_idx:0;blob_range:[NO_BLOB:1663920:53592];;column_id:8;chunk_idx:1;blob_range:[NO_BLOB:1717512:53608];;column_id:8;chunk_idx:2;blob_range:[NO_BLOB:1771120:53600];;column_id:8;chunk_idx:3;blob_range:[NO_BLOB:1824720:53592];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:1878312:53592];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:1931904:53608];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:1985512:53600];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:2039112:53592];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:2092704:53520];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:2146224:53216];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:2199440:53288];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:2252728:53240];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:2305968:640];;column_id:4294967040;chunk_idx:1;blob_range:[NO_BLOB:2306608:640];;column_id:4294967040;chunk_idx:2;blob_range:[NO_BLOB:2307248:640];;column_id:4294967040;chunk_idx:3;blob_range:[NO_BLOB:2307888:640];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:2308528:640];;column_id:4294967041;chunk_idx:1;blob_range:[NO_BLOB:2309168:640];;column_id:4294967041;chunk_idx:2;blob_range:[NO_BLOB:2309808:640];;column_id:4294967041;chunk_idx:3;blob_range:[NO_BLOB:2310448:640];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:2311088:632];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:2311720:632];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:2312352:632];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:2312984:632];;;;switched=(portion_id:87;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=INSERTED;)););(portion_id:88;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-07T13:48:53.558798Z node 1 :TX_COLUMNSHARD INFO: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:205;event=blobs_created;appended=1;switched=2; 2025-04-07T13:48:53.559001Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'hot' stopped at tablet 9437184 |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TAccessServiceTest::PassRequestId [GOOD] >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TAccessServiceTest::Authenticate >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-04-07T13:48:49.099794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567832685379300:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:49.099823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e4b/r3tmp/tmpiv65Vy/pdisk_1.dat 2025-04-07T13:48:49.904023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T13:48:49.904119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:49.909216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:49.956450Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:50.494757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:50.538056Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Connect to grpc://localhost:6647 2025-04-07T13:48:50.541858Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-07T13:48:50.709310Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6647: Failed to connect to remote host: Connection refused 2025-04-07T13:48:50.718388Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-07T13:48:50.719129Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6647: Failed to connect to remote host: Connection refused 2025-04-07T13:48:51.721285Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-07T13:48:51.724626Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Status 5 Not Found 2025-04-07T13:48:51.725140Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-07T13:48:51.727613Z node 1 :GRPC_CLIENT DEBUG: [51600009d8d0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 16326, MsgBus: 9530 2025-04-07T13:47:59.895775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567615878519602:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:47:59.896495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f08/r3tmp/tmphCGzdG/pdisk_1.dat 
2025-04-07T13:48:00.808906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:00.823489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:00.823560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:00.829942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16326, node 1 2025-04-07T13:48:01.916411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:01.916429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:01.916433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:01.916495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:04.862381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567615878519602:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:04.862707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:9530 TClient is connected to server localhost:9530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:14.177748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.236816Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:14.256149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.464096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:14.734976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:15.370738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:15.795160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:15.795291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.914577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567706072834481:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:20.914684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.342089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.388422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.446650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.525318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.567370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.641131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:21.799132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567710367802297:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.799191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.801005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567710367802302:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:21.813649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:21.878117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567710367802304:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:27.952864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29085, MsgBus: 13816 2025-04-07T13:48:36.349766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567775046227093:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:36.349811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f08/r3tmp/tmpTEza12/pdisk_1.dat 2025-04-07T13:48:36.681461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.729892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:36.729988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:36.737433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29085, node 2 2025-04-07T13:48:36.917257Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:36.917280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:36.917286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:36.917378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13816 TClient is connected to server localhost:13816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:48:37.725976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.872131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:38.288790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.644129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.722698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.097714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567796521065272:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.097802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.159547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.231109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.273427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.351439Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567775046227093:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:41.351494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:41.500319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.714085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.771951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.885173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567796521065783:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.885270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.885725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567796521065788:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.889285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:41.912519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567796521065790:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:45.048180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:45.336272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:48:45.574095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.680987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:51.681018Z node 2 :IMPORT WARN: Table profiles were not loaded |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 21390, MsgBus: 16848 2025-04-07T13:48:20.365871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567706244866967:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.365915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb4/r3tmp/tmpj4pI2W/pdisk_1.dat 2025-04-07T13:48:21.105162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:21.107514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:21.107585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:21.122183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21390, node 1 2025-04-07T13:48:21.302823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:21.302845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:21.302853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:21.302965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16848 2025-04-07T13:48:26.136864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567706244866967:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:26.647388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient 
is connected to server localhost:16848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:32.062557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:36.048796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:36.048821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.079648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567779259311716:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.079745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.293981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.495769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.526988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.578383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:37.635496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567779259312021:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.635557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.635762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567779259312026:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.638935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-07T13:48:37.652399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-04-07T13:48:37.652752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567779259312028:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } Trying to start YDB, gRPC: 13038, MsgBus: 14306 2025-04-07T13:48:39.780747Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567788878480700:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:39.783653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb4/r3tmp/tmpFx2rXK/pdisk_1.dat 2025-04-07T13:48:39.928010Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:39.968464Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:39.968532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:39.974402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13038, node 2 2025-04-07T13:48:40.116929Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:40.116947Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:40.116953Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:40.117039Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14306 TClient is connected to server localhost:14306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:40.644190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:40.672348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:44.766081Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567788878480700:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:44.766147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:46.243509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567818943252456:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:46.243575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:46.414768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:48:46.645190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:48:46.758670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:46.806000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:46.907115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567818943252760:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:46.907193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:46.907813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567818943252765:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:46.911390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-07T13:48:46.936082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567818943252767:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-07T13:48:47.367246Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
: Info: Success, code: 4 Trying to start YDB, gRPC: 27660, MsgBus: 29522 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb4/r3tmp/tmpSDhCdW/pdisk_1.dat 2025-04-07T13:48:48.756439Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:48.791120Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:48.804310Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:48.810915Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:48.812956Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27660, node 3 2025-04-07T13:48:48.894444Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:48.894469Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:48.894478Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:48.894576Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29522 TClient is connected to server localhost:29522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:49.330899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:49.342582Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:49.360595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-07T13:48:50.491905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567833566511375:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:50.491937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e07/r3tmp/tmpcDEDXg/pdisk_1.dat 2025-04-07T13:48:51.343485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:51.345737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:51.345834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:51.362298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:52.296102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:52.480885Z node 1 :GRPC_CLIENT DEBUG: [51600007e9d0]{trololo} Connect to grpc://localhost:8987 2025-04-07T13:48:52.482208Z node 1 :GRPC_CLIENT DEBUG: [51600007e9d0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-07T13:48:52.537193Z node 1 :GRPC_CLIENT DEBUG: [51600007e9d0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [FAIL] >> Viewer::FloatPointJsonQuery >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> FolderServiceTest::TFolderService [GOOD] >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> KqpEffects::InsertAbort_Select_Duplicates [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |84.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-04-07T13:48:50.373585Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567836100989886:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:50.373628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df3/r3tmp/tmpq3hbO5/pdisk_1.dat 2025-04-07T13:48:51.073162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:51.075719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:51.075801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:51.085850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:51.659586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:51.693228Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:54.850531Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567851688240972:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:54.850751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df3/r3tmp/tmpzHM4WM/pdisk_1.dat 2025-04-07T13:48:55.118179Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:55.235507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:55.235592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:55.240965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17097 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:55.346185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:55.353273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-04-07T13:48:54.353205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567852249471217:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:54.353451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e2e/r3tmp/tmpXBEMTS/pdisk_1.dat 2025-04-07T13:48:55.120080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:55.151140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:55.151263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:55.168498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:55.645224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:55.667982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:55.689207Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Connect to grpc://localhost:8363 2025-04-07T13:48:55.692881Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-07T13:48:55.841748Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8363: Failed to connect to remote host: Connection refused 2025-04-07T13:48:55.843150Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-07T13:48:55.843677Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8363: Failed to connect to remote host: Connection refused 2025-04-07T13:48:56.848816Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-07T13:48:56.851784Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Status 5 Not Found 2025-04-07T13:48:56.854927Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-07T13:48:56.858168Z node 1 :GRPC_CLIENT DEBUG: [51600008fdd0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates [GOOD] Test command err: Trying to start YDB, gRPC: 7332, MsgBus: 63236 2025-04-07T13:48:20.209981Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567707610845723:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.210022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f99/r3tmp/tmpFerQKz/pdisk_1.dat 2025-04-07T13:48:20.928957Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.959691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:20.972822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:20.981791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7332, node 1 2025-04-07T13:48:21.181642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:21.181661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:21.181666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:21.181736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63236 TClient is connected to server localhost:63236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:25.213447Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567707610845723:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:25.213481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:25.245956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:26.393685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:27.425180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:30.940172Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637720 Duration# 0.101361s 2025-04-07T13:48:30.940201Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.101736s 2025-04-07T13:48:35.936084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:35.936102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.030628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:36.190625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:40.588987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567793510193178:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:40.589086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.033785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.084545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.186562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.247192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.331024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.727787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.791780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567797805161019:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.791899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.791962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567797805161027:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:41.794813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:41.802202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567797805161029:2456], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:46.031670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2897, MsgBus: 22680 2025-04-07T13:48:49.445607Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567829436297935:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f99/r3tmp/tmp54MF6p/pdisk_1.dat 2025-04-07T13:48:49.568867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:49.773699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:49.773771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:49.777792Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:49.801250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2897, node 2 2025-04-07T13:48:50.017224Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:50.017242Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:50.017247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:50.017340Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22680 TClient is connected to server localhost:22680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:50.892176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:50.907459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:51.025350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:51.270659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:51.467615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:54.428787Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567829436297935:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:54.428852Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:54.832552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567850911135931:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:54.832621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:54.881238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:54.945194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:54.993311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.094367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.179895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.240694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.342211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567855206103728:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:55.342288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:55.342612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567855206103733:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:55.346148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:55.368138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567855206103735:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:56.658825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:57.444012Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490567863796038898:2494], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjZmM2FhZmUtZWJhM2ZlMDktM2FjOWU1NzAtZjhjOTU1MzE=. CustomerSuppliedId : . TraceId : 01jr885t75c9aacsywwkw670cp. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T13:48:57.444852Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490567863796038899:2495], TxId: 281474976710675, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr885t75c9aacsywwkw670cp. SessionId : ydb://session/3?node_id=2&id=ZjZmM2FhZmUtZWJhM2ZlMDktM2FjOWU1NzAtZjhjOTU1MzE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490567863796038876:2465], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:48:57.452398Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjZmM2FhZmUtZWJhM2ZlMDktM2FjOWU1NzAtZjhjOTU1MzE=, ActorId: [2:7490567859501071358:2465], ActorState: ExecuteState, TraceId: 01jr885t75c9aacsywwkw670cp, Create QueryResponse for error on request, msg: |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> TAccessServiceTest::Authenticate [GOOD] >> ObjectStorageListingTest::FilterListing [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> 
TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-07T13:48:56.868610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567863374918094:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:56.868647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e0d/r3tmp/tmpaHqqZ3/pdisk_1.dat 2025-04-07T13:48:57.357703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:57.357789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:57.359679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:57.363360Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18324 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:57.733590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:57.762661Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:57.882460Z node 1 :GRPC_CLIENT DEBUG: [5160000807d0] Connect to grpc://localhost:8721 2025-04-07T13:48:57.883790Z node 1 :GRPC_CLIENT DEBUG: [5160000807d0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-04-07T13:48:57.934784Z node 1 :GRPC_CLIENT DEBUG: [5160000807d0] Status 7 Permission Denied 2025-04-07T13:48:57.943108Z node 1 :GRPC_CLIENT DEBUG: [5160000807d0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-04-07T13:48:57.961434Z node 1 :GRPC_CLIENT DEBUG: [5160000807d0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-04-07T13:48:59.011969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:59.012284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:59.012549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001277/r3tmp/tmpVNQx4s/pdisk_1.dat 2025-04-07T13:48:59.456316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.519969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:59.590394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:59.590590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:59.605860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:59.747866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.790971Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:48:59.791210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:59.834728Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:59.834867Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:48:59.836751Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:48:59.836879Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:48:59.836952Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:48:59.837305Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:48:59.873737Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:48:59.873972Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:48:59.874143Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:48:59.874184Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:48:59.874220Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:48:59.874256Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:48:59.874734Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:48:59.874839Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:48:59.875241Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:48:59.875330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:48:59.875391Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:48:59.875432Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:48:59.875477Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:48:59.875642Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:48:59.875890Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:48:59.875982Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:48:59.877743Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:48:59.891673Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:48:59.891810Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:00.111102Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:49:00.121125Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:49:00.121236Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:00.122449Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:00.122516Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:00.122580Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:49:00.122855Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:49:00.123067Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:49:00.123765Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:00.123841Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:49:00.126332Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:49:00.126805Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:00.128958Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:49:00.129017Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:00.129826Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:49:00.129915Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:49:00.130001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:00.131333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:00.131381Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:00.131438Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:49:00.131505Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:49:00.131578Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:49:00.131671Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:00.135411Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:00.137488Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:49:00.137629Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:49:00.137685Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:49:00.156065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:00.156195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:00.156285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:00.162049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:49:00.176895Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:00.390591Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:00.393561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:49:00.838468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr885xbrecjpa6sm1ntawjkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQyYWNkNmMtYTA1ZDljN2YtMmNjMmFjMmItOGQxZDg0MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:49:00.852587Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:49:00.852927Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:00.866677Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:00.866827Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:00.870747Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:49:00.871011Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-07T13:49:00.871210Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-04-07T13:49:00.871406Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:49:00.876175Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] 2025-04-07T13:49:00.876460Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-07T13:49:00.876683Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-04-07T13:49:00.876867Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> DataShardSnapshots::LockedWriteReuseAfterCommit >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataShardSnapshots::MvccSnapshotTailCleanup >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> DataShardSnapshots::LockedWriteBulkUpsertConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> 
FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-04-07T13:48:57.537146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567867410758963:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:57.537189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e29/r3tmp/tmp59uDL6/pdisk_1.dat 2025-04-07T13:48:58.152862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.194179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:58.194294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:58.197829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:58.651736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:58.669413Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:58.825205Z node 1 :GRPC_CLIENT DEBUG: [51600006f0d0] Connect to grpc://localhost:16448 2025-04-07T13:48:58.825987Z node 1 :GRPC_CLIENT DEBUG: [51600006f0d0] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-07T13:48:58.873488Z node 1 :GRPC_CLIENT DEBUG: [51600006f0d0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-04-07T13:48:58.874469Z node 1 :GRPC_CLIENT DEBUG: [51600006f3d0] Connect to grpc://localhost:3015 2025-04-07T13:48:58.875083Z node 1 :GRPC_CLIENT DEBUG: [51600006f3d0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-07T13:48:58.902430Z node 1 :GRPC_CLIENT DEBUG: [51600006f3d0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-04-07T13:48:58.904153Z node 1 :GRPC_CLIENT DEBUG: [51600006f3d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-07T13:48:58.906483Z node 1 :GRPC_CLIENT DEBUG: [51600006f3d0] Status 5 Not Found 2025-04-07T13:48:58.907075Z node 1 :GRPC_CLIENT DEBUG: [51600006f0d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-07T13:48:58.912616Z node 1 :GRPC_CLIENT DEBUG: [51600006f0d0] Status 5 Not Found >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-04-07T13:48:54.055497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567851299024291:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3b/r3tmp/tmputTbNE/pdisk_1.dat 2025-04-07T13:48:54.428958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:54.674820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:54.674958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:54.676713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:54.700473Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to 
server localhost:13214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:55.023265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:58.266577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567871145407925:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:58.266604Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3b/r3tmp/tmp4IXlTk/pdisk_1.dat 2025-04-07T13:48:58.526135Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.596330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:58.596425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:58.598379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:58.761883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2025-04-07T13:48:56.109633Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567862406094263:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:56.128983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00156c/r3tmp/tmpTvxSIC/pdisk_1.dat 2025-04-07T13:48:56.791984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:56.792096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:56.807290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:56.809074Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14944 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:48:57.092260Z node 1 :TX_PROXY DEBUG: actor# [1:7490567862406094302:2132] Handle TEvNavigate describe path dc-1 2025-04-07T13:48:57.092311Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567866701062040:2433] HANDLE EvNavigateScheme dc-1 2025-04-07T13:48:57.092400Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567862406094331:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:57.092446Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490567862406094331:2146], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:48:57.092637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:48:57.094486Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126690:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567866701062046:2434] 2025-04-07T13:48:57.094551Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567858111126690:2054] Subscribe: subscriber# [1:7490567866701062046:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:57.094593Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126693:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567866701062047:2434] 2025-04-07T13:48:57.094608Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567858111126693:2057] Subscribe: subscriber# [1:7490567866701062047:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:57.094648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062046:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567858111126690:2054] 2025-04-07T13:48:57.094670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062047:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567858111126693:2057] 2025-04-07T13:48:57.094716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567866701062043:2434] 2025-04-07T13:48:57.094749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567866701062044:2434] 2025-04-07T13:48:57.094798Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490567866701062041:2434][/dc-1] Set up state: owner# [1:7490567862406094331:2146], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:57.094918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062045:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7490567866701062042:2434], cookie# 1 2025-04-07T13:48:57.094937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062046:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567866701062043:2434], cookie# 1 2025-04-07T13:48:57.094949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062047:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567866701062044:2434], cookie# 1 2025-04-07T13:48:57.094969Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126690:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567866701062046:2434] 2025-04-07T13:48:57.094987Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126690:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567866701062046:2434], cookie# 1 2025-04-07T13:48:57.095004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126693:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567866701062047:2434] 2025-04-07T13:48:57.095016Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126693:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567866701062047:2434], cookie# 1 2025-04-07T13:48:57.096732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126687:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567866701062045:2434] 2025-04-07T13:48:57.096777Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567858111126687:2051] Subscribe: subscriber# [1:7490567866701062045:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:57.096831Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126687:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567866701062045:2434], cookie# 1 2025-04-07T13:48:57.096865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062046:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567858111126690:2054], cookie# 1 2025-04-07T13:48:57.096879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062047:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567858111126693:2057], cookie# 1 2025-04-07T13:48:57.096901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062045:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567858111126687:2051] 2025-04-07T13:48:57.096921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567866701062045:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567858111126687:2051], cookie# 1 2025-04-07T13:48:57.096968Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567866701062043:2434], cookie# 1 2025-04-07T13:48:57.096984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:48:57.096997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567866701062044:2434], cookie# 1 
2025-04-07T13:48:57.097014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:48:57.097059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567866701062042:2434] 2025-04-07T13:48:57.097117Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490567866701062041:2434][/dc-1] Path was already updated: owner# [1:7490567862406094331:2146], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:57.097143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567866701062042:2434], cookie# 1 2025-04-07T13:48:57.097157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567866701062041:2434][/dc-1] Unexpected sync response: sender# [1:7490567866701062042:2434], cookie# 1 2025-04-07T13:48:57.097190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567858111126687:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567866701062045:2434] 2025-04-07T13:48:57.173349Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490567862406094331:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:48:57.173716Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490567862406094331:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... lPathId: 2] was 2 2025-04-07T13:48:58.710250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:48:58.710262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:48:58.710331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:48:58.715813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-07T13:48:58.715845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-07T13:48:58.716099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-07T13:48:58.716108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-07T13:48:58.716237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-07T13:48:58.716243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-07T13:48:58.716264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-07T13:48:58.716269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-07T13:48:58.716282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-07T13:48:58.716289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-07T13:48:58.716331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-07T13:48:58.716340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-07T13:48:58.716360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-07T13:48:58.716365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-07T13:48:58.716374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-07T13:48:58.716388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-07T13:48:58.716408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-04-07T13:48:58.716436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:48:58.716449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:48:58.716461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:48:58.716506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:48:58.726639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:48:59.025310Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567870593532295:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.025493Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567870593532295:2100], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.025532Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7490567870593532295:2100], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-07T13:48:59.025705Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490567874888499911:2293][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:48:59.026281Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490567874888499911:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7490567874888499912:2293] 2025-04-07T13:48:59.026320Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490567874888499911:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7490567874888499913:2293] 2025-04-07T13:48:59.026350Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490567874888499911:2293][/dc-1/USER_0] Set up state: owner# [3:7490567870593532295:2100], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:59.026374Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490567874888499911:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7490567874888499914:2293] 2025-04-07T13:48:59.026399Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490567874888499911:2293][/dc-1/USER_0] Ignore empty state: owner# [3:7490567870593532295:2100], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:59.026444Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490567870593532295:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-07T13:48:59.026525Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490567870593532295:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7490567874888499911:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:48:59.026622Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490567870593532295:2100], cacheItem# { Subscriber: { Subscriber: [3:7490567874888499911:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:48:59.026694Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567874888499918:2294], recipient# [3:7490567874888499910:2292], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.026765Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567870593532295:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.026915Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567874888499919:2295], recipient# [3:7490567874888499909:2293], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.027788Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:00.028813Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567870593532295:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:00.028950Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567879183467218:2297], recipient# [3:7490567879183467217:2294], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:00.032809Z node 3 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:01.037132Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567870593532295:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:01.037262Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567883478434516:2298], recipient# [3:7490567883478434515:2295], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:01.037454Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpLimits::KqpMkqlMemoryLimitException |84.4%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-07T13:48:55.750587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567859151825491:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:55.750627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001570/r3tmp/tmpHfeNHg/pdisk_1.dat 2025-04-07T13:48:56.498026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:56.498107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:56.511398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:56.515826Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9372 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:48:56.799531Z node 1 :TX_PROXY DEBUG: actor# [1:7490567859151825710:2138] Handle TEvNavigate describe path dc-1 2025-04-07T13:48:56.799588Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793457:2439] HANDLE EvNavigateScheme dc-1 2025-04-07T13:48:56.799690Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567859151825735:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:56.799782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490567859151825735:2152], cookie# 1 2025-04-07T13:48:56.801371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793444:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793441:2434], cookie# 1 2025-04-07T13:48:56.801418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793445:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793442:2434], cookie# 1 2025-04-07T13:48:56.801432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793446:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793443:2434], cookie# 1 2025-04-07T13:48:56.801466Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825378:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793444:2434], cookie# 1 2025-04-07T13:48:56.801488Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825381:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793445:2434], cookie# 1 2025-04-07T13:48:56.801503Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825384:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567863446793446:2434], cookie# 1 2025-04-07T13:48:56.801543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793444:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567859151825378:2051], cookie# 1 2025-04-07T13:48:56.801556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793445:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567859151825381:2054], cookie# 1 2025-04-07T13:48:56.801575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567863446793446:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567859151825384:2057], cookie# 1 2025-04-07T13:48:56.801626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567863446793441:2434], cookie# 1 2025-04-07T13:48:56.801643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:48:56.801656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490567863446793442:2434], cookie# 1 2025-04-07T13:48:56.801673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:48:56.801694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567863446793443:2434], cookie# 1 2025-04-07T13:48:56.801708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc-1] Unexpected sync response: sender# [1:7490567863446793443:2434], cookie# 1 2025-04-07T13:48:56.801775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490567859151825735:2152], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:48:56.811294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490567859151825735:2152], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490567863446793440:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:48:56.811419Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490567859151825735:2152], cacheItem# { Subscriber: { Subscriber: [1:7490567863446793440:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:48:56.817609Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490567863446793458:2440], recipient# [1:7490567863446793457:2439], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:48:56.817673Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793457:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:48:56.866261Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793457:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:48:56.868596Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793457:2439] Handle TEvDescribeSchemeResult Forward to# [1:7490567863446793456:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:48:56.889142Z node 1 :TX_PROXY DEBUG: actor# [1:7490567859151825710:2138] Handle TEvProposeTransaction 2025-04-07T13:48:56.889168Z node 1 :TX_PROXY DEBUG: actor# [1:7490567859151825710:2138] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:48:56.889255Z node 1 :TX_PROXY DEBUG: actor# [1:7490567859151825710:2138] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490567863446793465:2446] 2025-04-07T13:48:56.983776Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793465:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:48:56.984100Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567863446793465:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:48:56.984185Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567859151825735:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:56.984271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567863446793440:2434][/dc ... us: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744033738800 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744033738800 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... 
(TRUNCATED) 2025-04-07T13:48:59.138159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825378:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567866621141558:2100] 2025-04-07T13:48:59.138197Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567859151825378:2051] Unsubscribe: subscriber# [3:7490567866621141558:2100], path# /dc-1/USER_0 2025-04-07T13:48:59.138224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825381:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567866621141559:2100] 2025-04-07T13:48:59.138236Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567859151825381:2054] Unsubscribe: subscriber# [3:7490567866621141559:2100], path# /dc-1/USER_0 2025-04-07T13:48:59.138255Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567859151825384:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567866621141563:2100] 2025-04-07T13:48:59.138265Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567859151825384:2057] Unsubscribe: subscriber# [3:7490567866621141563:2100], path# /dc-1/USER_0 2025-04-07T13:48:59.138678Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T13:48:59.139716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:48:59.749010Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567866621141561:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.749109Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490567866621141561:2102], cacheItem# { Subscriber: { Subscriber: [3:7490567870916109151:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:48:59.749201Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567875211076587:2366], recipient# [3:7490567875211076586:2300], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:00.752836Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567866621141561:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:00.752975Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490567866621141561:2102], cacheItem# { Subscriber: { Subscriber: [3:7490567870916109151:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:00.753084Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567879506043885:2367], recipient# [3:7490567879506043884:2301], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:01.753215Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567866621141561:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:01.753345Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490567866621141561:2102], cacheItem# { Subscriber: { Subscriber: [3:7490567870916109151:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:01.753451Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567883801011183:2368], recipient# [3:7490567883801011182:2302], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> YdbTableBulkUpsert::Overload [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> KqpQuery::TryToUpdateNonExistentColumn >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> KqpStats::JoinNoStatsScan >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> KqpParams::MissingParameter >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpStats::MultiTxStatsFullYql >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpQuery::Now >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteScheme ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-04-07T13:48:53.051542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:53.051775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:53.051959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d7/r3tmp/tmp4ltOI8/pdisk_1.dat 2025-04-07T13:48:53.770390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:48:53.775611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:53.786072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:48:53.787062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:53.789025Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-07T13:48:53.789071Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2025-04-07T13:48:53.823769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:53.839978Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-07T13:48:53.897290Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:305:2345] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-07T13:48:53.897404Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-07T13:48:53.897523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:53.897583Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:48:53.897613Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:48:53.897654Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:48:53.897691Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:48:53.897786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:48:53.898010Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-07T13:48:53.898063Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-07T13:48:53.898116Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-07T13:48:53.898164Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:48:53.898296Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-07T13:48:53.909109Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-07T13:48:53.909173Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:305:2345]) 2025-04-07T13:48:53.909231Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:48:53.909667Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-07T13:48:53.909736Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:305:2345])::Execute 2025-04-07T13:48:53.909791Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:48:53.909852Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:305:2345])::Complete 2025-04-07T13:48:53.910017Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443315200 } 2025-04-07T13:48:53.910073Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-07T13:48:53.910115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:53.910241Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-07T13:48:53.910288Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:48:53.910315Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:48:53.910418Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-07T13:48:53.910444Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-07T13:48:53.910484Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-07T13:48:53.910515Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:48:53.921345Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-07T13:48:53.921402Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:48:54.041421Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-07T13:48:54.041506Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T13:48:54.041825Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-07T13:48:54.042179Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-07T13:48:54.042233Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2025-04-07T13:48:54.043033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-07T13:48:54.044030Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T13:48:54.044108Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-07T13:48:54.044142Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-07T13:48:54.044190Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-07T13:48:54.044723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:54.044783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T13:48:54.045976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T13:48:54.048092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:54.053808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:54.053888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:54.054681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-07T13:48:54.062775Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-07T13:48:54.090706Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-07T13:48:54.090794Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-07T13:48:54.091008Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-07T13:48:54.091066Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-07T13:48:54.091118Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-07T13:48:54.091287Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-07T13:48:54.091811Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-07T13:48:54.091924Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-07T13:48:54.092456Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-07T13:48:54.093352Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-07T13:48:54.093555Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-07T13:48:54.093738Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-07T13:48:54.093929Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-07T13:48:54.094000Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... omplete(72075186224037888 OK) 2025-04-07T13:49:03.500727Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-04-07T13:49:03.501125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:49:03.501318Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-07T13:49:03.501368Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-07T13:49:03.501692Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-04-07T13:49:03.512951Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:49:03.514563Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2025-04-07T13:49:03.514647Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3001 Status# 16 SEND to# [2:380:2375] Proxy marker# C1 2025-04-07T13:49:03.525571Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-04-07T13:49:03.644197Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2025-04-07T13:49:03.644294Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T13:49:03.644350Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2025-04-07T13:49:03.644622Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-04-07T13:49:03.645156Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2025-04-07T13:49:03.645245Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:380:2375] Proxy 2025-04-07T13:49:03.645979Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 
281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T13:49:03.646037Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:49:03.646199Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:03.646693Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:49:03.646745Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:03.646799Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-07T13:49:03.647011Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-07T13:49:03.647150Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:49:03.647334Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:49:03.647418Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-07T13:49:03.647846Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:03.650881Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2025-04-07T13:49:03.650952Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-07T13:49:03.651043Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:03.651799Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T13:49:03.651902Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-07T13:49:03.651965Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-07T13:49:03.651998Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2025-04-07T13:49:03.652043Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2025-04-07T13:49:03.652425Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:03.652503Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:49:03.652564Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2025-04-07T13:49:03.652686Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:49:03.653603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2025-04-07T13:49:03.656543Z node 
2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-04-07T13:49:03.656680Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2025-04-07T13:49:03.657111Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2025-04-07T13:49:03.657183Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T13:49:03.657981Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2025-04-07T13:49:03.658407Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:49:03.659421Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-07T13:49:03.659540Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-07T13:49:03.659611Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-07T13:49:03.659664Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-07T13:49:03.659716Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 TRACE ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-07T13:49:03.659793Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-07T13:49:03.659867Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-07T13:49:03.659919Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-07T13:49:03.659974Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.659 INFO ydb-core-tx-datashard-ut_minstep(pid=34256, 
tid=0x00007FEB298EBC80) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-07T13:49:03.660147Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.660 NOTE ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Execution, code: 1060 2025-04-07T13:49:03.660200Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.660 NOTE ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [common provider] yql_provider_gateway.cpp:21:
<main>:1:12: Info: Executing DROP TABLE 2025-04-07T13:49:03.660242Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODRmMzQ1MzgtY2ZmZmI4OTgtMjFlYjg5NDgtYTcwYmRlMzI= 2025-04-07 13:49:03.660 NOTE ydb-core-tx-datashard-ut_minstep(pid=34256, tid=0x00007FEB298EBC80) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-07T13:49:03.674034Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T13:49:03.674315Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-07T13:49:03.676237Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T13:49:03.677260Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-07T13:49:03.677720Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-07T13:49:03.677788Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-07T13:49:03.677901Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-07T13:49:03.678039Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-07T13:49:03.678169Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> KqpQuery::QueryTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.147856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.147981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.148022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.148070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.148126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.148164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.148220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.148522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.231820Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.231907Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.268251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:58.268376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.268544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:58.287701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:58.287917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:58.288554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.288777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:58.293170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.294398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.294458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.294572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:58.294617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.294667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:58.294835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.302288Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:58.459112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:58.459331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.459580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:58.459791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:58.459848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.477614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.477759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:58.477952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.478004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:58.478036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:58.478066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:58.484644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.484741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:58.484785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:58.490902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.490962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.491015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.491064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.502655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:58.509421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:58.509670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:58.510670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.510815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:58.510866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.511149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:58.511205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.511362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.511442Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:58.521746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.521820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.522019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.522061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:58.522346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.522393Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:58.522514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:58.522549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.522592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:58.522644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.522691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:58.522726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:58.522818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:58.522872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:58.522920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:58.529016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.529187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.529225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:58.529268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:58.529317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.529468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
LAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:49:04.640285Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.646925Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:123:2149] sender: [7:236:2058] recipient: [7:15:2062] 2025-04-07T13:49:04.656454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:04.656677Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.656876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:49:04.657057Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:49:04.657101Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.659180Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:04.659280Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:49:04.659474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.659527Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:49:04.659565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:49:04.659616Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:49:04.661299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.661355Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:49:04.661402Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:49:04.662946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.662995Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.663047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:04.663106Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:49:04.663259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:49:04.664643Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:49:04.664839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:49:04.665560Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:04.665733Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 30064773225 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:04.665776Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:04.666027Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:49:04.666086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:04.666279Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:04.666362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:49:04.667976Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:04.668016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:04.668175Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:04.668229Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:49:04.668298Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:04.668344Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:49:04.668466Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:49:04.668509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:04.668548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:49:04.668584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:04.668620Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:49:04.668648Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-07T13:49:04.668744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:04.668793Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:49:04.668839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:49:04.669770Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:04.669867Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:04.669910Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:49:04.669951Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:49:04.669989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:04.670079Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:49:04.672074Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:49:04.672578Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:49:04.673232Z node 7 :TX_PROXY DEBUG: actor# [7:266:2258] Bootstrap 2025-04-07T13:49:04.689581Z node 7 :TX_PROXY DEBUG: actor# [7:266:2258] Become StateWork (SchemeCache [7:271:2263]) 2025-04-07T13:49:04.691554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:04.691706Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-04-07T13:49:04.691744Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-04-07T13:49:04.691873Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-07T13:49:04.691907Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-07T13:49:04.692942Z node 7 :TX_PROXY DEBUG: actor# [7:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:04.701444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:04.701635Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-04-07T13:49:04.702022Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> KqpExplain::Explain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2025-04-07T13:48:57.170855Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567865704832495:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:57.170890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00155b/r3tmp/tmpb2LkTO/pdisk_1.dat 2025-04-07T13:48:57.812717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:57.864390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:57.864450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:57.878517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1318 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:48:58.132829Z node 1 :TX_PROXY DEBUG: actor# [1:7490567865704832506:2138] Handle TEvNavigate describe path dc-1 2025-04-07T13:48:58.132897Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869999800242:2441] HANDLE EvNavigateScheme dc-1 2025-04-07T13:48:58.132987Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567865704832529:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:58.133019Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490567865704832529:2151], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:48:58.133168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:48:58.134839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864877:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567869999800247:2442] 2025-04-07T13:48:58.134890Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864877:2051] Subscribe: subscriber# [1:7490567869999800247:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:58.134937Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864880:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567869999800248:2442] 2025-04-07T13:48:58.134952Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864880:2054] Subscribe: subscriber# [1:7490567869999800248:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:58.134989Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864883:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490567869999800249:2442] 2025-04-07T13:48:58.135005Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864883:2057] Subscribe: subscriber# [1:7490567869999800249:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:48:58.135068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800247:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567861409864877:2051] 2025-04-07T13:48:58.135109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800248:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567861409864880:2054] 2025-04-07T13:48:58.135128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800249:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567861409864883:2057] 2025-04-07T13:48:58.135158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567869999800244:2442] 2025-04-07T13:48:58.135188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567869999800245:2442] 2025-04-07T13:48:58.135244Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490567869999800243:2442][/dc-1] Set up state: owner# [1:7490567865704832529:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:58.141953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864877:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567869999800247:2442] 2025-04-07T13:48:58.142011Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864880:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567869999800248:2442] 2025-04-07T13:48:58.142024Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864883:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490567869999800249:2442] 2025-04-07T13:48:58.142062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490567869999800246:2442] 2025-04-07T13:48:58.142118Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490567869999800243:2442][/dc-1] Path was already updated: owner# [1:7490567865704832529:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:48:58.142157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800247:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800244:2442], cookie# 1 2025-04-07T13:48:58.142182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800248:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800245:2442], cookie# 1 2025-04-07T13:48:58.142208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800249:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800246:2442], cookie# 1 2025-04-07T13:48:58.142246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864877:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800247:2442], cookie# 1 2025-04-07T13:48:58.142269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864880:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800248:2442], cookie# 1 2025-04-07T13:48:58.142302Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864883:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869999800249:2442], cookie# 1 2025-04-07T13:48:58.142338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800247:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861409864877:2051], cookie# 1 2025-04-07T13:48:58.142353Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800248:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861409864880:2054], cookie# 1 2025-04-07T13:48:58.142364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869999800249:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861409864883:2057], cookie# 1 2025-04-07T13:48:58.142400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869999800244:2442], cookie# 1 2025-04-07T13:48:58.142416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:48:58.142443Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869999800245:2442], cookie# 1 2025-04-07T13:48:58.142462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:48:58.142507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869999800246:2442], cookie# 1 2025-04-07T13:48:58.142521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869999800243:2442][/dc-1] Unexpected sync response: sender# [1:7490567869999800246:2442], cookie# 1 2025-04-07T13:48:58.197022Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490567865704832529:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:48:58.198546Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490567865704832529:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 19791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567882884702690:2857][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7490567882884702693:2857], cookie# 2 2025-04-07T13:49:01.319803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567882884702690:2857][/dc-1/USER_0/SimpleTable] Unexpected sync response: sender# [1:7490567882884702693:2857], cookie# 2 2025-04-07T13:49:01.319842Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490567865704832529:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2025-04-07T13:49:01.319897Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490567865704832529:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490567882884702690:2857] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744033741250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-04-07T13:49:01.319969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490567865704832529:2151], cacheItem# { Subscriber: { Subscriber: [1:7490567882884702690:2857] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744033741250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-07T13:49:01.320115Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490567882884702700:2861], recipient# [1:7490567882884702699:2860], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:01.320157Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567882884702699:2860] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:01.320206Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567882884702699:2860] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: 
"/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-04-07T13:49:01.321078Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567882884702699:2860] Handle TEvDescribeSchemeResult Forward to# [1:7490567882884702698:2859] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744033741250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744033741250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... 
(TRUNCATED) 2025-04-07T13:49:01.397429Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864883:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567873282103451:2095] 2025-04-07T13:49:01.397466Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864883:2057] Unsubscribe: subscriber# [3:7490567873282103451:2095], path# /dc-1/USER_0 2025-04-07T13:49:01.397858Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864877:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567873282103449:2095] 2025-04-07T13:49:01.397871Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864877:2051] Unsubscribe: subscriber# [3:7490567873282103449:2095], path# /dc-1/USER_0 2025-04-07T13:49:01.397890Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861409864880:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7490567873282103450:2095] 2025-04-07T13:49:01.397896Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490567861409864880:2054] Unsubscribe: subscriber# [3:7490567873282103450:2095], path# /dc-1/USER_0 2025-04-07T13:49:01.402144Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T13:49:01.402870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:49:02.028946Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490567873282103461:2104], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:02.029048Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490567873282103461:2104], cacheItem# { Subscriber: { Subscriber: [3:7490567877577071009:2267] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:02.029105Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490567886167005788:2370], recipient# [3:7490567886167005787:2300], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> 
KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> Viewer::JsonAutocompleteColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow >> KqpLimits::OutOfSpaceBulkUpsertFail >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.902739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.902846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.902887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.902919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.902959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.902993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.903064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.903393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.972117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.972171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.989215Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:58.989344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.989482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:59.007002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:59.007196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:59.007754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.007941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:59.010224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.011325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.011380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.011506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:59.011553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.011588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:59.011722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.018291Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:59.159413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:59.159613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.159802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:59.159974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:59.160020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.167415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.167544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:59.167706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T13:48:59.167776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:59.167868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:59.167904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:59.172776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.172843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:59.172876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:59.174837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.174879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.174915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.174967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.178402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:59.180430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:59.180608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:59.181565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.181688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:59.181727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.181977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:59.182022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.182167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.182228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T13:48:59.184600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.184646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.184802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.184837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:59.185062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.185104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:59.185201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:59.185237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.185274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:59.185310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.185354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:59.185382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:59.185439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:59.185471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:59.185515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:59.187375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.187473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.187507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:59.187544Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:59.187580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.187682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
escribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:05.993261Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:05.993310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:49:05.993403Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:05.993463Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-07T13:49:05.993520Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-07T13:49:05.994782Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:05.994896Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:05.994943Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:05.994992Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-07T13:49:05.995044Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T13:49:05.995140Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:49:05.996735Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-07T13:49:05.996836Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:05.996948Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:392:2363], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:49:05.997042Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-07T13:49:05.997073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-07T13:49:05.997209Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-07T13:49:05.997244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:486:2430], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-07T13:49:05.998663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:05.998728Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:49:05.998890Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:49:05.998935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:05.999004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:49:05.999053Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:05.999106Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:49:05.999152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:49:05.999242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:05.999553Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-07T13:49:06.001044Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.001136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:49:06.002800Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:49:06.002861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:49:06.003339Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:49:06.003442Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:49:06.003500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:562:2504] TestWaitNotification: OK eventTxId 103 2025-04-07T13:49:06.003993Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:06.004199Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 232us result status StatusSuccess 2025-04-07T13:49:06.004597Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 
2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.005168Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:06.005352Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 216us result status StatusSuccess 2025-04-07T13:49:06.005716Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.006278Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-04-07T13:49:06.006436Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 183us result status StatusSuccess 2025-04-07T13:49:06.006761Z node 7 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 22658, MsgBus: 25150 2025-04-07T13:48:20.860637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567707311037625:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.860688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bec/r3tmp/tmpH2Nl28/pdisk_1.dat 2025-04-07T13:48:21.535740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:21.535817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:21.549149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22658, node 1 2025-04-07T13:48:21.920068Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:21.920242Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:22.232224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:22.232241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:22.232247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:22.232619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:22.851773Z node 1 :IMPORT WARN: Table profiles were not loaded 
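The FLAT_TX_SCHEMESHARD trace above walks one operation through its numeric states (2 -> 3 -> 128 -> 240) before TDone, and a later operation (txId 103) through 240 -> 240. When reading long traces like this, it helps to collect the "Change state for txid" entries per operation part. A minimal, illustrative helper (not part of the YDB codebase; it only assumes the log wording shown above) that reads the raw log from stdin:

// Prints the state chain per transaction part, e.g. "1:0  2 -> 3 -> 128 -> 240".
#include <iostream>
#include <map>
#include <regex>
#include <string>
#include <vector>

int main() {
    // Matches entries such as "Change state for txid 1:0 2 -> 3".
    const std::regex re(R"(Change state for txid (\d+:\d+) (\d+) -> (\d+))");
    std::map<std::string, std::vector<std::string>> chains;
    std::string line;
    while (std::getline(std::cin, line)) {
        for (auto it = std::sregex_iterator(line.begin(), line.end(), re);
             it != std::sregex_iterator(); ++it) {
            auto& chain = chains[(*it)[1].str()];
            if (chain.empty()) chain.push_back((*it)[2].str());
            chain.push_back((*it)[3].str());
        }
    }
    for (const auto& [txPart, chain] : chains) {
        std::cout << txPart << "  ";
        for (size_t i = 0; i < chain.size(); ++i)
            std::cout << (i ? " -> " : "") << chain[i];
        std::cout << "\n";
    }
}

Fed this run's output, it would print chains such as "1:0  2 -> 3 -> 128 -> 240" for the alter-subdomain operation above.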
2025-04-07T13:48:22.989091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:26.393014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567707311037625:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:26.393052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25150 TClient is connected to server localhost:25150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:35.893456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:35.932298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:36.012728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:48:36.048233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:48:36.753370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:36.753396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:38.084444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567784620449678:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.084539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.629819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.762645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.799946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.882956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.944177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567784620449991:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.944243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.944349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567784620449996:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:38.946610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-07T13:48:38.953700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567784620449998:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } Trying to start YDB, gRPC: 16745, MsgBus: 14274 2025-04-07T13:48:40.073296Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567793087448562:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:40.073662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bec/r3tmp/tmpQKH4sw/pdisk_1.dat 2025-04-07T13:48:40.230133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:40.230202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:40.231983Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:40.245543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16745, node 2 2025-04-07T13:48:40.431712Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:40.431729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:40.431734Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:40.431809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14274 TClient is connected to server localhost:14274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:41.194013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.201505Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.222680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:41.397486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
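The repeated "Resource pool default not found or you don't have access permissions" warnings above are emitted while the default resource pool does not yet exist: the fetch returns NOT_FOUND, an ESchemeOpCreateResourcePool operation follows, and the pool creator schedules a retry that "doublechecks" once the create transaction completes. A generic sketch of that fetch/create/doublecheck pattern (illustrative only; the function names are hypothetical and this is not the actual KQP workload-service code):

#include <chrono>
#include <iostream>
#include <thread>

enum class EStatus { Success, NotFound };

namespace {
int g_createRequests = 0;

EStatus FetchDefaultPool() {
    // Simulated lookup: the pool only becomes visible after creation was requested.
    return g_createRequests > 0 ? EStatus::Success : EStatus::NotFound;
}

void RequestCreateDefaultPool() { ++g_createRequests; }
}  // namespace

bool EnsureDefaultPool(int maxAttempts = 5) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (FetchDefaultPool() == EStatus::Success)
            return true;                      // pool exists, queries can be admitted
        RequestCreateDefaultPool();           // kick off creation
        // "doublechecking": wait for the scheduled retry, then re-fetch instead of
        // trusting the create call alone (another session may have won the race).
        std::this_thread::sleep_for(std::chrono::milliseconds(50 * attempt));
        if (FetchDefaultPool() == EStatus::Success)
            return true;
    }
    return false;
}

int main() {
    std::cout << (EnsureDefaultPool() ? "default pool ready\n" : "gave up\n");
}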
2025-04-07T13:48:41.809031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:41.976416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:45.073836Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567793087448562:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:45.074179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:47.036106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567823152221353:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: ... Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.422848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.546651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567823152221867:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.546711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.546991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567823152221872:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:47.549507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:47.562865Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:48:47.563273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567823152221874:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:48.916614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-07T13:48:49.552445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.276319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-07T13:48:51.351096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.351166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:48:53.931441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T13:48:54.978735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:48:55.091427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.193044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:55.193066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.380160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710720:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9753, MsgBus: 16400 2025-04-07T13:48:59.400822Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567873084000856:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:59.400954Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bec/r3tmp/tmp3Rvjde/pdisk_1.dat 2025-04-07T13:48:59.773265Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:59.788909Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:59.788996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:59.790559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9753, node 3 2025-04-07T13:48:59.889010Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:59.889028Z node 
3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:59.889040Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:59.889150Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16400 TClient is connected to server localhost:16400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:00.518357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:00.537431Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:00.550296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:00.726214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:00.989549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:01.136052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:03.571272Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567890263871730:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:03.571431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:03.621670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.659815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.690081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.729565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.763936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.806429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.856715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567890263872223:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:03.856799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:03.857022Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567890263872228:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:03.861263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:03.870692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567890263872230:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:04.361698Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567873084000856:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:04.361769Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2025-04-07T13:48:57.438760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567865690852104:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:57.438825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001553/r3tmp/tmpuHV9pM/pdisk_1.dat 2025-04-07T13:48:58.295601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.303537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:58.303623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:58.311823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27296 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:48:58.626495Z node 1 :TX_PROXY DEBUG: actor# [1:7490567865690852127:2138] Handle TEvNavigate describe path dc-1 2025-04-07T13:48:58.626558Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819876:2439] HANDLE EvNavigateScheme dc-1 2025-04-07T13:48:58.626697Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567865690852150:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:58.626818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490567865690852150:2151], cookie# 1 2025-04-07T13:48:58.628428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819860:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819857:2431], cookie# 1 2025-04-07T13:48:58.628476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819861:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819858:2431], cookie# 1 2025-04-07T13:48:58.628493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819862:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819859:2431], cookie# 1 2025-04-07T13:48:58.628533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884498:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819860:2431], cookie# 1 
2025-04-07T13:48:58.628563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884501:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819861:2431], cookie# 1 2025-04-07T13:48:58.628592Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884504:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490567869985819862:2431], cookie# 1 2025-04-07T13:48:58.628632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819860:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861395884498:2051], cookie# 1 2025-04-07T13:48:58.628644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819861:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861395884501:2054], cookie# 1 2025-04-07T13:48:58.628656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490567869985819862:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567861395884504:2057], cookie# 1 2025-04-07T13:48:58.628706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869985819857:2431], cookie# 1 2025-04-07T13:48:58.628725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:48:58.628744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869985819858:2431], cookie# 1 2025-04-07T13:48:58.628764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:48:58.628788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490567869985819859:2431], cookie# 1 2025-04-07T13:48:58.628807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/dc-1] Unexpected sync response: sender# [1:7490567869985819859:2431], cookie# 1 2025-04-07T13:48:58.628891Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490567865690852150:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:48:58.642886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490567865690852150:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490567869985819856:2431] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:48:58.643218Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490567865690852150:2151], cacheItem# { Subscriber: { Subscriber: [1:7490567869985819856:2431] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:48:58.650792Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490567869985819877:2440], recipient# [1:7490567869985819876:2439], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:48:58.650877Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819876:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:48:58.721800Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819876:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:48:58.724250Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819876:2439] Handle TEvDescribeSchemeResult Forward to# [1:7490567869985819875:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:48:58.761179Z node 1 :TX_PROXY DEBUG: actor# [1:7490567865690852127:2138] Handle TEvProposeTransaction 2025-04-07T13:48:58.761217Z node 1 :TX_PROXY DEBUG: actor# [1:7490567865690852127:2138] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:48:58.761328Z node 1 :TX_PROXY DEBUG: actor# [1:7490567865690852127:2138] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490567869985819884:2446] 2025-04-07T13:48:59.019071Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819884:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:48:59.019170Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567869985819884:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:48:59.019296Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567865690852150:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:48:59.019375Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490567869985819856:2431][/d ... 
:2384] Ack for unknown update (already acked?): sender# [1:7490567869985819797:2387], cookie# 281474976710660 2025-04-07T13:49:00.084947Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7490567869985819794:2384] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:7490567869985819797:2387], cookie# 281474976710660 2025-04-07T13:49:00.084955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7490567869985819794:2384] Ack for unknown update (already acked?): sender# [1:7490567869985819797:2387], cookie# 281474976710660 2025-04-07T13:49:00.085192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.085246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.085254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2025-04-07T13:49:00.085265Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:49:00.085278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T13:49:00.085460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.085501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.085507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2025-04-07T13:49:00.085532Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:49:00.085541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-07T13:49:00.085568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2025-04-07T13:49:00.085580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7490567878575754928:2285] 2025-04-07T13:49:00.085700Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884498:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7490567873665013746:2108] 2025-04-07T13:49:00.085727Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884501:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7490567873665013747:2108] 
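The SCHEME_BOARD_SUBSCRIBER entries above ("size# 3, half# 1, successes# ...") count replica responses against a majority threshold: the sync is reported done once successes exceed half, and any response arriving after that is logged as an unexpected sync response. A stripped-down sketch of that accounting (illustrative only, not the actual subscriber implementation):

#include <iostream>

class TSyncQuorum {
public:
    explicit TSyncQuorum(int replicas) : Size(replicas), Half(replicas / 2) {}

    // Returns true when this response settles the sync outcome.
    bool HandleResponse(bool success) {
        if (Done) {
            std::cout << "Unexpected sync response (sync already done)\n";
            return false;
        }
        ++(success ? Successes : Failures);
        if (Successes > Half) {            // majority of replicas answered
            Done = true;
            std::cout << "Sync is done: size# " << Size << ", successes# "
                      << Successes << ", failures# " << Failures << "\n";
            return true;
        }
        if (Failures > Half) {             // majority can no longer be reached
            Done = true;
            std::cout << "Sync failed (partial)\n";
            return true;
        }
        std::cout << "Sync is in progress: successes# " << Successes << "\n";
        return false;
    }

private:
    int Size, Half;
    int Successes = 0, Failures = 0;
    bool Done = false;
};

int main() {
    TSyncQuorum quorum(3);        // three replicas, half# 1, as in the trace above
    quorum.HandleResponse(true);  // in progress
    quorum.HandleResponse(true);  // done: 2 > 1
    quorum.HandleResponse(true);  // unexpected, quorum already reached
}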
2025-04-07T13:49:00.085744Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490567861395884504:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7490567873665013748:2108] 2025-04-07T13:49:00.089320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-07T13:49:00.089342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-07T13:49:00.089351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-07T13:49:00.089361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-07T13:49:00.089773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.089803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-04-07T13:49:00.102794Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-07T13:49:00.106114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-07T13:49:00.106385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-07T13:49:00.106608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-07T13:49:00.106734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T13:49:00.106840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:00.106941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T13:49:00.107023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-07T13:49:00.107099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T13:49:00.107184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:49:00.107196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:49:00.107283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:49:00.107814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:49:00.107837Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:49:00.107878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:49:00.110533Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-04-07T13:49:00.110592Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-04-07T13:49:00.110745Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2025-04-07T13:49:00.110792Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-07T13:49:00.162574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-07T13:49:00.162616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-07T13:49:00.162659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-07T13:49:00.162665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-07T13:49:00.162682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-07T13:49:00.162688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-07T13:49:00.162704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-07T13:49:00.162717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-07T13:49:00.162744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:49:00.162785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2025-04-07T13:49:00.173988Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-07T13:49:00.450971Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490567865690852150:2151], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 
2025-04-07T13:49:00.455858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490567865690852150:2151], cacheItem# { Subscriber: { Subscriber: [1:7490567869985819864:2433] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:00.455998Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490567878575755029:2870], recipient# [1:7490567878575755028:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpParams::ImplicitParameterTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.159284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.159372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.159468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.159528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.159569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.159603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.159664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.159941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.224448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.224501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.248208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:58.248328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.248460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T13:48:58.259289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:58.259477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:58.260042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.260234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:58.261874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.262960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.263011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.263115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:58.263156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.263189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:58.263335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.277355Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:58.514570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:58.514774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.514987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:58.515218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:58.515276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.521794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.521915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:58.522151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.522202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:58.522237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T13:48:58.522275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:58.529694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.529771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:58.529844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:58.539701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.539769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.539826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.539867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.543256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:58.550099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:58.550291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:58.551209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.551341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:58.551383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.551647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:58.551694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.551848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.551923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:58.558160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.558232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.558390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.558425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:58.558641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.558684Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:58.558847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:58.558902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.558945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:58.558994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.559026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:58.559054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:58.559115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:58.559149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:58.559192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:58.561243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.561341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.561379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:58.561421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:58.561454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.561566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
SHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_0\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.360342Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-07T13:49:06.361781Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.361844Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-04-07T13:49:06.361890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-04-07T13:49:06.361930Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-07T13:49:06.363380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.363442Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:49:06.363482Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-07T13:49:06.364776Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.364817Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.364854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-04-07T13:49:06.364903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.365018Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:49:06.366225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:49:06.366349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-07T13:49:06.366569Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:06.366660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 30064773225 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.366705Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-07T13:49:06.366918Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-07T13:49:06.366964Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-07T13:49:06.367069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.367187Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:388:2360], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-04-07T13:49:06.368369Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-04-07T13:49:06.368463Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-04-07T13:49:06.368576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:49:06.368852Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:06.368889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:06.369041Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:06.369101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:49:06.369489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.369561Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-07T13:49:06.369604Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-07T13:49:06.370342Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.370450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.370496Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:06.370542Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-07T13:49:06.370590Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T13:49:06.370670Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:49:06.372842Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-07T13:49:06.372923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:06.373022Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:388:2360], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:49:06.373237Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-07T13:49:06.373270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-07T13:49:06.373399Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-07T13:49:06.373428Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:482:2427], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-07T13:49:06.374566Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-07T13:49:06.374947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.374998Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:49:06.375125Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:49:06.375168Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.375220Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:49:06.375269Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.375314Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:49:06.375351Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:49:06.375437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.375840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.375904Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-07T13:49:06.377485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:49:06.377553Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:49:06.377894Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:49:06.377969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:49:06.378008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:570:2513] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.810885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.810995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.811051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.811102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.811145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.811179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.811228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.811597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.881231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.881281Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.898760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:58.898898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.899039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:58.911020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:58.911217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:58.911803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.911995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:58.917211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.918397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.918453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.918595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:58.918639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.918676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:58.918825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.930565Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:59.120035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:59.120239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.120446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:59.120775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:59.120854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.123057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.123203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:59.123383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.123436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:59.123529Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:59.123564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:59.125321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.125374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:59.125423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:59.127066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.127112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.127168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.127215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.136759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:59.138830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:59.139017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:59.139998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.140126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:59.140172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.140403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:59.140473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.140643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.140762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:59.142707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.142745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.142907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.142959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:59.143211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.143257Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:59.143342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:59.143424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.143465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:59.143498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.143528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:59.143566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:59.143620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:59.143658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:59.143686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:59.145415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.145537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.145571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:59.145605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:59.145640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.146024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
r pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:06.821781Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:49:06.824864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.825047Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:06.825078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:06.825200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:06.825399Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:06.825430Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T13:49:06.825513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:49:06.825818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.825863Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-07T13:49:06.825916Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-07T13:49:06.826764Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.826858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.826889Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:06.826916Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:49:06.826946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:06.827610Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.827680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.827701Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:06.827725Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], 
version: 18446744073709551615 2025-04-07T13:49:06.827752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.827805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:49:06.829987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:49:06.830040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:49:06.830064Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:49:06.830450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.830501Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:49:06.830645Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:49:06.830693Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.830746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:49:06.830797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.830842Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:49:06.830880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:49:06.831037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:49:06.832239Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.832566Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T13:49:06.833376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:06.833687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:49:06.833965Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T13:49:06.835039Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:49:06.838614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.839354Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-07T13:49:06.840523Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 
2025-04-07T13:49:06.840788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-07T13:49:06.841677Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T13:49:06.841829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:49:06.842224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:06.842274Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:06.842422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:49:06.843306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:06.843367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:06.843439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:06.846028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T13:49:06.846093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T13:49:06.848137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T13:49:06.848177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T13:49:06.848506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T13:49:06.848561Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T13:49:06.848620Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:49:06.857417Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:49:06.857789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:49:06.857839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:49:06.858315Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:49:06.858425Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:49:06.858470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:536:2488] 
TestWaitNotification: OK eventTxId 103 2025-04-07T13:49:06.859093Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:06.859306Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusPathDoesNotExist 2025-04-07T13:49:06.859501Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:49:00.042327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:49:00.042446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:49:00.042485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:49:00.042515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:49:00.042554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:49:00.042580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:49:00.042648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:49:00.042989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:00.157262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:00.157320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:00.167016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:00.167151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-07T13:49:00.167290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:49:00.172384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:49:00.172560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:49:00.173128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.173334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:49:00.174993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.176076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:00.176138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.176236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:49:00.176272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:00.176302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:49:00.176415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.182232Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:49:00.405421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:00.405631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.405809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:49:00.406013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:49:00.406079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.417620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.417745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:49:00.417937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.417992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:49:00.418109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:49:00.418148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:49:00.419791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.419848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:49:00.419892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:49:00.421417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.421472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.421509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.421564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.429179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:49:00.437404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:49:00.437621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:49:00.438599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.438720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:00.438782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.438998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:49:00.439055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.439193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:00.439289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:49:00.441142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T13:49:00.441182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:00.441360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.441396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:49:00.441708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.441751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:49:00.441834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:49:00.441864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.441899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:49:00.441936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.441986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:49:00.442014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:49:00.442067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:00.442098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:49:00.442129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:49:00.443863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:00.443976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:00.444010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:49:00.444058Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:49:00.444093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:00.444173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
9:06.717909Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:06.717945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T13:49:06.717983Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:49:06.718252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.718311Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-07T13:49:06.718371Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-07T13:49:06.719381Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.719464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.719496Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:06.719529Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:49:06.719564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:06.720174Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.720257Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:49:06.720286Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:49:06.720315Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:49:06.720343Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T13:49:06.720402Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:49:06.722383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-04-07T13:49:06.722462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-04-07T13:49:06.722495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-04-07T13:49:06.722525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 
2025-04-07T13:49:06.722893Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.722946Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:49:06.723107Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:49:06.723150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.723213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:49:06.723261Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:49:06.723317Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:49:06.723360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:49:06.723556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.724826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.725534Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T13:49:06.725897Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-07T13:49:06.726096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:06.726402Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-04-07T13:49:06.726885Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T13:49:06.727082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:49:06.729583Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-07T13:49:06.729847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T13:49:06.730037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:49:06.730421Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-04-07T13:49:06.731669Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:49:06.736818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 
4, at schemeshard: 72057594046678944 2025-04-07T13:49:06.737073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2025-04-07T13:49:06.738492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:06.738554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:06.738695Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:49:06.740063Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:06.740132Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:06.740212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:06.740498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:49:06.744706Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T13:49:06.744809Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T13:49:06.744922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T13:49:06.744946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-07T13:49:06.745381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T13:49:06.745420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-07T13:49:06.745571Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T13:49:06.745622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-07T13:49:06.747081Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:49:06.747201Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:49:06.747508Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:49:06.747568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:49:06.748043Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:49:06.748156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:49:06.748205Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [7:579:2521] TestWaitNotification: OK eventTxId 103 2025-04-07T13:49:06.748801Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:06.749017Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 263us result status StatusPathDoesNotExist 2025-04-07T13:49:06.749194Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpLimits::BigParameter >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.597750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.597851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.597890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.597933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.597993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.598039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.598097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.598402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.683543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.683610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.699488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-07T13:48:58.699634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.699781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:58.708316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:58.708522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:58.709120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.709319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:58.711139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.712256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.712340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.712457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:58.712498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.712541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:58.712717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.718323Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:58.861741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:58.861944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.862166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:58.862374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:58.862468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.865045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.865166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:58.865337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T13:48:58.865398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:48:58.865441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:58.865483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:58.867181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.867227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:58.867261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:58.868847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.868888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.868928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.868999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.872228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:58.874616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:58.874760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:58.875435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.875526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:58.875566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.875742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:58.875780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:58.875887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.875930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-07T13:48:58.877474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.877514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.877669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.877702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:58.877913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.877971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:58.878080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:58.878114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.878155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:58.878216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:58.878252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:58.878277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:58.878328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:58.878365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:58.878393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:58.887550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.887665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:58.887700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:58.887738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:58.887778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:58.887881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d#102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-07T13:49:06.824030Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.826756Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.826902Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:49:06.827392Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:49:06.827444Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:49:06.827772Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:49:06.827824Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:49:06.827875Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:49:06.886090Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-07T13:49:06.886313Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-07T13:49:06.886394Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-07T13:49:06.886458Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-07T13:49:06.886509Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-07T13:49:06.888483Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.888620Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.888684Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.888735Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-07T13:49:06.888779Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T13:49:06.888901Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:49:06.890529Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:49:06.890648Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T13:49:06.890904Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:06.891006Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 34359740521 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.891050Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T13:49:06.891300Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T13:49:06.891356Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T13:49:06.891471Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.891552Z node 8 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:360:2335], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:49:06.893300Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:06.893355Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:06.893538Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:06.893587Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:49:06.893692Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.893751Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-07T13:49:06.893796Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-07T13:49:06.894705Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:06.894825Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:06.894872Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:49:06.894910Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T13:49:06.894946Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T13:49:06.895012Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:49:06.897287Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:06.897331Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:49:06.897470Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:49:06.897530Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:49:06.897580Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:49:06.897646Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:299:2291] message: TxId: 102 2025-04-07T13:49:06.897689Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:49:06.897730Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:49:06.897760Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:49:06.897912Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:06.898402Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:49:06.899447Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:49:06.899487Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:499:2439] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-07T13:49:06.901403Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:06.901555Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-04-07T13:49:06.901592Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-04-07T13:49:06.901715Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be 
added, not removed, at schemeshard: 72057594046678944 2025-04-07T13:49:06.901762Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-07T13:49:06.904365Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:06.904749Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> KqpStats::JoinNoStatsYql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:59.919616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:59.919734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:59.919775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:59.919805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:59.919870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:59.919908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:59.919966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:59.920253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:00.007972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:00.008021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:00.026747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:00.026854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:49:00.027005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:49:00.032059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:49:00.032258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:49:00.032857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.033047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:49:00.034651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.035804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:00.035854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.035978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:49:00.036022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:00.036057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:49:00.036193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.042088Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:49:00.196270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:00.196458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.196649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:49:00.196874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:49:00.196927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.200642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.200783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:49:00.200919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.200968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:49:00.201020Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:49:00.201053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:49:00.206757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.206820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:49:00.206857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:49:00.208847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.208905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.208946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.208988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.212452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:49:00.214943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:49:00.215116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:49:00.216027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:00.216159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:00.216202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.216440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:49:00.216491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:49:00.216648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:00.216773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:49:00.221009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:00.221052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:00.221214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:00.221252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:49:00.221511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:49:00.221573Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:49:00.221668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:49:00.221702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.221760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:49:00.221801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:49:00.221853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:49:00.221883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:49:00.221948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:00.221988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:49:00.222018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:49:00.223921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:00.224013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:49:00.224052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:49:00.224102Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:49:00.224141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:00.224246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ss 72057594046678944 2025-04-07T13:49:07.844554Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-04-07T13:49:07.844743Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:49:07.844794Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T13:49:07.844933Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T13:49:07.844975Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:49:07.845037Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-07T13:49:07.845094Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:49:07.845141Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:49:07.845184Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:49:07.845395Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:07.853918Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:49:07.854640Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:49:07.854908Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T13:49:07.855084Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:07.855410Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:49:07.855834Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-07T13:49:07.856053Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T13:49:07.856250Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:49:07.867201Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-07T13:49:07.867515Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T13:49:07.867765Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:49:07.869060Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409547 2025-04-07T13:49:07.894313Z node 6 :HIVE INFO: [72075186233409546] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 Forgetting tablet 72075186234409546 2025-04-07T13:49:07.894910Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-04-07T13:49:07.894980Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 2025-04-07T13:49:07.895216Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T13:49:07.895468Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409548 2025-04-07T13:49:07.896501Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:07.896560Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:07.896728Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:49:07.899097Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:07.899165Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:07.899254Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:07.912113Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T13:49:07.912203Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T13:49:07.912782Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T13:49:07.912822Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-07T13:49:07.915019Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T13:49:07.915063Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-07T13:49:07.915135Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T13:49:07.915195Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-07T13:49:07.915698Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:49:07.921521Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T13:49:07.921942Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T13:49:07.921996Z node 6 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T13:49:07.922550Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:49:07.922681Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:49:07.922728Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:771:2683] TestWaitNotification: OK eventTxId 105 2025-04-07T13:49:07.923374Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:07.923713Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 386us result status StatusPathDoesNotExist 2025-04-07T13:49:07.923893Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:49:07.924465Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:07.924691Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 240us result status StatusPathDoesNotExist 2025-04-07T13:49:07.924842Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:49:07.925416Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:07.925612Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 
2025-04-07T13:49:07.925974Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:59.549158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:59.549258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:59.549308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:59.549351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:59.549398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:59.549429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:59.549482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:59.549888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:59.628564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:59.628623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:59.732755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:59.732890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:59.733046Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:59.738660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:59.738871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:59.739422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.739622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:59.744112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.745268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.745321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.745433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:59.745473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.745507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:59.745685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.756155Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:59.875522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:59.875746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.875951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:59.876169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:59.876231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.878470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.878589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:59.878749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.878806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T13:48:59.878885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:59.878916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:59.881355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.881429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:59.881468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:59.883157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.883202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.883236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.883278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.886611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:59.888198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:59.888398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:59.889615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.889734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:59.889778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.890048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:59.890097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.890249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.890312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:59.892158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.892195Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.892355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.892406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:59.892682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.892737Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:59.892821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:59.892851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.892886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:59.892924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.892952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:59.892977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:59.893038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:59.893070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:59.893113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:59.894836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.894946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.894997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:59.895032Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:59.895066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.895168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
PathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:07.646847Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:49:07.646883Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:49:07.646916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:49:07.646997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:49:07.648598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:49:07.648777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T13:49:07.649428Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:07.649612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 30064773225 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:07.649692Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T13:49:07.649803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:07.649854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:07.649911Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-04-07T13:49:07.650766Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:49:07.652383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:49:07.653467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:07.653575Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:49:07.653716Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-04-07T13:49:07.654052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:07.654146Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 
2025-04-07T13:49:07.656117Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:07.656164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:07.656289Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:07.656468Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:07.656506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T13:49:07.656548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:49:07.656950Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:07.657011Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-07T13:49:07.657059Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-04-07T13:49:07.657817Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:07.657907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:07.657942Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:49:07.657977Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:49:07.658013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:49:07.658466Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:07.658541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:49:07.658568Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:49:07.658600Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:49:07.658631Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:49:07.658694Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:49:07.661151Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:49:07.661211Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:49:07.661369Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:49:07.661421Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:49:07.661487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:49:07.661565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:49:07.661616Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:49:07.661671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:49:07.661754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:49:07.662147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:07.662203Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:07.662277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:49:07.662513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:49:07.662563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:07.662644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:49:07.663645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:49:07.663932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:49:07.665365Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:49:07.666293Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:49:07.666579Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:49:07.666635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:49:07.667110Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:49:07.667210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:49:07.667265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [7:338:2330] TestWaitNotification: OK eventTxId 102 2025-04-07T13:49:07.667800Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:07.667999Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 231us result status StatusPathDoesNotExist 2025-04-07T13:49:07.668162Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:48:58.750789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:48:58.750889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.750930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:48:58.750960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:48:58.750998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:48:58.751031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:48:58.751094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:48:58.751392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:48:58.830144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:48:58.830251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.858275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:48:58.858400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:48:58.858547Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:48:58.867427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:48:58.867631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:58.868258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:58.868431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:48:58.875019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.876198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:58.876264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:58.876383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:48:58.876429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:58.876477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:48:58.876632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:48:58.882699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:48:59.017172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:48:59.017379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.017597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:48:59.017824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:48:59.017881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.027239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.027407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:48:59.027629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.027714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T13:48:59.027760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:48:59.027791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:48:59.029733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.029791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:48:59.029829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:48:59.031633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.031684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.031711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.031750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.034649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:48:59.036871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:48:59.037086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:48:59.038124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:48:59.038287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:48:59.038349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.038592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:48:59.038638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:48:59.038789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.038881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:48:59.041255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:48:59.041300Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:48:59.041484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:48:59.041547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:48:59.041774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:48:59.041808Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:48:59.041882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:48:59.041909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.041939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:48:59.041988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:48:59.042016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:48:59.042037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:48:59.042088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:48:59.042117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:48:59.042140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:48:59.043699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.043766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:48:59.043802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:48:59.043842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:48:59.043875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:48:59.043946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
an to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-07T13:49:08.134759Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:08.134902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 30064773225 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:08.134967Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-04-07T13:49:08.135228Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T13:49:08.135277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T13:49:08.135372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:49:08.135452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T13:49:08.135617Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:348:2329], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-04-07T13:49:08.135685Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T13:49:08.135746Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T13:49:08.135800Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T13:49:08.135895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:49:08.135947Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-07T13:49:08.135990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T13:49:08.143651Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-04-07T13:49:08.143812Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-04-07T13:49:08.144043Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-07T13:49:08.144306Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:08.144362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:08.144596Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:08.145531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:203:2206], at schemeshard: 
72057594046678944, txId: 104, path id: 2 2025-04-07T13:49:08.146932Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:49:08.147055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:49:08.147112Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:49:08.147165Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-07T13:49:08.147219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:49:08.147336Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T13:49:08.149506Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T13:49:08.149547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T13:49:08.149697Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T13:49:08.149731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:443:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-07T13:49:08.150200Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-07T13:49:08.150286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:08.150387Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:348:2329], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:49:08.156966Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 2025-04-07T13:49:08.157432Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:49:08.157528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T13:49:08.157924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T13:49:08.157977Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T13:49:08.158495Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T13:49:08.158616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T13:49:08.158663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:542:2494] TestWaitNotification: OK eventTxId 104 2025-04-07T13:49:08.159260Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:49:08.159490Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 270us result status StatusSuccess 2025-04-07T13:49:08.159905Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:49:08.160528Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-07T13:49:08.160871Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 350us result status StatusSuccess 2025-04-07T13:49:08.161242Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |84.5%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.5%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> DataShardSnapshots::LockedWriteBulkUpsertConflict [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted >> KqpQuery::GenericQueryNoRowsLimit >> KqpTypes::UnsafeTimestampCastV0 >> KqpExplain::ExplainScanQueryWithParams >> DataShardSnapshots::LockedWriteReuseAfterCommit [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess >> KqpQuery::QueryClientTimeout >> KqpQuery::YqlSyntaxV0 >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> KqpLimits::WaitCAsStateOnAbort >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> KqpTypes::QuerySpecialTypes >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpQuery::QueryCache >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpLimits::ComputeActorMemoryAllocationFailure >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter >> KqpParams::MissingParameter [GOOD] >> KqpParams::ParameterTypes >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession 
[GOOD] >> TGRpcYdbTest::ExecuteQueryCache >> KqpQuery::Now [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> YdbYqlClient::CreateTableWithMESettings [GOOD] >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 16814, MsgBus: 26286 2025-04-07T13:48:18.910731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567699200911323:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:18.911212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d03/r3tmp/tmpamglAD/pdisk_1.dat 2025-04-07T13:48:19.619050Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16814, node 1 2025-04-07T13:48:19.783860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:19.783981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:19.822581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:19.881430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:19.881448Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:19.881455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:19.881551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26286 TClient is connected to server localhost:26286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:20.703233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:20.716210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:20.728107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:20.868160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.072145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:21.159797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:24.989410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567699200911323:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:24.989472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:31.663866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567750740520475:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:31.918070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:32.248271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:32.410759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:32.501197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:32.564441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:32.683496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:33.241946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:33.512590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567763625422940:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:33.512679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:33.512985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567763625422945:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:33.527607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:33.572234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567763625422947:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:34.617430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:34.617455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:36.212751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:48:36.219288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64197, MsgBus: 21514 2025-04-07T13:48:37.177271Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567779832193344:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:37.177312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d03/r3tmp/tmpmaTLZe/pdisk_1.dat 2025-04-07T13:48:37.328548Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.331018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:37.331107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:37.335822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64197, node 2 2025-04-07T13:48:37.489604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:37.489639Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:37.489647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:37.489757Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21514 TClient is connected to server localhost:21514 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:38.723286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:38.731166Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:48:38.737951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:38.822711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at ... RN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:1, at schemeshard: 72057594046644480 2025-04-07T13:48:47.181684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.990580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.500077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.937695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T13:48:48.995685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.067853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.090332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715723:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.297049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:52.297073Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 25034, MsgBus: 24397 2025-04-07T13:48:53.357900Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567847350174531:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:53.471110Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d03/r3tmp/tmpCoK8nH/pdisk_1.dat 2025-04-07T13:48:53.611973Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:53.633828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:53.633914Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:53.635457Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25034, node 3 2025-04-07T13:48:53.789284Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:53.789305Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:53.789312Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:53.789433Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24397 TClient is connected to server localhost:24397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:54.698976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:54.707256Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:54.718563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:54.797710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:55.127250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:55.234382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:58.357374Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567847350174531:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:58.357468Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:59.098039Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567873119979982:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:59.098157Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:59.236216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.288522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.374868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.436170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.507233Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.723911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:59.865858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567873119980493:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:59.865950Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:59.866468Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567873119980498:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:59.870050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:59.878519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567873119980500:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:01.205157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-07T13:49:01.972856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.036917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:1, at schemeshard: 72057594046644480 2025-04-07T13:49:03.707238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:49:04.445910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T13:49:05.161446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T13:49:05.913357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:49:06.019174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.579986Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:08.580010Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.712107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710736:0, at schemeshard: 72057594046644480 >> KqpExplain::Explain [GOOD] >> KqpExplain::ComplexJoin >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-04-07T13:48:46.971785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567818066164967:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:46.971830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001838/r3tmp/tmpj7dlze/pdisk_1.dat 2025-04-07T13:48:47.612618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:47.612721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:47.635148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:47.647951Z node 1 :IMPORT WARN: Table profiles were not 
loaded TServer::EnableGrpc on GrpcPort 23541, node 1 2025-04-07T13:48:47.929209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:47.929234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:47.929243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:47.929336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:48.673338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.686895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:48.688267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.691010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:48.691205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:48.691218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:48:48.693712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:48.693739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:48.702593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.707507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:48.707823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033728752, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:48.707843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:48.708053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:48.713605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:48.713766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.713808Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:48.713887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:48.713924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:48.713978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:48.718948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:48.719026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:48.719046Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:48.719132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:51.972880Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567818066164967:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:51.972941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:52.809278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.809838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:52.810404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:52.810425Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.817295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:48:52.817506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:52.817717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:52.817774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:48:52.819040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:52.819075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:52.819093Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:48:52.819317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:52.819338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:52.819348Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:48:52.832217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:48:52.841163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.841447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.841562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.841600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:48:52.852932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:48:53.046814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:48:53.046841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:48:53.047112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:48:53.047120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 
2025-04-07T13:48:53.076882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:48:53.076906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:48:53.076934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:48:53.085351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:48:53.096156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033733141, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:53.096215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033733141 2025-04-07T13:48:53.096338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:48:53.106259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:53.106546Z node 1 ... e) 2025-04-07T13:49:08.785697Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:09.035508Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.035873Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:09.035900Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.038478Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:49:09.038687Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:49:09.038714Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:49:09.040061Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:09.041117Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:09.041148Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:49:09.042738Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.046458Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033749094, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:09.046496Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:49:09.046756Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:49:09.048536Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:09.048718Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:09.048767Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:49:09.048852Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:49:09.048887Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:49:09.048926Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:49:09.049757Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:49:09.049793Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:49:09.049810Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:49:09.049873Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:49:11.858446Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.858914Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:49:11.859541Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:11.859576Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.875494Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2025-04-07T13:49:11.875668Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:11.875837Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:11.875888Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:49:11.877420Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:11.877467Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:11.877499Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:49:11.877715Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:11.877735Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:11.877747Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:49:11.878683Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:49:11.885965Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:49:11.886046Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:49:11.888211Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:49:11.972810Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:49:11.972841Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:49:11.972904Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:49:11.979236Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:49:11.984309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033752027, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:11.984361Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033752027 2025-04-07T13:49:11.984469Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:49:11.986362Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:11.986673Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2025-04-07T13:49:11.986726Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:49:11.988617Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:11.988687Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:11.988708Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:49:11.988915Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:11.988930Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:11.988939Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:49:11.995832Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1744033752027 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1289 } } 2025-04-07T13:49:12.000134Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:49:12.000176Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.000201Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2025-04-07T13:49:12.003035Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:49:12.003135Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:49:12.003187Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> TCacheTest::Recreate >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::ExplainDataQuery >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> DataShardSnapshots::LockedWriteDistributedCommitSuccess [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> TCacheTest::Navigate >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> 
ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> TCacheTest::Recreate [GOOD] >> TCacheTest::RacyRecreateAndSync >> DataShardSnapshots::LockedWriteDistributedCommitAborted [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheInvalidate >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::Navigate [GOOD] >> TCacheTest::MigrationUndo >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::RequestUnitForBadRequestExecute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyRecreateAndSync [GOOD] Test command err: 2025-04-07T13:49:16.508085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:16.508157Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:16.655476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:16.676761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:49:16.678450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:49:16.705568Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait 
until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:49:16.714465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-07T13:49:17.054118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:17.054179Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:17.101993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:17.111594Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:49:17.113529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:49:17.117777Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:49:17.139605Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::QueryStats >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::SelectStringWithNoBase64Encoding >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> TGRpcYdbTest::ExecuteQueryCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2025-04-07T13:48:35.816748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:636:2325], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:48:35.878277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:638:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:35.879170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:35.879915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:48:35.892409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:35.892467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:48:37.255763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.849353Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:48:38.185324Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:48:40.771270Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17214, node 1 TClient is connected to server localhost:19465 2025-04-07T13:48:41.260932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:41.261000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:41.261035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:41.261192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:49.552942Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567831133506026:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:49.564314Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:48:49.800190Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:49.800313Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:49.807491Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22951, node 3 2025-04-07T13:48:49.829015Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:49.829048Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:49.840750Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:50.073517Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:50.073539Z node 3 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-07T13:48:50.073564Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:50.073723Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:50.616352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:50.667384Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-07T13:48:54.552931Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567831133506026:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:54.553003Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-07T13:48:58.631282Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567871804057010:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:58.697313Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:48:58.944099Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:58.963152Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:58.963229Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:58.967179Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5782, node 4 2025-04-07T13:48:59.237245Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:59.237271Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:59.237277Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:59.237394Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:30450 2025-04-07T13:48:59.743625Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999942s 2025-04-07T13:48:59.743772Z node 4 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-07T13:49:03.610758Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490567871804057010:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:03.610838Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:04.075135Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490567897573861278:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.075278Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.550865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490567897573861291:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.550945Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.551331Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490567897573861296:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.556262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-07T13:49:04.566784Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:49:04.566845Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:49:04.566907Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:49:04.566907Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:49:04.576085Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:49:04.576236Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:49:04.576252Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:49:04.576284Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:49:04.579106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490567897573861298:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-07T13:49:04.901816Z node 4 :GRPC_SERVER DEBUG: [0x51a0000b1680] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T13:49:04.902110Z node 4 :GRPC_SERVER DEBUG: [0x51a00000a880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T13:49:04.902295Z node 4 :GRPC_SERVER DEBUG: [0x51a000019880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T13:49:04.902485Z node 4 :GRPC_SERVER DEBUG: [0x51a00000c680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T13:49:04.902662Z node 4 :GRPC_SERVER DEBUG: [0x51a00001a480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T13:49:04.902846Z node ... # peer# 2025-04-07T13:49:12.866971Z node 5 :GRPC_SERVER DEBUG: [0x51b00051b480] received request Name# TopicService/DropTopic ok# false data# peer# 2025-04-07T13:49:12.867229Z node 5 :GRPC_SERVER DEBUG: [0x51b00054a880] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-07T13:49:12.867429Z node 5 :GRPC_SERVER DEBUG: [0x51b00050d480] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-07T13:49:12.867635Z node 5 :GRPC_SERVER DEBUG: [0x51b00050c680] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-07T13:49:12.867824Z node 5 :GRPC_SERVER DEBUG: [0x51b00050bf80] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-07T13:49:12.868022Z node 5 :GRPC_SERVER DEBUG: [0x51b00050b180] received request Name# CreateDatabase ok# false data# peer# 2025-04-07T13:49:12.868224Z node 5 :GRPC_SERVER DEBUG: [0x51b000509c80] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-07T13:49:12.868394Z node 5 :GRPC_SERVER DEBUG: [0x51b00050aa80] received request Name# AlterDatabase ok# false data# peer# 2025-04-07T13:49:12.868575Z node 5 :GRPC_SERVER DEBUG: [0x51b000509580] received request Name# ListDatabases ok# false data# peer# 2025-04-07T13:49:12.869244Z node 5 :GRPC_SERVER DEBUG: [0x51b000508780] received request Name# RemoveDatabase ok# false data# peer# 2025-04-07T13:49:12.869475Z node 5 :GRPC_SERVER DEBUG: [0x51b000508080] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-07T13:49:12.869687Z node 5 :GRPC_SERVER DEBUG: [0x51b000506480] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-07T13:49:12.869897Z node 5 :GRPC_SERVER DEBUG: [0x51b000505680] received request Name# ListEndpoints ok# false data# peer# 2025-04-07T13:49:12.870094Z node 5 :GRPC_SERVER DEBUG: [0x51b000507280] received request Name# WhoAmI ok# false data# peer# 2025-04-07T13:49:12.870296Z node 5 :GRPC_SERVER DEBUG: [0x51b000505d80] received request Name# NodeRegistration ok# false data# peer# 2025-04-07T13:49:12.870494Z node 5 :GRPC_SERVER DEBUG: [0x51b000504f80] received request Name# Scan ok# false data# peer# 2025-04-07T13:49:12.871001Z node 5 :GRPC_SERVER DEBUG: [0x51b000504180] received request Name# GetShardLocations ok# false data# peer# 2025-04-07T13:49:12.871224Z node 5 :GRPC_SERVER DEBUG: [0x51b000503380] received request Name# DescribeTable ok# false data# peer# 2025-04-07T13:49:12.871417Z node 5 :GRPC_SERVER DEBUG: [0x51b000502c80] received request Name# CreateSnapshot ok# false data# peer# 2025-04-07T13:49:12.871613Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fbc80] received request Name# RefreshSnapshot ok# false data# peer# 
2025-04-07T13:49:12.871804Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fb580] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-07T13:49:12.872010Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fae80] received request Name# List ok# false data# peer# 2025-04-07T13:49:12.872202Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fa780] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-07T13:49:12.872394Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fa080] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-07T13:49:12.872569Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f9980] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-07T13:49:12.872776Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f9280] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-07T13:49:12.872945Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f8b80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-04-07T13:49:12.873128Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f8480] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-07T13:49:12.873356Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f7680] received request Name# CreateStream ok# false data# peer# 2025-04-07T13:49:12.873555Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f6f80] received request Name# ListStreams ok# false data# peer# 2025-04-07T13:49:12.873735Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f6880] received request Name# DeleteStream ok# false data# peer# 2025-04-07T13:49:12.873910Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f7d80] received request Name# DescribeStream ok# false data# peer# 2025-04-07T13:49:12.874075Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f6180] received request Name# ListShards ok# false data# peer# 2025-04-07T13:49:12.874265Z node 5 :GRPC_SERVER DEBUG: [0x51b0004daf80] received request Name# SetWriteQuota ok# false data# peer# 2025-04-07T13:49:12.874444Z node 5 :GRPC_SERVER DEBUG: [0x51b0004db680] received request Name# UpdateStream ok# false data# peer# 2025-04-07T13:49:12.874613Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f5a80] received request Name# PutRecord ok# false data# peer# 2025-04-07T13:49:12.874781Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f5380] received request Name# PutRecords ok# false data# peer# 2025-04-07T13:49:12.874954Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f4c80] received request Name# GetRecords ok# false data# peer# 2025-04-07T13:49:12.875122Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f4580] received request Name# GetShardIterator ok# false data# peer# 2025-04-07T13:49:12.875289Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f3e80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-07T13:49:12.875463Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f3080] received request Name# DescribeLimits ok# false data# peer# 2025-04-07T13:49:12.875629Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f2980] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-07T13:49:12.875815Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f2280] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-07T13:49:12.875994Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f1b80] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-07T13:49:12.876181Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f1480] received request Name# UpdateShardCount ok# false data# peer# 2025-04-07T13:49:12.876368Z node 5 :GRPC_SERVER DEBUG: [0x51b0004f0d80] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-07T13:49:12.876533Z node 5 
:GRPC_SERVER DEBUG: [0x51b0004f0680] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-07T13:49:12.877618Z node 5 :GRPC_SERVER DEBUG: [0x51b0004eff80] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-07T13:49:12.877826Z node 5 :GRPC_SERVER DEBUG: [0x51b000269580] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-07T13:49:12.878021Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ef880] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-07T13:49:12.878197Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ef180] received request Name# AddTagsToStream ok# false data# peer# 2025-04-07T13:49:12.878378Z node 5 :GRPC_SERVER DEBUG: [0x51b0004eea80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-07T13:49:12.878572Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ee380] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-07T13:49:12.878740Z node 5 :GRPC_SERVER DEBUG: [0x51b0004edc80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-07T13:49:12.878900Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ed580] received request Name# MergeShards ok# false data# peer# 2025-04-07T13:49:12.879084Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ece80] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-07T13:49:12.879277Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dcb80] received request Name# SplitShard ok# false data# peer# 2025-04-07T13:49:12.879462Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dc480] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-07T13:49:12.879649Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dbd80] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-07T13:49:12.879828Z node 5 :GRPC_SERVER DEBUG: [0x51b0004da880] received request Name# SelfCheck ok# false data# peer# 2025-04-07T13:49:12.880012Z node 5 :GRPC_SERVER DEBUG: [0x51b0004da180] received request Name# NodeCheck ok# false data# peer# 2025-04-07T13:49:12.880205Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7e80] received request Name# CreateSession ok# false data# peer# 2025-04-07T13:49:12.880408Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7780] received request Name# DeleteSession ok# false data# peer# 2025-04-07T13:49:12.880631Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7080] received request Name# AttachSession ok# false data# peer# 2025-04-07T13:49:12.881394Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d6280] received request Name# BeginTransaction ok# false data# peer# 2025-04-07T13:49:12.881617Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d5b80] received request Name# CommitTransaction ok# false data# peer# 2025-04-07T13:49:12.881822Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d5480] received request Name# RollbackTransaction ok# false data# peer# 2025-04-07T13:49:12.882027Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d9a80] received request Name# ExecuteQuery ok# false data# peer# 2025-04-07T13:49:12.882246Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d8c80] received request Name# ExecuteScript ok# false data# peer# 2025-04-07T13:49:12.882417Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d8580] received request Name# FetchScriptResults ok# false data# peer# 2025-04-07T13:49:12.882605Z node 5 :GRPC_SERVER DEBUG: [0x51b000501e80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-07T13:49:12.882791Z node 5 :GRPC_SERVER DEBUG: [0x51b000268e80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-07T13:49:12.883010Z node 5 :GRPC_SERVER DEBUG: [0x51b000501080] received 
request Name# RestartTablet ok# false data# peer# 2025-04-07T13:49:12.883204Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fd880] received request Name# CreateLogStore ok# false data# peer# 2025-04-07T13:49:12.883387Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fd180] received request Name# DescribeLogStore ok# false data# peer# 2025-04-07T13:49:12.883589Z node 5 :GRPC_SERVER DEBUG: [0x51b000503a80] received request Name# DropLogStore ok# false data# peer# 2025-04-07T13:49:12.883773Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fca80] received request Name# AlterLogStore ok# false data# peer# 2025-04-07T13:49:12.883963Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ec780] received request Name# CreateLogTable ok# false data# peer# 2025-04-07T13:49:12.884148Z node 5 :GRPC_SERVER DEBUG: [0x51b0004eb280] received request Name# DescribeLogTable ok# false data# peer# 2025-04-07T13:49:12.884341Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ea480] received request Name# DropLogTable ok# false data# peer# 2025-04-07T13:49:12.884529Z node 5 :GRPC_SERVER DEBUG: [0x51b0004e9d80] received request Name# AlterLogTable ok# false data# peer# 2025-04-07T13:49:12.885225Z node 5 :GRPC_SERVER DEBUG: [0x51b0004e8f80] received request Name# Login ok# false data# peer# 2025-04-07T13:49:12.885494Z node 5 :GRPC_SERVER DEBUG: [0x51b0004e8880] received request Name# DescribeReplication ok# false data# peer# >> TCacheTest::MigrationUndo [GOOD] >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpParams::ParameterTypes [GOOD] >> KqpParams::InvalidJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-04-07T13:49:16.913372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:16.913425Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:17.048818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:17.068624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-07T13:49:17.567511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:17.567580Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:17.645059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-07T13:49:17.650498Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:171:2168], Recipient [2:68:2107]: NActors::TEvents::TEvPoison 2025-04-07T13:49:17.651129Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:175:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:174:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:174:2169] 2025-04-07T13:49:17.689720Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [2:174:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:17.719180Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [2:174:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:17.719335Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [2:174:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:17.724281Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:49:17.724414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:49:17.724459Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:49:17.724500Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:49:17.724544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:49:17.724575Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:49:17.724727Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:49:17.725105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:17.745935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:17.747101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:49:17.747275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:49:17.747519Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [2:7238242728502259555:7369577], Recipient [2:177:2170]: TSystem::Undelivered 2025-04-07T13:49:17.747562Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-04-07T13:49:17.747594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:17.747623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:17.747834Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:49:17.758310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:17.771333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.771484Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.771922Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.782656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.782850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.794706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.805560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.805703Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.817830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.818207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.818347Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.855725Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.855873Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.865389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.865607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.865705Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.894553Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.894744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.894914Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.895522Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.895689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.895744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.895799Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:17.922352Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:49:17.923947Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:49:17.924799Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:177:2170], Recipient [2:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T13:49:17.924872Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T13:49:17.925440Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:17.931408Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:17.931791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:49:17.931868Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:17.931943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:49:17.931981Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:49:17.932275Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:192:2170], Recipient [2:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T13:49:17.932331Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T13:49:17.932384Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2025-04-07T13:49:17.956471Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [2:206:2187], Recipient [2:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-07T13:49:17.956547Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:49:18.116431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:49:18.116744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.116855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:49:18.117027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target pa ... 
ype: Coordinator, at schemeshard: 72057594046678944 2025-04-07T13:49:18.831678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:49:18.831717Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-04-07T13:49:18.831738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:49:18.831759Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-07T13:49:18.831777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:49:18.831902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-07T13:49:18.832100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.832288Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-07T13:49:18.832618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.832780Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.833205Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.833308Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.833546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.833780Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.833895Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834151Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834251Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834457Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834647Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834761Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.834868Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.835111Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:49:18.836502Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:49:18.837823Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:509:2398], Recipient [2:509:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T13:49:18.837883Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T13:49:18.838597Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:49:18.838644Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:49:18.838737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:49:18.838784Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:49:18.838832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:49:18.838865Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:49:18.838974Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:524:2398], Recipient [2:509:2398]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T13:49:18.839015Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T13:49:18.839071Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:49:18.861273Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:18.861477Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:376:2315] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:18.861737Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:536:2415], recipient# [2:535:2414], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } 
ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:49:18.862192Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:18.862315Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:385:2318] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:18.862537Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:538:2417], recipient# [2:537:2416], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:49:18.863021Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:18.863155Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: 
[2:394:2321] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 300 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:18.863354Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:540:2419], recipient# [2:539:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } >> YdbTableBulkUpsert::RetryOperation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2025-04-07T13:48:08.477412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:08.477751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:08.477850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 15422, node 1 TClient is connected to server localhost:15415 2025-04-07T13:48:39.051840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:285:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:39.052097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:39.052187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 4109, node 2 TClient is connected to server localhost:22391 2025-04-07T13:48:50.657898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:297:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:50.658377Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:50.658465Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 30163, node 3 TClient is connected to server localhost:13101 2025-04-07T13:49:03.246205Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:03.246442Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:03.246594Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29257, node 4 TClient is connected to server localhost:13736 2025-04-07T13:49:15.089754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:284:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:15.090100Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:15.090276Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 23148, node 5 TClient is connected to server localhost:20859 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-04-07T13:48:48.919190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567825609132621:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:48.919235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019e6/r3tmp/tmpNOUr0Q/pdisk_1.dat 2025-04-07T13:48:49.626042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:49.626136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:49.632142Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:49.691757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65343, node 1 2025-04-07T13:48:49.945417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:49.945442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:49.945451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:49.945539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:50.489245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.499395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:50.505065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.509922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:50.510112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:50.510126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:48:50.512982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:50.513011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:50.515073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.516421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:50.519695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033730565, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:50.519730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:50.519972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:50.521716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:50.521873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:50.521926Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:50.522015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:50.522056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:50.522099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:50.527177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:50.527222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:50.527237Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:50.527335Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:55.986854Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567858576259421:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:55.986972Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019e6/r3tmp/tmpnwddOH/pdisk_1.dat 2025-04-07T13:48:56.248163Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:56.321872Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:56.321975Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61202, node 4 2025-04-07T13:48:56.329850Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:56.437238Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:56.437268Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:56.437275Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:56.437384Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:56.849989Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:56.850319Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:56.850338Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:56.856248Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:56.856441Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:56.856456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:48:56.861781Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:56.861810Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:56.862485Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:56.866696Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:56.882408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033736928, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:56.882446Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:56.882709Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:56.884553Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:56.884708Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:56.884763Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:56.884849Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:56.884884Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:56.884930Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:56.886156Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72 ... 
known, add access: +(SR|DS):root@builtin 2025-04-07T13:49:17.190068Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:17.190575Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:17.190640Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T13:49:17.190854Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:17.190932Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:17.191003Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:17.193093Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.193140Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.193159Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:49:17.193378Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.193395Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.193405Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:49:17.193529Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.193545Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.193554Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:49:17.193655Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.193670Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.193679Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:49:17.193772Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.193786Z node 
13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.193795Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:49:17.193907Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:49:17.196804Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033757242, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:17.196835Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033757242, at schemeshard: 72057594046644480 2025-04-07T13:49:17.196945Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:49:17.197036Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033757242, at schemeshard: 72057594046644480 2025-04-07T13:49:17.197067Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2025-04-07T13:49:17.197097Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033757242, at schemeshard: 72057594046644480 2025-04-07T13:49:17.197121Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2025-04-07T13:49:17.197159Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1744033757242 2025-04-07T13:49:17.197189Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2025-04-07T13:49:17.198675Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:17.199168Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:17.199263Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2025-04-07T13:49:17.199345Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2025-04-07T13:49:17.199512Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2025-04-07T13:49:17.199551Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2025-04-07T13:49:17.199628Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2025-04-07T13:49:17.199657Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2025-04-07T13:49:17.199726Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:49:17.199756Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2025-04-07T13:49:17.199782Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:49:17.199813Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2025-04-07T13:49:17.199828Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976710658:2 2025-04-07T13:49:17.199839Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2025-04-07T13:49:17.199857Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2025-04-07T13:49:17.203331Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.203481Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.203502Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:49:17.203735Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.203785Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.203799Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:49:17.203924Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.203940Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.203948Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:49:17.204052Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.204067Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.204076Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:49:17.204196Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.204233Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.204243Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:49:17.204285Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:49:17.207212Z node 13 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolCreatorActor] ActorId: [13:7490567949963260521:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:49:17.278514Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:49:17.278662Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:49:17.280930Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween >> KqpStats::MultiTxStatsFullExpYql >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-04-07T13:48:44.006684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567803813497605:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:44.007063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001837/r3tmp/tmp2uRMMf/pdisk_1.dat 2025-04-07T13:48:46.850218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:47.128889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:47.141417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:47.141503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:47.159311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8767, node 1 2025-04-07T13:48:47.405999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:47.406129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:47.406152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:47.406252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28859 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:47.806012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.812554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:47.818197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:47.821013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:47.821230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:47.821243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:48:47.824028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:47.824052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:47.828694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.832996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:47.849774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033727891, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:47.849812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:47.850067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:47.857416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:47.857638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:47.857691Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:47.857794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2025-04-07T13:48:47.857849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:47.857928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:47.865407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:47.865468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:47.865487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:47.865594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:48.788146Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567803813497605:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:48.788226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:50.841264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.842119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:50.843602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:50.843622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:50.854071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2025-04-07T13:48:50.854336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:50.854561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:50.854629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:48:50.857942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:50.858004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:50.858029Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:48:50.858304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, 
cookie: 281474976710658 2025-04-07T13:48:50.858326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:50.858337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:48:50.876900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:48:50.907647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.908913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:50.909682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCrea ... 
registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:14.100829Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.104473Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033754148, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:14.104506Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:49:14.104765Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:49:14.106671Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:14.106821Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:14.106865Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:49:14.106947Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:49:14.106984Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:49:14.107024Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:49:14.108587Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:49:14.108628Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:49:14.108649Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:49:14.108733Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:49:17.117510Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.117901Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:49:17.118281Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:17.118299Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.121400Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:49:17.121592Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:17.121761Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:17.121809Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: 
TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:49:17.123064Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.123114Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.123133Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:49:17.123295Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.123306Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.123313Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:49:17.124107Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:49:17.129707Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:49:17.129816Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:49:17.135128Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:49:17.195426Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:49:17.195451Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:49:17.195519Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:49:17.201485Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:49:17.206260Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033757249, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:17.206306Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033757249 2025-04-07T13:49:17.206416Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:49:17.216095Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:17.216421Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:17.216474Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:49:17.218381Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.218434Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.218456Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:49:17.218667Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:49:17.218688Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:49:17.218699Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:49:17.235681Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1744033757249 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 27 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1628 } } 2025-04-07T13:49:17.242653Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:49:17.242700Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.242727Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2025-04-07T13:49:17.252685Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:49:17.252806Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:49:17.252858Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 
times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times 2025-04-07T13:49:18.082800Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490567934970405637:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:18.082857Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> KqpStats::DeferredEffects [GOOD] >> KqpStats::DataQueryWithEffects >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> KqpExplain::PrecomputeRange >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpQuery::RowsLimitServiceOverride >> KqpLimits::DatashardProgramSize >> YdbYqlClient::TestReadTableBatchLimits [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] >> KqpQuery::YqlTableSample [GOOD] >> KqpStats::DataQueryMulti >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpStats::RequestUnitForBadRequestExecute [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> DataShardSnapshots::LockedWriteDistributedCommitFreeze [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart >> KqpTypes::Time64Columns+EnableTableDatetime64 >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::Pure >> KqpTypes::SelectNull [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-04-07T13:48:46.193105Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567817871140372:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:46.193204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001831/r3tmp/tmpmwHbzi/pdisk_1.dat 2025-04-07T13:48:47.216688Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:47.316337Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29076, node 1 2025-04-07T13:48:47.471689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:47.472697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:47.633621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:47.679721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:47.679749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:47.679755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:47.679835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:48.393398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.470504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:48.484512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.506266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:48.507100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:48.507248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:48:48.534655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:48.550202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:48.550230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:48.561701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.569259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033728612, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:48.569307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:48.569595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:48.583187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:48.583360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.583400Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:48.583488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:48.583535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:48.583585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:48.586853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:48.586913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:48.586939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:48.587047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:51.164791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567817871140372:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:51.164861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:51.757447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567839345977911:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:51.757594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:51.758258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567839345977923:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:51.761509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.761731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:51.761760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:48:51.761829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:51.761845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:48:51.761901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:51.761949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:48:51.762199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T13:48:51.762452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:51.762475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:48:51.776787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:48:51.777078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:51.777629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:51.777700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T13:48:51.777888Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:51.777978Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:51.778071Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:51.780790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:51.780827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:51.780845Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:48:51.781047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:51.781065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:51.781075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 7205759404 ... known, add access: +(SR|DS):root@builtin 2025-04-07T13:49:21.050119Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:21.050723Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:21.050805Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2025-04-07T13:49:21.051030Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:21.051111Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:21.051217Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:21.056283Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.056361Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.056380Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:49:21.059607Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.059659Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.059678Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:49:21.060009Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.060035Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.060053Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:49:21.060396Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.060424Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.060439Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:49:21.060601Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.060618Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.060627Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:49:21.060785Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:49:21.064754Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033761106, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:21.064800Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033761106, at schemeshard: 72057594046644480 2025-04-07T13:49:21.064916Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:49:21.065057Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033761106, at schemeshard: 72057594046644480 2025-04-07T13:49:21.065106Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T13:49:21.065170Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033761106, at schemeshard: 72057594046644480 2025-04-07T13:49:21.065213Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2025-04-07T13:49:21.065320Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1744033761106 2025-04-07T13:49:21.065407Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2025-04-07T13:49:21.067665Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:21.068207Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:21.068276Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2025-04-07T13:49:21.068373Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2025-04-07T13:49:21.068570Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T13:49:21.068635Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2025-04-07T13:49:21.068752Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 
2025-04-07T13:49:21.068800Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2025-04-07T13:49:21.068880Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2025-04-07T13:49:21.068919Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2025-04-07T13:49:21.068944Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:49:21.068977Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T13:49:21.068990Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2025-04-07T13:49:21.069002Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2025-04-07T13:49:21.069021Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2025-04-07T13:49:21.074132Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.074202Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.074222Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:49:21.074490Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.074516Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.074529Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:49:21.074665Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.074683Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.074694Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:49:21.074837Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.074889Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.074911Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:49:21.075077Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:49:21.075125Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:49:21.075139Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:49:21.075198Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T13:49:21.096879Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490567966988492094:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:49:21.161829Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:49:21.162029Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:49:21.164303Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD] Test command err: 2025-04-07T13:48:49.921238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567832450058451:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:49.921297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a0e/r3tmp/tmpjRBfOF/pdisk_1.dat 2025-04-07T13:48:50.913660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:50.937566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:50.937769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:50.938003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:50.959489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19143, node 1 2025-04-07T13:48:51.429013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:51.429042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:51.429050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:51.429142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:52.137079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.155647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:52.161117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.164490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:52.164655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:52.164686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:48:52.167130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:52.169557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:52.169580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:52.171235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.174795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033732217, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:52.174822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:52.175043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:52.176819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:52.176956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:52.176996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:52.177080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:52.177112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:52.177147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:52.179704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:52.179740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:52.179757Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:52.179822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:52.379797Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr885nrt4ch9m58f3zy7fhym, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44452, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.956913s 2025-04-07T13:48:52.447203Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr885nsz798rn3ak79bz6azw, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44466, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T13:48:54.924949Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567832450058451:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:54.925009Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:48:55.808850Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jr885s3x8t9scqq08jf9dnk0, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44482, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T13:48:55.814259Z node 1 :TX_PROXY DEBUG: actor# [1:7490567836745025977:2138] Handle TEvProposeTransaction 2025-04-07T13:48:55.814286Z node 1 :TX_PROXY DEBUG: actor# [1:7490567836745025977:2138] TxId# 281474976710658 ProcessProposeTransaction 2025-04-07T13:48:55.814323Z node 1 :TX_PROXY DEBUG: actor# [1:7490567836745025977:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7490567858219863290:2634] 2025-04-07T13:48:55.873572Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:44482" 2025-04-07T13:48:55.873846Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:48:55.873900Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:48:55.873995Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:48:55.874085Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:48:55.874129Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-07T13:48:55.874229Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 HANDLE EvClientConnected 2025-04-07T13:48:55.874600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.875117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:55.875976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:55.875996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.878196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: 
StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:48:55.878349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:55.878489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:55.878534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:48:55.879129Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-07T13:48:55.879167Z node 1 :TX_PROXY DEBUG: Actor# [1:7490567858219863290:2634] txid# 281474976710658 SEND to# [1:7490567858219863289:2302] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-07T13:48:55.880178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at sche ... :NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T13:49:21.167667Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T13:49:21.167708Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976710679, MessageQuota: 25 2025-04-07T13:49:21.167733Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-07T13:49:21.167741Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976710679, at: 72075186224037897 2025-04-07T13:49:21.167786Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7490567950691370556:2306], Recipient [10:7490567950691370556:2306]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:21.167796Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:21.167815Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-07T13:49:21.167825Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:49:21.167836Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976710679] at 72075186224037897 for ReadTableScan 2025-04-07T13:49:21.167844Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976710679] at 72075186224037897 on unit ReadTableScan 2025-04-07T13:49:21.167856Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976710679] at 72075186224037897 error: , IsFatalError: 0 2025-04-07T13:49:21.167868Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976710679] at 72075186224037897 is Executed 2025-04-07T13:49:21.167879Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976710679] at 72075186224037897 executing on unit ReadTableScan 2025-04-07T13:49:21.167889Z node 10 :TX_DATASHARD TRACE: Add [0:281474976710679] at 72075186224037897 to execution unit FinishPropose 2025-04-07T13:49:21.167898Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976710679] at 72075186224037897 on unit FinishPropose 2025-04-07T13:49:21.167913Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976710679] at 72075186224037897 is DelayComplete 2025-04-07T13:49:21.167924Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976710679] at 72075186224037897 executing on unit FinishPropose 2025-04-07T13:49:21.167932Z node 10 :TX_DATASHARD TRACE: Add [0:281474976710679] at 
72075186224037897 to execution unit CompletedOperations 2025-04-07T13:49:21.167941Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976710679] at 72075186224037897 on unit CompletedOperations 2025-04-07T13:49:21.167958Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976710679] at 72075186224037897 is Executed 2025-04-07T13:49:21.167968Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976710679] at 72075186224037897 executing on unit CompletedOperations 2025-04-07T13:49:21.167976Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976710679] at 72075186224037897 has finished 2025-04-07T13:49:21.167999Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:21.168007Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-07T13:49:21.168015Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-07T13:49:21.168022Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-07T13:49:21.168041Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-07T13:49:21.168048Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976710679] at 72075186224037897 on unit FinishPropose 2025-04-07T13:49:21.168058Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976710679 at tablet 72075186224037897 send to client, exec latency: 32 ms, propose latency: 32 ms, status: COMPLETE 2025-04-07T13:49:21.168084Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-07T13:49:21.168524Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370550:2303]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.168569Z node 10 :READ_TABLE_API NOTICE: [10:7490567967871241602:2372] Finish grpc stream, status: 400000 2025-04-07T13:49:21.168585Z node 10 :READ_TABLE_API DEBUG: [10:7490567967871241602:2372] Send zero quota to Shard 0, TxId 281474976710678 2025-04-07T13:49:21.168599Z node 10 :READ_TABLE_API DEBUG: [10:7490567967871241602:2372] Send zero quota to Shard 0, TxId 281474976710678 2025-04-07T13:49:21.168611Z node 10 :READ_TABLE_API DEBUG: [10:7490567967871241602:2372] Send zero quota to Shard 0, TxId 281474976710678 2025-04-07T13:49:21.168621Z node 10 :READ_TABLE_API DEBUG: [10:7490567967871241602:2372] Send zero quota to Shard 0, TxId 281474976710678 2025-04-07T13:49:21.168633Z node 10 :READ_TABLE_API DEBUG: [10:7490567967871241602:2372] Send zero quota to Shard 0, TxId 281474976710678 2025-04-07T13:49:21.168715Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370558:2308]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.168860Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370572:2311]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.168880Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender 
[10:7490567967871241603:2372], Recipient [10:7490567950691370571:2310]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169010Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370549:2302]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169016Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370557:2307]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169132Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370551:2304]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169140Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370563:2309]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169243Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370555:2305]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.169279Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490567967871241603:2372], Recipient [10:7490567950691370556:2306]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744033761162 TxId: 281474976710678 2025-04-07T13:49:21.174904Z node 10 :GRPC_SERVER DEBUG: [0x51a000085880] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.175166Z node 10 :GRPC_SERVER DEBUG: [0x51a000087c80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.175386Z node 10 :GRPC_SERVER DEBUG: [0x51a000199280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.175573Z node 10 :GRPC_SERVER DEBUG: [0x51a000107480] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.175738Z node 10 :GRPC_SERVER DEBUG: [0x51a00010a480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.175895Z node 10 :GRPC_SERVER DEBUG: [0x51a000087080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176039Z node 10 :GRPC_SERVER DEBUG: [0x51a00006d280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176176Z node 10 :GRPC_SERVER DEBUG: [0x51a00010b080] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176341Z node 10 :GRPC_SERVER DEBUG: [0x51a00006e480] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176513Z node 10 :GRPC_SERVER DEBUG: [0x51a000113480] received request Name# DrainNode ok# false data# peer# current inflight# 
0 2025-04-07T13:49:21.176675Z node 10 :GRPC_SERVER DEBUG: [0x51a000066080] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176830Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b5880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.176986Z node 10 :GRPC_SERVER DEBUG: [0x51a000094880] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177167Z node 10 :GRPC_SERVER DEBUG: [0x51b000386280] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177310Z node 10 :GRPC_SERVER DEBUG: [0x51a000178e80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177471Z node 10 :GRPC_SERVER DEBUG: [0x51a000114680] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177621Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ae680] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177789Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ada80] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.177926Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ace80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.178075Z node 10 :GRPC_SERVER DEBUG: [0x51a000197480] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.178233Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2280] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.178386Z node 10 :GRPC_SERVER DEBUG: [0x51a000198080] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.178547Z node 10 :GRPC_SERVER DEBUG: [0x51a000103280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T13:49:21.178697Z node 10 :GRPC_SERVER DEBUG: [0x51a000095a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> KqpQuery::QueryStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] Test command err: Trying to start YDB, gRPC: 15282, MsgBus: 25938 2025-04-07T13:49:10.039731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567921781805792:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.049446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e2/r3tmp/tmpG5niYE/pdisk_1.dat 2025-04-07T13:49:10.443247Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15282, node 1 2025-04-07T13:49:10.513305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:10.513409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:10.529377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:10.609224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-07T13:49:10.609259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:10.609268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:10.609369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25938 TClient is connected to server localhost:25938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:11.235470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.248832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:11.274426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.425638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.576311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.691098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.704185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567934666709422:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.716587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.743347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.799027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.871813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.903980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.976426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.027505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.159428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938961677227:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.159520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.160070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938961677232:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.163307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.180898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567938961677234:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.044780Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567921781805792:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.044874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:15.390011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 17498, MsgBus: 21092 2025-04-07T13:49:17.060465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567952176303550:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:17.060833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e2/r3tmp/tmpQWWJ2b/pdisk_1.dat 2025-04-07T13:49:17.281406Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:17.287544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:17.287625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:17.305937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17498, node 2 2025-04-07T13:49:17.529349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:17.529371Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:17.529380Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:17.529494Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21092 TClient is connected to server localhost:21092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.128957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.137480Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:18.150110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.231182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:18.413113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.499342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.922370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567965061206900:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.922464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.962512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.004864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.037958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.109289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.134691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.187788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.274422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567969356174702:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.274518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.274804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567969356174707:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.278162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.287703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567969356174709:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:22.063036Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567952176303550:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.072093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:22.323178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.412008Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490567973651142395:2472], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-04-07T13:49:22.413895Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTI2MzZhZjAtYmQ3OGY1NDgtNmJjZGFhMTMtNGY5MmY5MzI=, ActorId: [2:7490567973651142316:2461], ActorState: ExecuteState, TraceId: 01jr886k2n0e5d10p4j0zsmws5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForBadRequestExecute [GOOD] Test command err: Trying to start YDB, gRPC: 1544, MsgBus: 10919 2025-04-07T13:49:05.346172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567899969575759:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:05.360916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140f/r3tmp/tmpdEKoFO/pdisk_1.dat 2025-04-07T13:49:06.106505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:06.106599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:06.109759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:06.115099Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1544, node 1 2025-04-07T13:49:06.305181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.305201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.305212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.305323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10919 TClient is connected to server localhost:10919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:07.018216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.074932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.230306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:49:07.493351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.571196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.793543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917149446518:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.806942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.844659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.902135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.935916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.970686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.999469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.065138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.113213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567921444414308:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.113284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.113355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567921444414313:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.117375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.126704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567921444414315:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:10.279522Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567899969575759:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.279576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:11.335471Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033751320, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 27877, MsgBus: 13586 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140f/r3tmp/tmprszjmR/pdisk_1.dat 2025-04-07T13:49:12.314441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:12.394622Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:12.416598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:12.416733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:12.419688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27877, node 2 2025-04-07T13:49:12.565272Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:12.565293Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:12.565299Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:12.565397Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13586 TClient is connected to server localhost:13586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:13.058314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.090923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:13.191530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.366704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.430348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.431850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567943395806837:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.431981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.506898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.559619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.650414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.702310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.786720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.873008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.928914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567943395807341:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.928982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.929149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567943395807346:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.933288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.952352Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:49:15.953196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567943395807348:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:17.115033Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033757123, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 24259, MsgBus: 7331 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140f/r3tmp/tmpAZsLQr/pdisk_1.dat 2025-04-07T13:49:18.213526Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:18.223386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:18.225480Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:18.225536Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:18.227922Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24259, node 3 2025-04-07T13:49:18.266909Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:18.266929Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:18.266936Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:18.267015Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7331 TClient is connected to server localhost:7331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.712180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.722136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.803468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.958557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:19.056180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:21.061821Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970354001671:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.061904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.100881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.125516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.152836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.228459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.306068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.350322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.447168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970354002175:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.447256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.447397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970354002180:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.450767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.468107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567970354002182:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:22.590008Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567974648969798:2465], status: GENERIC_ERROR, issues:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2025-04-07T13:49:22.590218Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2QwYzA0MTQtOTIxNGZjYi1jZmNlMDRiLTNhOGZhODI0, ActorId: [3:7490567974648969790:2460], ActorState: ExecuteState, TraceId: 01jr886k8ndpr1y2py35n15n1t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> KqpParams::Decimal-QueryService [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] Test command err: 2025-04-07T13:48:57.202674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567866716948842:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:57.202812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001832/r3tmp/tmppaoKEi/pdisk_1.dat 2025-04-07T13:48:57.973291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:57.973377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:57.981204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:57.999007Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5774, node 1 2025-04-07T13:48:58.389293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:58.389320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:58.389327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:58.389431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:58.751824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:58.767223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:58.768449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:58.774703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:58.774911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:58.774922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:48:58.777406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:58.777431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:58.779060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:58.784682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:58.785819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033738825, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:58.785886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:58.786157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:58.789388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:58.789585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:58.789647Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:58.792603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:58.792652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:58.792724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:58.794903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:58.794977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:58.795011Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:58.795086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:59.407604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:59.407759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:59.413307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 2025-04-07T13:49:03.144421Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567891915407958:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:03.144461Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001832/r3tmp/tmp6ROeoW/pdisk_1.dat 2025-04-07T13:49:03.314264Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:03.341704Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:03.341771Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-07T13:49:03.344370Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31901, node 4 2025-04-07T13:49:03.451203Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:03.451246Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:03.451253Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:03.451356Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:03.771622Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.772008Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:03.772048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.785274Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:49:03.785482Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:49:03.785496Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:49:03.787659Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:03.787689Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2025-04-07T13:49:03.789422Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.792788Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:03.793357Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033743837, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:03.793391Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:49:03.793656Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:49:03.795403Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:03.795565Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:03.795618Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 Pr ... ARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:49:22.242078Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:49:22.242085Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T13:49:22.242184Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:49:22.242224Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:49:22.242237Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2025-04-07T13:49:22.242277Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2025-04-07T13:49:22.245186Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490567973609551909:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:49:22.303508Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:49:22.303751Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:49:22.305864Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:49:22.691064Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr886jx56j341vq5tvs36tw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YmJiYjUwNTEtNjAwNTBiZmUtMzVjNmM0N2QtYzUwZDUyY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:49:22.739035Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: /Root/TheDir/FooTable, pathId: 0, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.739227Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:22.741063Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710664, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/TheDir/FooTable 2025-04-07T13:49:22.741330Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:22.741578Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:22.741997Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:22.742317Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710664, at schemeshard: 72057594046644480 2025-04-07T13:49:22.742455Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:49:22.742514Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:49:22.742528Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-07T13:49:22.742720Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:49:22.742762Z 
node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:49:22.742772Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-07T13:49:22.745550Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976710664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:49:22.745570Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:49:22.745616Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 4 -> 128 2025-04-07T13:49:22.746611Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:22.749130Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033762793, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:22.749176Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976710664:0 HandleReply TEvOperationPlan, step: 1744033762793, at schemeshard: 72057594046644480 2025-04-07T13:49:22.749237Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 128 -> 136 2025-04-07T13:49:22.750239Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976710664:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046644480 2025-04-07T13:49:22.750306Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976710664:0 ProgressState, no renaming has been detected for this operation 2025-04-07T13:49:22.750324Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 136 -> 137 2025-04-07T13:49:22.752191Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976710664:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046644480 2025-04-07T13:49:22.752218Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710664, done: 0, blocked: 1 2025-04-07T13:49:22.752289Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976710664:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976710664 Name: RenamePathBarrier }, at tablet# 72057594046644480 2025-04-07T13:49:22.752367Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 137 -> 129 2025-04-07T13:49:22.754826Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710664 Step: 1744033762793 OrderId: 281474976710664 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 552 } } 2025-04-07T13:49:22.755378Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:49:22.756108Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:22.756309Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710664:0 ProgressState at tablet: 72057594046644480 
2025-04-07T13:49:22.756363Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.756390Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 129 -> 240 2025-04-07T13:49:22.759600Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:22.759731Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:22.760050Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-07T13:49:22.760084Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-04-07T13:49:22.760150Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710664, publications: 2, subscribers: 1 2025-04-07T13:49:22.761591Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:49:22.761638Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:49:22.761657Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T13:49:22.761848Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:49:22.761887Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:49:22.761901Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-07T13:49:22.761944Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 2025-04-07T13:49:22.774549Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-07T13:49:22.779990Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:49:22.781868Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037888 not found 2025-04-07T13:49:22.859026Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490567952134714279:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.859102Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> KqpParams::InvalidJson [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats [GOOD] Test command err: Trying to start YDB, gRPC: 22253, MsgBus: 6235 2025-04-07T13:49:06.000601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567899280227469:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:06.000689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fd/r3tmp/tmpQbxSLq/pdisk_1.dat 2025-04-07T13:49:06.384465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:06.384555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:06.389841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:06.396904Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22253, node 1 2025-04-07T13:49:06.559890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.559914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.559922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.560009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6235 TClient is connected to server localhost:6235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:07.204425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.214477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:07.225213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.398039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:07.584848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.687352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.589560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567916460098373:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.589693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.780953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.815491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.865935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.934990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.974779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.040946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.105976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920755066172:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.106035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.106172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920755066177:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.109911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.123872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567920755066179:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:10.999759Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567899280227469:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.999835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:11.267331Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODkxYTUwYjItYWI2OTRmODAtMzlkZDIzYzYtNDAyM2NjNDY=, ActorId: [1:7490567925050033788:2461], ActorState: ExecuteState, TraceId: 01jr88685b31qen3zyqranjpsc, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 55ms during compilation Trying to start YDB, gRPC: 2184, MsgBus: 7579 2025-04-07T13:49:12.308954Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567928175238576:2178];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fd/r3tmp/tmp3H0xax/pdisk_1.dat 2025-04-07T13:49:12.420391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:12.578405Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:12.593153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:12.593228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:12.594672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2184, node 2 2025-04-07T13:49:12.739051Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:12.739071Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:12.739078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:12.739166Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7579 TClient is connected to server localhost:7579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:13.221835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.231000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:13.247222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.294007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:13.432014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72 ... 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567941060142029:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.436874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.468129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.551272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.586197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.627083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.660419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.732250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.798381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567941060142534:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.798467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.798684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567941060142539:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.802899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.821728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567941060142541:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:17.309190Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567928175238576:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:17.309260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24398, MsgBus: 24698 2025-04-07T13:49:18.205837Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567956120129654:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fd/r3tmp/tmp9jGTRN/pdisk_1.dat 2025-04-07T13:49:18.328956Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:18.448390Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:18.546368Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:18.546461Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:18.562030Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24398, node 3 2025-04-07T13:49:18.697325Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:18.697348Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:18.697359Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:18.697485Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24698 TClient is connected to server localhost:24698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:19.316924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:19.324525Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:19.331187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.414138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.652523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.743376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:21.750334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567969005033102:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.750421Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.808694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.858040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.900505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.936167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.967656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.060521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.138109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567973300000899:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.138211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.138552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567973300000904:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.142990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:22.157220Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567973300000906:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:23.215372Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567956120129654:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.216321Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 33060 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 23576 affected_shards: 1 } query_phases { duration_us: 7018 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2123 affected_shards: 2 } compilation { duration_us: 258259 cpu_time_us: 254965 } process_cpu_time_us: 682 total_duration_us: 311843 total_cpu_time_us: 281346 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27843, MsgBus: 20179 2025-04-07T13:49:04.968078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567895886255783:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:04.968153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141b/r3tmp/tmpHbgU9s/pdisk_1.dat 2025-04-07T13:49:05.368505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27843, node 1 2025-04-07T13:49:05.378300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:05.378403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:05.455056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:05.520699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:05.520729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:05.520736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:05.520837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20179 TClient is connected to server localhost:20179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:06.114633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.133746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:06.152270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.290281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.519942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.616622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:08.397979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567913066126686:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.398085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.716020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.748100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.787399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.819565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.889057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.975199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.112816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917361094489:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.113044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.115276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917361094494:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.119354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:09.135389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567917361094498:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:09.976292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567895886255783:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:09.976727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:10.392553Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490567921656062114:2467], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-04-07T13:49:10.392896Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzRjZjJkNTYtZjAxY2NiZTktYjBmMmFhOWUtY2Q5N2FhMmU=, ActorId: [1:7490567921656062104:2461], ActorState: ExecuteState, TraceId: 01jr8867aw004tr2a57srs6nc6, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 1911, MsgBus: 30533 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141b/r3tmp/tmpnXtoPd/pdisk_1.dat 2025-04-07T13:49:11.306122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:11.313458Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.328176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.328466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:11.330076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1911, node 2 2025-04-07T13:49:11.405130Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.405150Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.405155Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.405241Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30533 TClient is connected to server localhost:30533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:11.939735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.954385Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:14.158064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567938946881185:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.158365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.202294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.486028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.711794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567938946882518:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.711902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.712170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567938946882523:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.715325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.726408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567938946882525:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 19871, MsgBus: 15625 2025-04-07T13:49:18.012193Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567957952636840:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:18.012244Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141b/r3tmp/tmpv0Pbau/pdisk_1.dat 2025-04-07T13:49:18.195557Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:18.198728Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:18.198786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:18.201419Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19871, node 3 2025-04-07T13:49:18.265531Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:18.265551Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:18.265558Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:18.265643Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15625 TClient is connected to server localhost:15625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.722762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.739107Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:21.376803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970837539362:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.376909Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.395298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.599193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.844580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970837540691:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.844705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.844770Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567970837540696:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.849577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.859738Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T13:49:21.859974Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567970837540698:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:49:23.019351Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567957952636840:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.019415Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpLimits::ComputeActorMemoryAllocationFailure [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4184, MsgBus: 9487 2025-04-07T13:49:04.542480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567894270327689:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:04.542571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142c/r3tmp/tmpXWm93T/pdisk_1.dat 2025-04-07T13:49:05.014945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:05.015075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:05.016920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:05.018203Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4184, node 1 2025-04-07T13:49:05.101300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:05.101326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:05.101334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:05.101428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9487 TClient is connected to server localhost:9487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:05.638205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:05.685790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:05.854816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.036890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.115475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:08.291257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567911450198416:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.298974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.324455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.344390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.427956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.470752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.508916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.542950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.665731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567911450198918:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.665802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.666125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567911450198923:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:08.669839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:08.697627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567911450198925:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:09.540790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567894270327689:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:09.540859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27726, MsgBus: 5826 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142c/r3tmp/tmp2oPlU5/pdisk_1.dat 2025-04-07T13:49:11.112859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:11.129389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.162910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.162991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27726, node 2 2025-04-07T13:49:11.164447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:11.234134Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.234159Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.234166Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.234252Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5826 TClient is connected to server localhost:5826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:11.759279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.768185Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:11.779551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:11.871731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.041058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.119934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:14.556416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567936609943296:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.556493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Re ... ] ActorId: [2:7490567936609943799:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.935470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.935803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567936609943804:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.938893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.947376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567936609943806:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 13487, MsgBus: 14174 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142c/r3tmp/tmpCU8PX3/pdisk_1.dat 2025-04-07T13:49:17.501095Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:17.504829Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:17.551720Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:17.551798Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:17.552727Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13487, node 3 2025-04-07T13:49:17.745224Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:17.745252Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:17.745260Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:17.745370Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14174 TClient is connected to server localhost:14174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.379132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.386465Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:18.400436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.461727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.636604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:18.734450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:21.405630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567968291711869:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.405733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.445505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.476890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.520429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.561479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.628939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.689706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.823562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567968291712371:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.823631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.823674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567968291712376:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.827775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.840107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567968291712378:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:22.897430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.722340Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567976881647531:2494], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-07T13:49:23.722971Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjllZGNiN2EtNjBmMzE0NS1jYzU3NTMzMS01NWFjY2ViOA==, ActorId: [3:7490567972586679986:2461], ActorState: ExecuteState, TraceId: 01jr886mbmfkgebthzqc90sew4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:49:23.853964Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjllZGNiN2EtNjBmMzE0NS1jYzU3NTMzMS01NWFjY2ViOA==, ActorId: [3:7490567972586679986:2461], ActorState: ExecuteState, TraceId: 01jr886mcn56er7dndpvzddvmr, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1233: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-07T13:49:23.892545Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567976881647567:2500], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T13:49:23.894530Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjllZGNiN2EtNjBmMzE0NS1jYzU3NTMzMS01NWFjY2ViOA==, ActorId: [3:7490567972586679986:2461], ActorState: ExecuteState, TraceId: 01jr886mh4e97q7mcfz0skvadc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:49:23.920353Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567976881647584:2504], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T13:49:23.922021Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjllZGNiN2EtNjBmMzE0NS1jYzU3NTMzMS01NWFjY2ViOA==, ActorId: [3:7490567972586679986:2461], ActorState: ExecuteState, TraceId: 01jr886mj6aazxcpgnr8en6a02, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 29828, MsgBus: 5570 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001415/r3tmp/tmpWGBXlV/pdisk_1.dat 2025-04-07T13:49:05.421069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567901936203807:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:05.748453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:06.031214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:06.031333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:06.033582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:06.062864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29828, node 1 2025-04-07T13:49:06.110318Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:49:06.110385Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:49:06.290620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.290638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.290652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.290730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5570 TClient is connected to server localhost:5570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:07.130948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:07.145858Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:07.161664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.362330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.559137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.650913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.884892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567919116074586:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.885067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.945045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.990008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.041049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.085067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.119780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.186331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.284931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567923411042382:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.285035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.285276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567923411042388:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.289004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.291312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567901936203807:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.291349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:10.301167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567923411042390:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:11.746167Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWY4YTk1ZWEtZDM1YzlhYzQtYTU1YTcxZmQtMThlNmYwMmQ=, ActorId: [1:7490567927706010002:2462], ActorState: ExecuteState, TraceId: 01jr8868hgd5t2263kfkp43rgv, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1233: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 27948, MsgBus: 4629 2025-04-07T13:49:12.601909Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567931645463294:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:12.601955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001415/r3tmp/tmprCTTv3/pdisk_1.dat 2025-04-07T13:49:12.925058Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:12.938073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:12.938154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:12.941849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27948, node 2 2025-04-07T13:49:13.044164Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:13.044185Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:13.044191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:13.044282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4629 TClient is connected to server localhost:4629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:49:13.610121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.625029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:13.634753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.708651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting. ... Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.114039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.172928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.206885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.237608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.272912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.300354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.375021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.433630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567948825334605:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.433686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.433909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567948825334610:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.437969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:16.451235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567948825334612:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:17.618490Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567931645463294:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:17.618802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64802, MsgBus: 22711 2025-04-07T13:49:19.409859Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567959930856472:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001415/r3tmp/tmpCA0ltf/pdisk_1.dat 2025-04-07T13:49:19.460999Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:19.500235Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:19.539650Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:19.549075Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:19.551073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64802, node 3 2025-04-07T13:49:19.633206Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:19.633229Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:19.633237Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:19.633349Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22711 TClient is connected to server localhost:22711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:20.182050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:20.194325Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:20.215103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.280227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.448301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.548512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.884791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567972815759902:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.884897Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.973506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.038090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.074012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.109907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.140578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.235403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.322650Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567977110727702:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:23.322731Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:23.322919Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567977110727707:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:23.326663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:23.342402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567977110727709:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:24.354989Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567959930856472:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:24.355054Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:24.410009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.569652Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTk0ZWIwN2ItNzk2ZDQ0ZjEtMTUwYjAzM2YtMmI1ODRiNTE=, ActorId: [3:7490567981405695319:2460], ActorState: ExecuteState, TraceId: 01jr886n3k8tm7e95jcay3mp4s, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:957: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:957: Invalid Json value >> KqpQuery::QueryCacheTtl >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> TCacheTest::Attributes >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> KqpStats::DataQueryWithEffects [GOOD] >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectWhereInSubquery >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> KqpStats::SysViewClientLost >> KqpExplain::CompoundKeyRange [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> KqpQuery::RewriteIfPresentToMap >> TCacheTest::CheckAccess [GOOD] >> KqpLimits::ManyPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 22634, MsgBus: 19888 2025-04-07T13:48:20.449744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567708364509763:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:20.449901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c05/r3tmp/tmpwhu7xv/pdisk_1.dat 2025-04-07T13:48:21.195728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:21.200572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:21.200713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:21.209146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22634, node 1 2025-04-07T13:48:21.345175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:21.345197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:21.345204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:21.345289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19888 2025-04-07T13:48:25.412503Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567708364509763:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:25.412883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19888 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:28.503728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:28.527382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:28.562856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:31.576139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:34.009446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:34.369311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:36.185115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:48:36.185380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:37.064545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567781378955405:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.064752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:37.468181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.003655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.367075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.707925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.807479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:48:38.962062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:48:39.042983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567789968890521:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:39.043048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567789968890526:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:39.043098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:39.046059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:48:39.056720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567789968890528:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:48:41.556621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-07T13:48:42.312602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:48:42.783854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-07T13:48:44.426984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:48:46.264999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.057023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T13:48:47.570696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:48:47.663325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.030808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710717:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21284, MsgBus: 17960 2025-04-07T13:48:52.541643Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567846016123080:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c05/r3tmp/tmpXFEdcI/pdisk_1.dat 2025-04-07T13:48:52.618327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:48:52.766476Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:52.793373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:52.793471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:52.794886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21284, node 2 2025-04-07T13:48:52.942831Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:52.942853Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:52.942872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-07T13:48:52.942986Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17960 TClient is connected to server localhost:17960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateN ... T_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:49:01.255601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-04-07T13:49:02.126967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T13:49:02.790701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T13:49:03.497924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T13:49:04.154584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T13:49:04.210884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.730651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:07.730680Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:09.364157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715734:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20326, MsgBus: 10621 2025-04-07T13:49:10.733163Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567922197891106:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.733208Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c05/r3tmp/tmpNQR8GR/pdisk_1.dat 2025-04-07T13:49:11.097290Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.143642Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.143729Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:11.150011Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 20326, node 3 2025-04-07T13:49:11.369280Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.369304Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.369311Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.369429Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10621 TClient is connected to server localhost:10621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:12.363004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.374443Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:12.396309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.530382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.909916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.049213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.296217Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567943672729293:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.296341Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.343168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.389406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.440198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.484325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.517092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.598989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.700224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567943672729794:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.700313Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.700778Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567943672729799:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.705187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.725123Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:15.725610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567943672729801:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.737800Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567922197891106:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.737879Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:17.351682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-07T13:49:18.123878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:49:18.684858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-07T13:49:19.356919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.045795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.712538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.304219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:49:21.454953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.798298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710732:0, at schemeshard: 72057594046644480 >> KqpQuery::Pure [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-04-07T13:49:27.702253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:27.702297Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:27.844416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 
5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:27.861067Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-07T13:49:28.132830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:28.132884Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:28.181229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T13:49:28.191358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-07T13:49:28.194735Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:194:2185], for# user1@builtin, access# DescribeSchema 2025-04-07T13:49:28.195163Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:198:2189], for# user1@builtin, access# DescribeSchema |84.6%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpStats::DataQueryMulti [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects [GOOD] Test command err: Trying to start YDB, gRPC: 11682, MsgBus: 27550 2025-04-07T13:49:05.068060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567898719818008:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:05.068100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001426/r3tmp/tmpiVntyj/pdisk_1.dat 2025-04-07T13:49:05.811455Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11682, node 1 2025-04-07T13:49:05.889149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:05.889238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:05.890947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:06.044599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.044620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.044625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.044716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27550 TClient is connected to server localhost:27550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:07.019777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.063110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.202773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:07.386838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.512150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.830634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567915899688825:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.852998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.897452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.968735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.009013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.069977Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567898719818008:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.070201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:10.079039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.122479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.163265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.244190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920194656628:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.244276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.244570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920194656633:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.248152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.264851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567920194656635:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:13.381635Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033752349, txId: 281474976710671] shutting down 2025-04-07T13:49:13.383294Z node 1 :KQP_NODE ERROR: node service cancelled the task, because it executer lost: 1, NodeId: 1, TxId: 281474976710673 Trying to start YDB, gRPC: 7752, MsgBus: 65287 2025-04-07T13:49:14.174589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567937694826350:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001426/r3tmp/tmpaZ9seH/pdisk_1.dat 2025-04-07T13:49:14.224473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:14.318196Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:14.347405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:14.347484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:14.349157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7752, node 2 2025-04-07T13:49:14.473243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:14.473266Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:14.473273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:14.473358Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65287 TClient is connected to server localhost:65287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:14.971017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:14.977960Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:14.984329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.051811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.255931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.341114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.655603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567950579729767:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.655682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.702151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.750619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.837592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.880893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.921694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.962377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:18.039753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567954874697561:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.039833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.040147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567954874697566:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.043637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:18.061339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567954874697568:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:19.190541Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567937694826350:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:19.190602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 15729, MsgBus: 28855 2025-04-07T13:49:21.671701Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567969667172651:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.671788Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001426/r3tmp/tmp32vpTW/pdisk_1.dat 2025-04-07T13:49:21.788649Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:21.801552Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:21.801631Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:21.802456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15729, node 3 2025-04-07T13:49:21.873992Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:21.874018Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:21.874032Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:21.874165Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28855 TClient is connected to server localhost:28855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:22.466594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.475193Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:22.485614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:22.595520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.824336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.929655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.408009Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567986847043514:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.408067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.453893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.541316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.584256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.652822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.685570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.715914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.799469Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567986847044015:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.799559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.799624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567986847044020:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.802683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:25.813549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567986847044022:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:26.688020Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567969667172651:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:26.688074Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Viewer::TabletMerging [GOOD] >> Viewer::TabletMergingPacked >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: Trying to start YDB, gRPC: 16287, MsgBus: 18227 2025-04-07T13:49:06.106735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567906090384502:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:06.107759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001403/r3tmp/tmpQuoLvg/pdisk_1.dat 2025-04-07T13:49:06.618173Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:06.627910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:06.627989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:06.637928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16287, node 1 2025-04-07T13:49:06.869168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.869185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.869193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.869282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18227 TClient is connected to server localhost:18227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
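For context on KqpExplain::CompoundKeyRange: the EXPLAIN JSON later in this test's output reports a TableRangeScan over /Root/Logs with ReadRange ["App (new_app_1)", "Ts (49)", "Host (null, xyz)"] and Limit 10. A query of roughly the following shape would produce that kind of compound key range — a sketch inferred from the plan, not the test's actual source:

    -- Assumes Logs has a composite primary key beginning with (App, Ts, Host).
    SELECT App, Host, Message, Ts
    FROM Logs
    WHERE App = "new_app_1"
      AND Ts = 49
      AND Host < "xyz"   -- matches the half-open range "(null, xyz)" in the plan
    LIMIT 10;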
2025-04-07T13:49:07.563282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.574053Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:07.588291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.919325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:08.191283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:08.280312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:10.248969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567923270255423:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.249066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.531842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.563140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.639981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.695275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.742323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.796249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.893533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567923270255925:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.893685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.894015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567923270255931:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.897890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.922613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567923270255933:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:11.111183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567906090384502:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:11.111241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key 
(-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29836, MsgBus: 7515 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001403/r3tmp/tmpXwFrsQ/pdisk_1.dat 2025-04-07T13:49:14.309317Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:74905 ... 1474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:18.031637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567954747610110:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.031724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.031987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567954747610115:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:18.035894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:18.053544Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:18.053772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567954747610117:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:19.212345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.314320Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567937567739030:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:19.318212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:19.651867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.693314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25710, MsgBus: 64673 2025-04-07T13:49:22.059890Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567974777369320:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.059949Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001403/r3tmp/tmpHamz6R/pdisk_1.dat 2025-04-07T13:49:22.169283Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:22.194669Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:22.194755Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:22.197712Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25710, node 3 2025-04-07T13:49:22.339434Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:22.339456Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:22.339463Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:22.339560Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64673 TClient is connected to server localhost:64673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:22.979433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:23.009533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.149744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:23.320352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.400299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.708004Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567987662272897:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.708101Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.747847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.825814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.863223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.907129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.953556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.040806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.082443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567991957240696:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.082535Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.082567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567991957240701:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.085834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.097129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567991957240703:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:27.067157Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567974777369320:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.075249Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] Test command err: Trying to start YDB, gRPC: 13208, MsgBus: 20532 2025-04-07T13:49:07.903034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567907235521148:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:07.903345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f5/r3tmp/tmpouvIlR/pdisk_1.dat 2025-04-07T13:49:08.631827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:08.631935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:08.638024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13208, node 1 2025-04-07T13:49:08.718723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:08.859703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-07T13:49:08.859724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:08.859731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:08.859807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20532 TClient is connected to server localhost:20532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:09.907565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.958762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:10.110055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:10.246120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:10.320875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.904784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567928710359219:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.958132Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567907235521148:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:12.959418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.963082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:12.987133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.060164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.098472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.147025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.220019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.295193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.359139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567933005327022:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.359186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.359439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567933005327027:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.362946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:13.383952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567933005327029:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 26565, MsgBus: 30637 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f5/r3tmp/tmpR2TZ2p/pdisk_1.dat 2025-04-07T13:49:16.071095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:16.082088Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:16.106684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:16.106763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:16.109235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26565, node 2 2025-04-07T13:49:16.317234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:16.317253Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:16.317261Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:16.317360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30637 TClient is connected to server localhost:30637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:16.883837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:16.896884Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:16.910780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:16.993098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.234281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
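For readers unfamiliar with KQP parameters: the KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck output below concerns parameterized queries. A generic YQL sketch with an explicitly declared parameter (placeholder names, not the test's actual statements); the "Implicit" in the test name suggests the same idea with the parameter type inferred from the bound value rather than declared:

    DECLARE $key AS Uint64;   -- explicit parameter declaration
    SELECT Key, Value
    FROM KeyValue             -- placeholder table name
    WHERE Key = $key;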
2025-04-07T13:49:17.362876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.816851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567960398509519:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.816938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.851264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.887518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.923307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.947290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.985671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.021510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.083145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567964693477307:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.083259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.083587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567964693477312:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.087726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:20.106123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567964693477314:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20862, MsgBus: 30038 2025-04-07T13:49:22.314663Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567972866744020:2143];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f5/r3tmp/tmp03PbAE/pdisk_1.dat 2025-04-07T13:49:22.364184Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:22.426311Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:22.458597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:22.458674Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20862, node 3 2025-04-07T13:49:22.459852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:22.493185Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:22.493208Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:22.493215Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:22.493318Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30038 TClient is connected to server localhost:30038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:23.089847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.110857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:49:23.220683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.389613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:23.473077Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.691499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567985751647518:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.691578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.735486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.783611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.817107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.847922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.906945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.944378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.022690Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567990046615316:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.022755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.022811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567990046615321:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.026467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.038125Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567990046615323:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:27.309628Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567972866744020:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.309696Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-04-07T13:48:19.905560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:19.906022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:19.906136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 30899, node 1 TClient is connected to server localhost:64945 2025-04-07T13:48:49.979075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:285:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:49.979381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:49.979451Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 24515, node 2 TClient is connected to server localhost:27160 2025-04-07T13:49:02.696512Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:297:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:02.697022Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:02.697110Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21192, node 3 TClient is connected to server localhost:1420 2025-04-07T13:49:13.646023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:13.646244Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:13.646356Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22602, node 4 TClient is connected to server localhost:26614 2025-04-07T13:49:25.080054Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:284:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:25.080433Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:25.080645Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28204, node 5 TClient is connected to server localhost:27019 >> KqpQuery::NoEvaluate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 26945, MsgBus: 8481 2025-04-07T13:49:11.272261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567923841695035:2177];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:11.272547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013dc/r3tmp/tmpm10mKR/pdisk_1.dat 2025-04-07T13:49:11.699235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.718306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.718377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:11.720421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26945, node 1 2025-04-07T13:49:11.914640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.914661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.914666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.914739Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8481 TClient is connected to server localhost:8481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
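The KqpQuery::Pure output below presumably covers queries that touch no tables ("pure" computation; this reading is inferred from the test name, not stated in the log). A trivial sketch of such a query:

    -- Computed entirely from literals: no table access, so no shards are read.
    SELECT 1 + 2 AS three, CAST("2025-04-07" AS Date) AS day;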
2025-04-07T13:49:12.627713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.659354Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:12.664448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.782456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:12.927170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.010003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:14.750298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567936726598487:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.750385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.944725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.967250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.007393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.035542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.061938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.135597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.229083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567941021566280:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.229166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.229536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567941021566285:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.232868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.246656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567941021566287:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:16.256123Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567923841695035:2177];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:16.256183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12536, MsgBus: 31094 2025-04-07T13:49:17.166159Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567949714736498:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:17.166191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013dc/r3tmp/tmpPvsZJC/pdisk_1.dat 2025-04-07T13:49:17.371392Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:17.391730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:17.391800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:17.393969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12536, node 2 2025-04-07T13:49:17.589300Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:17.589329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:17.589336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:17.589437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31094 TClient is connected to server localhost:31094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.247022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.267485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:18.381262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.616201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:18.690692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.850034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567962599640091:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.853378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.872789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.902699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.934494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.964526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.991762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.059189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.156848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567966894607889:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.157023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.157321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567966894607895:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.161180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.174856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567966894607897:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:22.168686Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567949714736498:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.168742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:22.470141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5220, MsgBus: 12968 2025-04-07T13:49:23.687914Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567975633869280:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.687963Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013dc/r3tmp/tmpkyP7sR/pdisk_1.dat 2025-04-07T13:49:23.833941Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:23.835540Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.835599Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:23.836768Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5220, node 3 2025-04-07T13:49:23.941174Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:23.941191Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:23.941198Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:23.941289Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12968 TClient is connected to server localhost:12968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:24.370096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:24.377328Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:24.387947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.443587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.612416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.717692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.485811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567988518772864:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.485894Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.543687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.616969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.650001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.692659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.731643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.768434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.853956Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567988518773369:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.854041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.854243Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567988518773374:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.858041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.868708Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567988518773376:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TCacheTest::RacyCreateAndSync >> KqpLimits::DatashardProgramSize [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> DataShardSnapshots::LockedWriteCleanupOnSplit [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 21028, MsgBus: 9781 2025-04-07T13:49:05.393435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567898529125721:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:05.393486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001409/r3tmp/tmpd9E531/pdisk_1.dat 2025-04-07T13:49:05.847826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:05.847967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:05.851056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:05.916941Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21028, node 1 2025-04-07T13:49:06.037301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:06.037331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:06.037363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:06.037456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9781 TClient is connected to server localhost:9781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:06.810386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.841795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:06.997820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.223326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.318940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.725103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567915708996629:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.725207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.937415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.008925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.048022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.118138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.147704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.198887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:10.268969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920003964425:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.269052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.269273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567920003964430:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:10.292273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:10.306799Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:10.307106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567920003964432:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:10.397693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567898529125721:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.397910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5677, MsgBus: 7547 2025-04-07T13:49:13.215862Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567933617234813:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001409/r3tmp/tmpxRbW2e/pdisk_1.dat 2025-04-07T13:49:13.307437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:13.386288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:13.386358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:13.388034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:13.393787Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5677, node 2 2025-04-07T13:49:13.488158Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:13.488180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:13.488187Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:13.488268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7547 TClient is connected to server localhost:7547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:13.964083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:13.970392Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:16.474298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567946502137150:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.474416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.476348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567946502137162:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.479868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at sche ... :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7490567991126946371:2451];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:49:26.227974Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:49:26.228001Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:49:26.228160Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:49:26.228188Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:49:26.228372Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:49:26.228397Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:49:26.228531Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:49:26.228556Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:49:26.228975Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:49:26.229005Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:49:26.229117Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:49:26.229152Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:49:26.229323Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:49:26.229365Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-04-07T13:49:26.229478Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:49:26.229506Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:49:26.229580Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:49:26.229604Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:49:26.229642Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:49:26.229664Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:49:26.230071Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:49:26.230108Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:49:26.230300Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:49:26.230324Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:49:26.230451Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:49:26.230468Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:49:26.230618Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:49:26.230634Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:49:26.230735Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:49:26.230756Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 
2025-04-07T13:49:26.231762Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:49:26.231802Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:49:26.231905Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:49:26.231933Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:49:26.232132Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:49:26.232168Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:49:26.232238Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:49:26.232301Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:49:26.232374Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:49:26.232398Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:49:26.232444Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:49:26.232468Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:49:26.232836Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:49:26.232884Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:49:26.233110Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:49:26.233138Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T13:49:26.233284Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:49:26.233310Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:49:26.233532Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:49:26.233559Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:49:26.233687Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:49:26.233709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:49:26.535487Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567969652108092:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:26.535741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::JoinStatsBasicScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 17180, MsgBus: 61104 2025-04-07T13:49:10.375539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567921926873107:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.375868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e7/r3tmp/tmpQPM9db/pdisk_1.dat 2025-04-07T13:49:11.261203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:11.265755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.265840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:11.279069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17180, node 1 2025-04-07T13:49:11.561193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.561218Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.561223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.561309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61104 TClient is connected to server localhost:61104 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:12.493110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.508841Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:12.525190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.730677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.937935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.012969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:14.747022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567939106743837:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.754102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.044120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.083587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.127385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.155176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.185952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.236040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.300276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567943401711626:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.300349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.300638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567943401711631:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.304449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.322366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:15.322662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567943401711633:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.364826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567921926873107:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.364902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:16.749755Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490567947696679323:2473], status: GENERIC_ERROR, issues:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... 2025-04-07T13:49:16.750951Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmFjZmE0YWQtZDJmZjM4MDItZGU1ODYyZjAtZWFmM2NjZjc=, ActorId: [1:7490567947696679242:2461], ActorState: ExecuteState, TraceId: 01jr886dhy767n758hjsm7a2t7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... Trying to start YDB, gRPC: 4227, MsgBus: 2980 2025-04-07T13:49:17.698931Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567953167859905:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e7/r3tmp/tmpZjuQTw/pdisk_1.dat 2025-04-07T13:49:17.799054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:17.854870Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:17.874399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:17.874472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:17.875227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4227, node 2 2025-04-07T13:49:17.957074Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:17.957093Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:17.957101Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:17.957220Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2980 TClient is connected to server localhost:2980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.495935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.506188Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:18.524311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, su ... type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.860100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:21.077711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567970347730610:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.077775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.133198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.184050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.211623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.239261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.277519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.318505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.391723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567970347731107:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.391810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.391995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567970347731112:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.394950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.403804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567970347731114:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:22.518766Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490567974642698730:2465], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-04-07T13:49:22.520247Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjdhZTY4MjQtNjZlY2FhZDgtZDAxOTdhYWYtNzc2NjFlZWI=, ActorId: [2:7490567974642698721:2460], ActorState: ExecuteState, TraceId: 01jr886k63ah4j3m9zr9sgrq6z, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 29833, MsgBus: 3030 2025-04-07T13:49:23.298076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567976904830573:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.298129Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e7/r3tmp/tmp9oAzS9/pdisk_1.dat 2025-04-07T13:49:23.437677Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:23.442806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.442892Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:23.449597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29833, node 3 2025-04-07T13:49:23.548914Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:23.548937Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:23.548946Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:23.549031Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3030 TClient is connected to server localhost:3030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:24.058690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:24.066421Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:24.074592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.155489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.321558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.404338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.731291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567989789734160:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.731381Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.772281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.799148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.865688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.932076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.003756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.052842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.112856Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567994084701961:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.112922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.113117Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567994084701966:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.116608Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:27.127526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567994084701968:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:28.299147Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567976904830573:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.299200Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::RequestUnitForExecute [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 10098, MsgBus: 12773 2025-04-07T13:49:10.898001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567922983895437:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.898041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e1/r3tmp/tmptNUSBk/pdisk_1.dat 2025-04-07T13:49:11.764962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:11.765084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:11.768242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:11.781708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10098, node 1 2025-04-07T13:49:11.949170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:11.949186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:11.949192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:11.949265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12773 TClient is connected to server localhost:12773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:12.758936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:12.773624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:12.783892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.987250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.146230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.236243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.026186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567944458733630:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.059232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.240465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.272642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.301146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.378948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.408626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.472279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:15.540210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567944458734128:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.540276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.540570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567944458734133:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.548203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:15.572154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567944458734135:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.898927Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567922983895437:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.898983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61493, MsgBus: 9731 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e1/r3tmp/tmpAA0P0k/pdisk_1.dat 2025-04-07T13:49:18.108477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:18.156741Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:18.168652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:18.168949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:18.178751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61493, node 2 2025-04-07T13:49:18.301243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:18.301267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:18.301274Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:18.301397Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9731 TClient is connected to server localhost:9731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:18.835422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.865520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.951889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:49:19.105533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.196702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:21.292385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567967089955222:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.292445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.345768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.387238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.418794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.446481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.489109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.567556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.610852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567967089955721:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.610939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.611948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567967089955726:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:21.615749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:21.623665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567967089955728:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 11892, MsgBus: 4986 2025-04-07T13:49:23.744471Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567978292214286:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.744562Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e1/r3tmp/tmpyjD84P/pdisk_1.dat 2025-04-07T13:49:23.923661Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:23.927485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.927560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:23.929955Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11892, node 3 2025-04-07T13:49:24.065234Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:24.065256Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:24.065263Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:24.065353Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4986 TClient is connected to server localhost:4986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:24.491163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.502052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:49:24.561223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.684306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.747159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.081613Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567995472085163:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.081724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.127854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.174104Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.207151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.236369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.273253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.350904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.411508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567995472085662:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.411599Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.411824Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567995472085667:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.414852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:27.424710Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567995472085669:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:28.744830Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567978292214286:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.744883Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::PathBelongsToDomain >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::WriteSmall |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge >> KqpQuery::DdlInDataQuery >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> TCacheTest::PathBelongsToDomain [GOOD] >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::NoEvaluate [GOOD] Test command err: Trying to start YDB, gRPC: 4517, MsgBus: 5702 2025-04-07T13:49:09.813348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567917520357716:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:09.813991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ef/r3tmp/tmpCPcRnT/pdisk_1.dat 2025-04-07T13:49:10.375952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:10.376034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:10.381821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:10.451770Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4517, node 1 2025-04-07T13:49:10.689257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:10.689274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:10.689279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:10.689358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5702 TClient is connected to server localhost:5702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:11.639831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.670798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.894868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.095910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.178482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:14.169397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567934700228453:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.179845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.212299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.267699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.321977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.363163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.401695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.488505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.546126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938995196251:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.546182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.546447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938995196256:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.549862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.571240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567938995196258:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:14.860286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567917520357716:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:14.860580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17906, MsgBus: 62607 2025-04-07T13:49:16.753202Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567945156726975:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ef/r3tmp/tmpEtlyAE/pdisk_1.dat 2025-04-07T13:49:16.798545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:16.945747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:16.945810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:16.946068Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:16.968761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17906, node 2 2025-04-07T13:49:17.033235Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:17.033258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:17.033265Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:17.033360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62607 TClient is connected to server localhost:62607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:17.478501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.506750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:17.587772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.768000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.848006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.876797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567958041630408:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.876899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, ... at schemeshard: 72057594046644480 2025-04-07T13:49:20.084427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.169135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.253786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567962336598214:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.253871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.254194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567962336598219:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.258469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:20.282257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567962336598221:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } waiting... 2025-04-07T13:49:21.341279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:21.723193Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567945156726975:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.723268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6030, MsgBus: 10026 2025-04-07T13:49:24.009045Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567979801947167:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:24.009098Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ef/r3tmp/tmpivisk6/pdisk_1.dat 2025-04-07T13:49:24.174368Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:24.199526Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:24.199600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:24.203359Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6030, node 3 2025-04-07T13:49:24.305101Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:24.305127Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:24.305134Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:24.305242Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10026 TClient is connected to server localhost:10026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:24.812491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:24.823786Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:24.839486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.915574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.098691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.174937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.389349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567992686850742:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.389471Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.426704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.471186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.506863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.549983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.583700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.626579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.684372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567992686851237:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.684456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.684624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567992686851242:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.688421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:27.699181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567992686851244:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:28.865145Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567996981818861:2465], status: UNSUPPORTED, issues:
: Error: Default error
:7:24: Error: EVALUATE IF is not supported in YDB queries., code: 2030 2025-04-07T13:49:28.866639Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzZiYmI2ZjMtM2IzMzE4MGYtM2YwYjE5ZDUtYmJmMGFjZWI=, ActorId: [3:7490567996981818853:2460], ActorState: ExecuteState, TraceId: 01jr886sb28gj0jqjvfjp4e5m4, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2025-04-07T13:49:28.889880Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567996981818865:2467], status: UNSUPPORTED, issues:
: Error: Default error
:4:28: Error: EVALUATE is not supported in YDB queries., code: 2030 2025-04-07T13:49:28.891304Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzZiYmI2ZjMtM2IzMzE4MGYtM2YwYjE5ZDUtYmJmMGFjZWI=, ActorId: [3:7490567996981818853:2460], ActorState: ExecuteState, TraceId: 01jr886sd92d0q3zp37pqh9z8p, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2025-04-07T13:49:29.006346Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490567996981818892:2471], status: UNSUPPORTED, issues:
: Error: Default error
:8:78: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-04-07T13:49:29.008192Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzZiYmI2ZjMtM2IzMzE4MGYtM2YwYjE5ZDUtYmJmMGFjZWI=, ActorId: [3:7490567996981818853:2460], ActorState: ExecuteState, TraceId: 01jr886sh1dqpczdpfss6r9cee, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2025-04-07T13:49:29.010028Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567979801947167:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:29.010088Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpExplain::LimitOffset >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpQuery::UdfTerminate >> KqpExplain::SortStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-04-07T13:49:30.360498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:30.360554Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:30.514118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:30.534325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-07T13:49:30.874521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:49:30.874579Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T13:49:30.928592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-07T13:49:30.933373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: 
Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:49:30.939801Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-07T13:49:30.948570Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:223:2202], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:49:30.948863Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:225:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 14268, MsgBus: 17688 2025-04-07T13:49:09.176838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567915308516193:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:09.177056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f1/r3tmp/tmpUYaoz4/pdisk_1.dat 2025-04-07T13:49:09.665314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:09.667071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:09.667153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:09.671163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14268, node 1 2025-04-07T13:49:09.773565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:09.773590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:09.773596Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:09.773684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17688 TClient is connected to server localhost:17688 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:10.568068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:10.598884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:10.627032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:10.846958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.246793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.383181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.485750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567932488386930:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.487191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.706416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.742050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.774370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.817749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.853163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.886800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.935301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567932488387424:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.935375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.935513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567932488387429:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.939084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:13.955038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:13.955308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567932488387431:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:14.203415Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567915308516193:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:14.203662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12164, MsgBus: 26531 2025-04-07T13:49:16.464032Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567947103087271:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:16.464267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f1/r3tmp/tmpp67cWe/pdisk_1.dat 2025-04-07T13:49:16.617750Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:16.653310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:16.653383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:16.656218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12164, node 2 2025-04-07T13:49:16.786760Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:16.786779Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:16.786798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:16.786883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26531 TClient is connected to server localhost:26531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:49:17.398739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.421482Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:17.426369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.588809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.750633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:17.837903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.375628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567964282958136:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.375716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.431060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.465321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.502963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.574103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.611052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.645795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:20.715128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567964282958636:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.715202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.715385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567964282958641:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.719174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:20.731176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567964282958643:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:21.440134Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567947103087271:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.491895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17431, MsgBus: 21052 2025-04-07T13:49:23.536804Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567979025926300:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f1/r3tmp/tmpkQdu7b/pdisk_1.dat 2025-04-07T13:49:23.570138Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:23.619404Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17431, node 3 2025-04-07T13:49:23.668032Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.668115Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:23.672456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:23.698595Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:23.698617Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:23.698626Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:23.698718Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21052 TClient is connected to server localhost:21052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:24.270063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:24.278149Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:24.289065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.367368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.557088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.645706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.752536Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567991910829719:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.752617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.797130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.832187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.880190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.910981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.948304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.014595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.062063Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567996205797515:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.062156Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.062532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567996205797520:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.065162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:27.078328Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567996205797522:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:28.542842Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567979025926300:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.542911Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:29.501224Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033768981, txId: 281474976715671] shutting down >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService [GOOD] >> KqpLimits::CancelAfterRwTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 22200, MsgBus: 5853 2025-04-07T13:49:11.653533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567927445768826:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:11.653826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e0/r3tmp/tmpAgt4Ck/pdisk_1.dat 2025-04-07T13:49:12.397067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:12.438611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:12.438696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:12.440340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22200, node 1 2025-04-07T13:49:12.681051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:12.681068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:12.681071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:12.681130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5853 TClient is connected to server localhost:5853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:13.513047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.541458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:13.554949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.729754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.916529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.986732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:16.387528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567948920606866:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.401314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.445346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.476120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.503973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.536953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.561786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.628801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567927445768826:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:16.628854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:16.637896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.719360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567948920607370:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.719437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.719626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567948920607375:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.722891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:16.734858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567948920607377:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:18.108334Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490567957510542298:2469], status: GENERIC_ERROR, issues:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2025-04-07T13:49:18.118478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNlOGIwZWYtZmQ3OGMzZjEtMTMxNGVlYmUtOTFhZmVlMDc=, ActorId: [1:7490567957510542290:2464], ActorState: ExecuteState, TraceId: 01jr886ewq2f549v3jkp0t1p5c, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 22377, MsgBus: 23662 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e0/r3tmp/tmpsvWreU/pdisk_1.dat 2025-04-07T13:49:19.121342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:19.153542Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22377, node 2 2025-04-07T13:49:19.212591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:19.212726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:19.221438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:19.325193Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:19.325217Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:19.325225Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:19.325376Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23662 TClient is connected to server localhost:23662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:19.900011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:19.938190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.025971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.204641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:20.288019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.260470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567971419505579:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.260540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.304221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.330870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.370194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.411128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.448271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.532916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:22.635470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567971419506082:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.635532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.635583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567971419506087:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:22.643759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:22.664504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567971419506089:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 26968, MsgBus: 5706 2025-04-07T13:49:24.753309Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567981016312218:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:24.753961Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e0/r3tmp/tmpEv3kJu/pdisk_1.dat 2025-04-07T13:49:24.867628Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:24.882750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:24.882839Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:24.888377Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26968, node 3 2025-04-07T13:49:24.936600Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:24.936626Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:24.936634Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:24.936758Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5706 TClient is connected to server localhost:5706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:25.376035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.381326Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:25.391971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.482130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:25.617490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.691586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.859681Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567993901215802:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.859758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.915073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.971516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.038098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.069963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.117395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.191837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.253820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567998196183602:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:28.253902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567998196183607:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:28.253920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:28.264835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:28.277523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567998196183609:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Consumed units: 238 2025-04-07T13:49:29.753539Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567981016312218:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:29.754000Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 6 >> KqpQuery::PreparedQueryInvalidate >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> KqpExplain::SqlIn >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 29814, MsgBus: 25497 2025-04-07T13:49:10.152389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567921472059819:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.152426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013eb/r3tmp/tmpcmZfm4/pdisk_1.dat 2025-04-07T13:49:10.621393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:10.626627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:10.626727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29814, node 1 2025-04-07T13:49:10.630728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:10.713098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:10.713118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:10.713123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:10.713197Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25497 TClient is connected to server localhost:25497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:11.445827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.467007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.664301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:11.906090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.992725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.657623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567934356963266:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.657730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.931436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.960101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.989222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.026355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.058820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.146868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.263165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938651931066:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.263241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.263474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567938651931071:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.267152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.282287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567938651931073:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.153356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567921472059819:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.153426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10711, MsgBus: 8937 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013eb/r3tmp/tmpIJQJpS/pdisk_1.dat 2025-04-07T13:49:16.772687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:16.803428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:16.818143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:16.818223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:16.820164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10711, node 2 2025-04-07T13:49:16.949230Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:16.949251Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:16.949258Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:16.949355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8937 TClient is connected to server localhost:8937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:17.486597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.503131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.588680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:17.752351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.835189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.248823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567966271756281:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.248914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:20.304612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... de Type":"Precompute_0_4","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Value"],"scan_by":["Key (20, 120]"],"type":"Scan"},{"columns":["Value"],"scan_by":["Key [10, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [10, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"},{"PlanNodeId":12,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_3","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_0","PlanNodeType":"ResultSet"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":24,"Plans":[{"PlanNodeId":26,"Plans":[{"PlanNodeId":27,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (20, 120]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":28,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":37,"Plans":[{"PlanNodeId":38,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node 
Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":39,"Plans":[{"PlanNodeId":40,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_4","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 62282, MsgBus: 19534 2025-04-07T13:49:25.197653Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567986258016214:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:25.197717Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013eb/r3tmp/tmpiap7eA/pdisk_1.dat 2025-04-07T13:49:25.364546Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:25.364635Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:25.369188Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:25.383929Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62282, node 3 2025-04-07T13:49:25.435003Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:25.435019Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:25.435024Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:25.435105Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19534 TClient is connected to server localhost:19534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:25.934989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.955813Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:25.963033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.048917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.253651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.339742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:28.649399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567999142919804:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:28.649515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:28.698124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.731146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.765071Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.835845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.869829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.918254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:29.000622Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567999142920306:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.000740Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.000802Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568003437887607:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.004423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:29.016468Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568003437887609:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:30.198185Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567986258016214:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:30.198271Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpLimits::QueryReplySize >> KqpLimits::QSReplySizeEnsureMemoryLimits >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 28700, MsgBus: 30846 2025-04-07T13:49:23.716886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567978799933043:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:23.717064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c8/r3tmp/tmp0rBugA/pdisk_1.dat 2025-04-07T13:49:24.090903Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28700, node 1 2025-04-07T13:49:24.213042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:24.213156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:24.214182Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:49:24.214329Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:49:24.218415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:24.246332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:24.246346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:24.246351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:24.246452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30846 TClient is connected to server localhost:30846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:24.892773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.911365Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:26.762496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567991684835361:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.762593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.943262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:27.087353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567995979802758:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.087441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.087787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567995979802763:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:27.091146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:49:27.107156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567995979802765:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 24219, MsgBus: 16729 2025-04-07T13:49:28.354117Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567999789167278:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.354186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c8/r3tmp/tmpQmdsF3/pdisk_1.dat 2025-04-07T13:49:28.494603Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:28.526589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.526676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.534084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24219, node 2 2025-04-07T13:49:28.615690Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.615743Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.615753Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:28.615850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16729 TClient is connected to server localhost:16729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:29.102615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:29.114062Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:31.358772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568012674069800:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.358852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpExplain::PureExpr >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MultiJoinCteLinks >> DataShardSnapshots::VolatileSnapshotSplit >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> GroupWriteTest::ByTableName >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RandomUuid >> DataShardSnapshots::MvccSnapshotAndSplit >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> TKeyValueTracingTest::WriteSmall [FAIL] >> DataShardSnapshots::MvccSnapshotReadLockedWrites [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |84.6%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> RemoteTopicReader::ReadTopic >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 4303, MsgBus: 7038 2025-04-07T13:49:10.233154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567920458182940:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:10.233545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ed/r3tmp/tmp33CG9s/pdisk_1.dat 2025-04-07T13:49:10.624867Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4303, node 1 2025-04-07T13:49:10.654535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:10.654624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:10.670405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:10.773057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:10.773082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:10.773087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:10.773151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7038 TClient is connected to server localhost:7038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:11.401656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.428153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.546705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.724405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.813387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.729503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567933343086387:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.742056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:13.765908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.814358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.849394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.898991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:13.931788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.019805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.120820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567937638054185:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.120934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.123273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567937638054190:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:14.128075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:14.164889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567937638054192:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:15.227998Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1180] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:49804 2025-04-07T13:49:15.228075Z node 1 :GRPC_SERVER DEBUG: [0x51b000280880] created request Name# ExecuteDataQuery 2025-04-07T13:49:15.228224Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1180] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:49804 database# /Root 2025-04-07T13:49:15.228910Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jr886c2wcrvq8ptqbeqnbhmv, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:49804, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.995794s 2025-04-07T13:49:15.230119Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567920458182940:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.230420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4303 2025-04-07T13:49:18.225756Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1180] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:49804 2025-04-07T13:49:18.226352Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490567941933021834:2460] TxId: 281474976710671. Ctx: { TraceId: 01jr886c2wcrvq8ptqbeqnbhmv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T13:49:18.228908Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1180] finished request Name# ExecuteDataQuery ok# false peer# unknown 2025-04-07T13:49:18.233458Z node 1 :GRPC_SERVER DEBUG: [0x51b000280880] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:56444 2025-04-07T13:49:18.233528Z node 1 :GRPC_SERVER DEBUG: [0x51b000280f80] created request Name# ExecuteDataQuery 2025-04-07T13:49:18.233640Z node 1 :GRPC_SERVER DEBUG: [0x51b000280880] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:56444 database# /Root 2025-04-07T13:49:18.238273Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567941933021841:2470], TxId: 281474976710671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==. TraceId : 01jr886c2wcrvq8ptqbeqnbhmv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490567941933021834:2460], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:18.238758Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567941933021842:2471], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YmNjZTg2MjQtNWYyYTM1Ni00NmIwOGI0Yi03NDVmZmE0Ng==. TraceId : 01jr886c2wcrvq8ptqbeqnbhmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490567941933021834:2460], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:18.238962Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, tra ... : [0x51b00029a580] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-07T13:49:27.326416Z node 2 :GRPC_SERVER DEBUG: [0x51b000299e80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-07T13:49:27.326440Z node 2 :GRPC_SERVER DEBUG: [0x51b00013be80] received request Name# MergeShards ok# false data# peer# 2025-04-07T13:49:27.326533Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c9180] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-07T13:49:27.326561Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bb180] received request Name# SplitShard ok# false data# peer# 2025-04-07T13:49:27.326664Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bb880] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-07T13:49:27.326685Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bbf80] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-07T13:49:27.326797Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bd480] received request Name# SelfCheck ok# false data# peer# 2025-04-07T13:49:27.326807Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bdb80] received request Name# NodeCheck ok# false data# peer# 2025-04-07T13:49:27.326931Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c1380] received request Name# CreateSession ok# false data# peer# 2025-04-07T13:49:27.326952Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c0c80] received request Name# DeleteSession ok# false data# peer# 2025-04-07T13:49:27.327055Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c2180] received request Name# AttachSession ok# false data# peer# 2025-04-07T13:49:27.327092Z node 2 :GRPC_SERVER DEBUG: [0x51b000224a80] received request Name# BeginTransaction ok# false data# peer# 2025-04-07T13:49:27.327222Z node 2 :GRPC_SERVER DEBUG: [0x51b000224380] received request Name# CommitTransaction ok# false data# peer# 2025-04-07T13:49:27.327243Z node 2 :GRPC_SERVER DEBUG: [0x51b000225880] received request Name# RollbackTransaction ok# false data# peer# 2025-04-07T13:49:27.327354Z node 2 :GRPC_SERVER DEBUG: [0x51b0003be280] received request Name# ExecuteQuery ok# false data# peer# 2025-04-07T13:49:27.327380Z node 2 :GRPC_SERVER DEBUG: [0x51b0003bfe80] received request Name# ExecuteScript ok# false data# peer# 2025-04-07T13:49:27.327500Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c0580] received request Name# FetchScriptResults ok# false data# peer# 2025-04-07T13:49:27.327531Z node 2 :GRPC_SERVER DEBUG: [0x51b000225180] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-07T13:49:27.327618Z node 2 :GRPC_SERVER DEBUG: [0x51b000225f80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-07T13:49:27.327676Z node 2 :GRPC_SERVER DEBUG: [0x51b000220b80] received request Name# RestartTablet ok# false data# peer# 2025-04-07T13:49:27.327750Z node 2 :GRPC_SERVER DEBUG: [0x51b000226d80] received request Name# CreateLogStore ok# false data# peer# 2025-04-07T13:49:27.327839Z node 2 :GRPC_SERVER DEBUG: [0x51b000226680] received request Name# DescribeLogStore ok# false data# peer# 2025-04-07T13:49:27.327874Z node 2 :GRPC_SERVER DEBUG: [0x51b000228280] received request Name# DropLogStore ok# false data# peer# 2025-04-07T13:49:27.327975Z node 2 :GRPC_SERVER DEBUG: [0x51b000227480] received request Name# AlterLogStore ok# false data# peer# 2025-04-07T13:49:27.327993Z node 2 :GRPC_SERVER DEBUG: [0x51b000228980] received request Name# CreateLogTable ok# false data# peer# 
2025-04-07T13:49:27.328114Z node 2 :GRPC_SERVER DEBUG: [0x51b000227b80] received request Name# DescribeLogTable ok# false data# peer# 2025-04-07T13:49:27.328119Z node 2 :GRPC_SERVER DEBUG: [0x51b000229780] received request Name# DropLogTable ok# false data# peer# 2025-04-07T13:49:27.328242Z node 2 :GRPC_SERVER DEBUG: [0x51b000229080] received request Name# AlterLogTable ok# false data# peer# 2025-04-07T13:49:27.328248Z node 2 :GRPC_SERVER DEBUG: [0x51b000229e80] received request Name# Login ok# false data# peer# 2025-04-07T13:49:27.328385Z node 2 :GRPC_SERVER DEBUG: [0x51b00022a580] received request Name# DescribeReplication ok# false data# peer# Trying to start YDB, gRPC: 2068, MsgBus: 6231 2025-04-07T13:49:28.094995Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568000849045296:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.095043Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ed/r3tmp/tmp2SBbXR/pdisk_1.dat 2025-04-07T13:49:28.227275Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2068, node 3 2025-04-07T13:49:28.238486Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.238577Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.241408Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:28.293248Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.293272Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.293282Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:28.293376Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6231 TClient is connected to server localhost:6231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:28.782650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:28.799302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:28.873812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.066794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.155056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:31.366288Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568013733948904:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.366383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.407680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.439869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.482651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.517383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.551281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.621674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.671263Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568013733949402:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.671347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.671392Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568013733949407:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.674329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:31.688931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568013733949409:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:33.095568Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568000849045296:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.095636Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts >> DataShardSnapshots::LockedWriteCleanupOnCopyTable [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MergeConnection >> KqpLimits::DatashardReplySize [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> YdbOlapStore::ManyTables [GOOD] >> YdbQueryService::TestCreateAndAttachSession >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 17255, MsgBus: 10767 2025-04-07T13:49:04.725199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567895791738965:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001421/r3tmp/tmpaLIhPh/pdisk_1.dat 2025-04-07T13:49:05.088998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:05.359357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:05.391530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:05.391628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:05.412811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17255, node 1 2025-04-07T13:49:05.663405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:05.663427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:05.663433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:05.663512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:10767 TClient is connected to server localhost:10767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:06.419834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.471060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.706933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:06.905001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:07.000125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.497087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917266577004:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.506393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.544917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.603682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.647186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.709558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.715692Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567895791738965:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:09.715753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:09.743743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.793537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:09.869777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917266577499:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.869845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.870297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567917266577504:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:09.874339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:09.896846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:09.897121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567917266577506:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:11.142444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:14.267033Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490567938741415249:2576] TxId: 281474976710672. Ctx: { TraceId: 01jr886azgf7kh46js9a4xd9qv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzAyODJkZGEtMzY4OWQ3ZWItY2E2ZTZkM2ItOGYwMzY0MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-04-07T13:49:14.273924Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzAyODJkZGEtMzY4OWQ3ZWItY2E2ZTZkM2ItOGYwMzY0MzQ=, ActorId: [1:7490567938741415234:2576], ActorState: ExecuteState, TraceId: 01jr886azgf7kh46js9a4xd9qv, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 8163, MsgBus: 1941 2025-04-07T13:49:15.280133Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567941367450018:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:15.280170Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001421/r3tmp/tmpM84Eto/pdisk_1.dat 2025-04-07T13:49:15.485305Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8163, node 2 2025-04-07T13:49:15.592390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:15.592467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:15.597902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:15.608249Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:15.608271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:15.608286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:15.608398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1941 TClient is connected to server localhost:1941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:16.126700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.141158Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 7205759404664 ... 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.364331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.399326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.471984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.527525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.581145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567958547321408:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.581256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.581428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567958547321413:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.584271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:19.594945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490567958547321415:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:20.280647Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567941367450018:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:20.280708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:20.734320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:20.980957Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZlM2IyZjMtMzFkNWU1NzMtZDI4N2EwMzItNDYyZGIyMQ==, ActorId: [2:7490567962842289355:2486], ActorState: ExecuteState, TraceId: 01jr886hkj2a21qwrbzse66jb4, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:959: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:959: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 5343, MsgBus: 62612 2025-04-07T13:49:21.937973Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490567967483235361:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.938031Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001421/r3tmp/tmpyUvqic/pdisk_1.dat 2025-04-07T13:49:22.082730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:22.082808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:22.084525Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:22.086686Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5343, node 3 2025-04-07T13:49:22.196274Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:22.196300Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:22.196307Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:22.196405Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62612 TClient is connected to server localhost:62612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:22.735081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.741519Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:22.748847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:22.850586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.010864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.080497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.599173Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567984663106095:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.599265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.658838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.692616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.761983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.794214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.825949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.902980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.949066Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567984663106592:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.949155Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.949228Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490567984663106597:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.953520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:25.966779Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490567984663106599:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:26.947534Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490567967483235361:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:26.949497Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:26.985790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.703525Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGUxNTEyNi1iYTA5MzM4ZS1lMTUwMDJlMS0xNGUwNDQzZA==, ActorId: [3:7490568023317814237:2730], ActorState: ExecuteState, TraceId: 01jr886ym219953tx5jdt66sk8, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003965 > 50331648), code: 2013 >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRanges >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheByAst >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::RowsLimit >> KqpLimits::QSReplySizeEnsureMemoryLimits [GOOD] >> KqpLimits::QueryExecTimeoutCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2025-04-07T13:49:22.861202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:22.861509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:22.861640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018c6/r3tmp/tmpvFCoXz/pdisk_1.dat 2025-04-07T13:49:23.218583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.223441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:23.224793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.225775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:49:23.227798Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-07T13:49:23.227853Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2025-04-07T13:49:23.251283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:23.254062Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-07T13:49:23.304766Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:305:2345] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-07T13:49:23.304897Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-07T13:49:23.305050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.305117Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:49:23.305155Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:49:23.305198Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:49:23.305247Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:49:23.305359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:49:23.305588Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-07T13:49:23.305629Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-07T13:49:23.305696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-07T13:49:23.305738Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:49:23.305892Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-07T13:49:23.316623Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-07T13:49:23.316720Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:305:2345]) 2025-04-07T13:49:23.316796Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:49:23.317295Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-07T13:49:23.317380Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:305:2345])::Execute 2025-04-07T13:49:23.317434Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:49:23.317521Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:305:2345])::Complete 2025-04-07T13:49:23.317701Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443315200 } 2025-04-07T13:49:23.317779Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-07T13:49:23.317831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:23.317961Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-07T13:49:23.318012Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:49:23.318046Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:49:23.318177Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-07T13:49:23.318210Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-07T13:49:23.318237Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-07T13:49:23.318280Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:49:23.329051Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-07T13:49:23.329122Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:49:23.428029Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-07T13:49:23.428131Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T13:49:23.428477Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-07T13:49:23.428871Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-07T13:49:23.428933Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2025-04-07T13:49:23.429726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-07T13:49:23.430963Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T13:49:23.431055Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-07T13:49:23.431099Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-07T13:49:23.431155Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-07T13:49:23.431730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:49:23.431794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T13:49:23.436624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T13:49:23.439158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.440283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:23.440353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:23.441096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-07T13:49:23.444219Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-07T13:49:23.462361Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-07T13:49:23.462471Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-07T13:49:23.462730Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-07T13:49:23.462806Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-07T13:49:23.462875Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-07T13:49:23.463064Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-07T13:49:23.467740Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-07T13:49:23.467935Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-07T13:49:23.468948Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-07T13:49:23.469328Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-07T13:49:23.469462Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-07T13:49:23.469539Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-07T13:49:23.469710Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-07T13:49:23.469781Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004595936}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 644480 seqNo 2:4 2025-04-07T13:49:38.328173Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2025-04-07T13:49:38.328334Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:38.328481Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:38.328542Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:49:38.328568Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:38.328601Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-07T13:49:38.339335Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:38.339447Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:38.340741Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2025-04-07T13:49:38.340808Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 32501 Status# 16 SEND to# [2:380:2375] Proxy marker# C1 2025-04-07T13:49:38.453668Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2025-04-07T13:49:38.453773Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T13:49:38.453816Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2025-04-07T13:49:38.454066Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-04-07T13:49:38.454514Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2025-04-07T13:49:38.454588Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 17 SEND EvProposeTransactionStatus to# [2:380:2375] Proxy 2025-04-07T13:49:38.455202Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 
281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T13:49:38.455252Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:49:38.455400Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:38.455772Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:49:38.455821Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:38.455863Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2025-04-07T13:49:38.456046Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2025-04-07T13:49:38.456176Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:49:38.456410Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:49:38.456478Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-07T13:49:38.456902Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:38.458233Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 33000 txid# 281474976715667} 2025-04-07T13:49:38.458282Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-04-07T13:49:38.458329Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:38.458957Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T13:49:38.459026Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-07T13:49:38.459054Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-07T13:49:38.459071Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2025-04-07T13:49:38.459101Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2025-04-07T13:49:38.459340Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:49:38.459386Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:49:38.459421Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2025-04-07T13:49:38.459488Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:49:38.460215Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 
2025-04-07T13:49:38.462918Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2025-04-07T13:49:38.462966Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T13:49:38.463564Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-04-07T13:49:38.463634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 1, subscribers: 1 2025-04-07T13:49:38.463890Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:49:38.464110Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2025-04-07T13:49:38.465023Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.464 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-07T13:49:38.465146Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-07T13:49:38.465234Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-07T13:49:38.465317Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-07T13:49:38.465375Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 TRACE ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-07T13:49:38.465450Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-07T13:49:38.465525Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-07T13:49:38.465595Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-07T13:49:38.465642Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 INFO 
ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-07T13:49:38.465836Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 NOTE ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-04-07T13:49:38.465935Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 NOTE ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-04-07T13:49:38.465996Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGVjNWUxMzctMmZhM2YxZGItODVjNzZkOWMtNWViMTEyZTA= 2025-04-07 13:49:38.465 NOTE ydb-core-tx-datashard-ut_minstep(pid=52048, tid=0x00007F26D8E33C80) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-07T13:49:38.480507Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T13:49:38.480710Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-07T13:49:38.482231Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T13:49:38.483199Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-07T13:49:38.483771Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-07T13:49:38.483828Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-07T13:49:38.483928Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-07T13:49:38.484067Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-07T13:49:38.484183Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> KqpQuery::SelectCountAsteriskFromVar [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 30069, MsgBus: 1945 2025-04-07T13:49:21.068831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567967646539552:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.068874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d6/r3tmp/tmpfOP6JP/pdisk_1.dat 2025-04-07T13:49:21.462823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:21.481439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:21.481537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:21.484937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30069, node 1 2025-04-07T13:49:21.723289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:21.723310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:21.723319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:21.723407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1945 TClient is connected to server localhost:1945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:22.302048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.319568Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:22.335347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.500627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.683087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.782960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.401464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567980531443146:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:24.401563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:24.727681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.755289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.838136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.878263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.930501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:24.985101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.088628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567984826410944:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.088735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.089029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567984826410949:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.093467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:25.108081Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:25.112840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567984826410951:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:26.072145Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567967646539552:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:26.072241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:26.254982Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033766237, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 16527, MsgBus: 30106 2025-04-07T13:49:27.073059Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567996635343750:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.073108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d6/r3tmp/tmp7YGCdr/pdisk_1.dat 2025-04-07T13:49:27.199604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:27.199678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:27.201302Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:27.213128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16527, node 2 2025-04-07T13:49:27.312962Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:27.312994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:27.313001Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:27.313118Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30106 TClient is connected to server localhost:30106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:27.728587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:27.743306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.803771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.967404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:28.047893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:30.210041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568009520247326:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.210142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.244230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.278640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.306874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.376057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.410089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.448717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.503691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568009520247823:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.503759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.503886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568009520247828:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.507557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:30.524743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568009520247830:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:31.626638Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033771634, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 3642, MsgBus: 4066 2025-04-07T13:49:32.527509Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568016907480664:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d6/r3tmp/tmp244LzT/pdisk_1.dat 2025-04-07T13:49:32.581947Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:32.648350Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:32.662646Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:32.662709Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:32.664055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3642, node 3 2025-04-07T13:49:32.708235Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.708255Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.708263Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.708372Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4066 TClient is connected to server localhost:4066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:33.208647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.214951Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:33.230667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:33.292228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:33.457784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:33.531840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.088923Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568034087351393:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.089122Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.114402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.152826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.206478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.245656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.281076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.330267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.394741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568034087351891:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.394831Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.397705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568034087351896:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.402258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:36.428362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568034087351898:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:37.531326Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568016907480664:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.531398Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit >> KqpParams::BadParameterType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 19902, MsgBus: 31062 2025-04-07T13:49:21.899552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567966677635680:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:21.900487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d4/r3tmp/tmpdtEOmn/pdisk_1.dat 2025-04-07T13:49:22.376816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:22.376919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:22.401939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:22.402617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19902, node 1 2025-04-07T13:49:22.609150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:22.609176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:22.609183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:22.609272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31062 TClient is connected to server localhost:31062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:23.413178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.444985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:23.459381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.635708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.793421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.892583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.493229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567983857506437:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.493353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.701692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.734073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.774792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.855993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.890224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.958672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.048466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567988152474241:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.048542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.048625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567988152474247:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.051640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.060933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567988152474249:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:26.894418Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567966677635680:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:26.894480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 15845, MsgBus: 5303 2025-04-07T13:49:27.859296Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567995666788097:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.859355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d4/r3tmp/tmpZudon6/pdisk_1.dat 2025-04-07T13:49:27.938389Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15845, node 2 2025-04-07T13:49:27.993083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:27.993155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:27.994187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:28.009590Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.009612Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.009619Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-07T13:49:28.009703Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5303 TClient is connected to server localhost:5303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... 596:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.743364Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d4/r3tmp/tmpo72f42/pdisk_1.dat 2025-04-07T13:49:33.872877Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:33.892553Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:33.892651Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10473, node 3 2025-04-07T13:49:33.895989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:33.937235Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.937261Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.937272Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.937374Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19049 TClient is connected to server localhost:19049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:34.429929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.437313Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:34.449142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:34.521565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.706678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:34.800624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.385716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568038629964381:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.385799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.431306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.488359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.523930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.558785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.588957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.630985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.705105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568038629964878:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.705177Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.705476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568038629964883:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.709702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.720752Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568038629964885:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:38.744769Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568021450093596:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.744841Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":5}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":4}],"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"},{"Inputs":[{"InternalOperatorId":6}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"}],"Node Type":"Limit-InnerJoin (MapJoin)-Filter-TablePointLookup-ConstantExpr-Filter-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"Aggregate","CTE Name":"precompute_0_0"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"}],"Node Type":"TablePointLookup"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":16,"Plans":[{"PlanNodeId":22,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 4143, MsgBus: 15883 2025-04-07T13:49:22.389332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567973489535573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.391063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c9/r3tmp/tmpO8qxbZ/pdisk_1.dat 2025-04-07T13:49:22.749920Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4143, node 1 2025-04-07T13:49:22.817291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:22.835217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:22.843088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:22.881175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:22.881194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:22.881200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:22.881296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15883 TClient is connected to server localhost:15883 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:23.421643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.448701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.580456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.751489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.826326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:25.630897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567986374438933:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.631233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:25.846614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.892833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.926087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:25.969784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.044446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.075261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.142505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567990669406727:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.142569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.142734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567990669406732:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.145398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.154278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567990669406734:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:27.389506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567973489535573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.389581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6169, MsgBus: 27468 2025-04-07T13:49:28.095704Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567999057372786:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.095969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c9/r3tmp/tmpnrTEqy/pdisk_1.dat 2025-04-07T13:49:28.206362Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:28.218042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.218109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.222761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6169, node 2 2025-04-07T13:49:28.336121Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.336143Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.336149Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:28.336236Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27468 TClient is connected to server localhost:27468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:28.794332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:28.802998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:28.887435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.045889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.131528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:31.199023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568011942276344:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.199131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.242163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.277507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.315897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.388798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.472185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.539775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.597115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568011942276847:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.597204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.597579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568011942276852:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.600956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:31.613158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568011942276854:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:33.095526Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490567999057372786:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.095587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29182, MsgBus: 14556 2025-04-07T13:49:34.125253Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568024357363347:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:34.125304Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c9/r3tmp/tmpodmIPV/pdisk_1.dat 2025-04-07T13:49:34.316574Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:34.343255Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:34.343336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:34.344821Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29182, node 3 2025-04-07T13:49:34.437212Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:34.437237Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:34.437247Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:34.437347Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14556 TClient is connected to server localhost:14556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:49:35.017445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.027170Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:35.041317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.169457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.380548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.476482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:37.734955Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568037242266938:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.735030Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.798991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.866524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.901098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.945548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.018063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.075495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.163368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568041537234738:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.163443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.163493Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568041537234743:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.166909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:38.221683Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568041537234745:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:39.125321Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568024357363347:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:39.125412Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFFEF080) TestOneRead(TBasicString>, TBasicString>)+4826 (0xF7863AA) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xF78D06E) std::__y1::__function::__func, void ()>::operator()()+280 (0xF79E438) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x100249B9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFFF5BE9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF79D1E4) NUnitTest::TTestFactory::Execute()+2438 (0xFFF74B6) NUnitTest::RunMain(int, char**)+5149 (0x1001E5FD) ??+0 (0x7F0ECB4E3D90) __libc_start_main+128 (0x7F0ECB4E3E40) _start+41 (0xD858029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFFEF080) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4252 (0xF780A0C) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xF78C5F8) std::__y1::__function::__func, void ()>::operator()()+280 (0xF79E438) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x100249B9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFFF5BE9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF79D1E4) NUnitTest::TTestFactory::Execute()+2438 (0xFFF74B6) NUnitTest::RunMain(int, char**)+5149 (0x1001E5FD) ??+0 (0x7F5878344D90) __libc_start_main+128 (0x7F5878344E40) _start+41 (0xD858029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFFEF080) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4252 (0xF780A0C) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xF78C908) std::__y1::__function::__func, 
void ()>::operator()()+280 (0xF79E438) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x100249B9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFFF5BE9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF79D1E4) NUnitTest::TTestFactory::Execute()+2438 (0xFFF74B6) NUnitTest::RunMain(int, char**)+5149 (0x1001E5FD) ??+0 (0x7FDCA4C75D90) __libc_start_main+128 (0x7FDCA4C75E40) _start+41 (0xD858029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFFEF080) TestOneRead(TBasicString>, TBasicString>)+4826 (0xF7863AA) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xF78CC7E) std::__y1::__function::__func, void ()>::operator()()+280 (0xF79E438) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x100249B9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFFF5BE9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF79D1E4) NUnitTest::TTestFactory::Execute()+2438 (0xFFF74B6) NUnitTest::RunMain(int, char**)+5149 (0x1001E5FD) ??+0 (0x7FBECBA86D90) __libc_start_main+128 (0x7FBECBA86E40) _start+41 (0xD858029) |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit >> YdbLogStore::LogTable [GOOD] >> YdbMonitoring::SelfCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::BadParameterType [GOOD] Test command err: Trying to start YDB, gRPC: 28937, MsgBus: 11247 2025-04-07T13:49:14.025066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:14.025353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:14.025597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e6/r3tmp/tmpwlzwEW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28937, node 1 2025-04-07T13:49:14.526888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:14.534141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:14.534202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:14.534244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:14.534434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:14.597215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:14.597410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:14.609226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11247 TClient is connected to server localhost:11247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:14.989141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.073272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:15.503563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:15.970376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:16.361236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.307423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1730:3352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.307627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.331765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.652502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.995766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:18.340095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:18.710783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.101934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.492107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2300:3793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.492206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.492558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2305:3798], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:19.507893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:19.731278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2307:3800], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:20.958621Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2605:4015] TxId: 281474976715671. Ctx: { TraceId: 01jr886hns807q604jqkfdp2fa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNlZDcwZjQtZTMzNWNlZGQtYjkwZjc0M2EtOTk2Y2NkMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-07T13:49:20.984446Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2613:4056], TxId: 281474976715671, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr886hns807q604jqkfdp2fa. SessionId : ydb://session/3?node_id=1&id=NzNlZDcwZjQtZTMzNWNlZGQtYjkwZjc0M2EtOTk2Y2NkMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:20.985013Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2611:4054], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzNlZDcwZjQtZTMzNWNlZGQtYjkwZjc0M2EtOTk2Y2NkMzU=. TraceId : 01jr886hns807q604jqkfdp2fa. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:20.985231Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2612:4055], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jr886hns807q604jqkfdp2fa. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzNlZDcwZjQtZTMzNWNlZGQtYjkwZjc0M2EtOTk2Y2NkMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:20.987351Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNlZDcwZjQtZTMzNWNlZGQtYjkwZjc0M2EtOTk2Y2NkMzU=, ActorId: [1:2569:4015], ActorState: ExecuteState, TraceId: 01jr886hns807q604jqkfdp2fa, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 64563, MsgBus: 9777 2025-04-07T13:49:24.769658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:24.770058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:24.770146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e6/r3tmp/tmpEGi3Vo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64563, node 2 2025-04-07T13:49:25.162935Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:25.163884Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:25.163941Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:25.163996Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:25.164166Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:25.228197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:25.228323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9777 2025-04-07T13:49:25.239613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 U ... 6:0, at schemeshard: 72057594046644480 2025-04-07T13:49:29.085142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:29.419929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2295:3789], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.420052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.420416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2300:3794], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:29.427046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:29.630880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2302:3796], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:31.107607Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:2604:4032] TxId: 281474976715671. Ctx: { TraceId: 01jr886vjyaybbqpeyj987vsav, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjdhOWU5NGItOGQ3OTE4YzUtY2E3YTMyOWYtYzdjOGUwZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-07T13:49:31.108300Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2612:4056], TxId: 281474976715671, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr886vjyaybbqpeyj987vsav. SessionId : ydb://session/3?node_id=2&id=MjdhOWU5NGItOGQ3OTE4YzUtY2E3YTMyOWYtYzdjOGUwZWU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2604:4032], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:31.109266Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2610:4054], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jr886vjyaybbqpeyj987vsav. SessionId : ydb://session/3?node_id=2&id=MjdhOWU5NGItOGQ3OTE4YzUtY2E3YTMyOWYtYzdjOGUwZWU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:2604:4032], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:31.109580Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2611:4055], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjdhOWU5NGItOGQ3OTE4YzUtY2E3YTMyOWYtYzdjOGUwZWU=. TraceId : 01jr886vjyaybbqpeyj987vsav. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2604:4032], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:33.904788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:33.904857Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:34.402539Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjdhOWU5NGItOGQ3OTE4YzUtY2E3YTMyOWYtYzdjOGUwZWU=, ActorId: [2:2585:4032], ActorState: ExecuteState, TraceId: 01jr886vjyaybbqpeyj987vsav, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 26739, MsgBus: 21161 2025-04-07T13:49:35.176875Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568028220957286:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:35.177003Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e6/r3tmp/tmpeWXkAo/pdisk_1.dat 2025-04-07T13:49:35.372501Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:35.391834Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:35.392124Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:35.394421Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26739, node 3 2025-04-07T13:49:35.446872Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:35.446900Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:35.446912Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:35.447017Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21161 TClient is connected to server localhost:21161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:36.102420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.127376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:36.240947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.387010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.495197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.686820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568041105860872:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.686931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.766364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.834557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.873402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.909325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.948819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.988350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:39.089290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568045400828669:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:39.089403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:39.092995Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568045400828674:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:39.098322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:39.116237Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568045400828676:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:40.176715Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568028220957286:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:40.176787Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:40.333181Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTZlNmYzMjYtOWQ1OWUzZTEtN2M4YjU2ZTItOWQ5MGJmMmQ=, ActorId: [3:7490568049695796283:2460], ActorState: ExecuteState, TraceId: 01jr8874fa6s85me2wrs190hr5, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1233: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $group type mismatch, expected: { Kind: Data Data { Scheme: 2 } }, actual: Type (Data), schemeType: Int32, schemeTypeId: 1 |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |84.7%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |84.7%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> PartitionStats::Collector >> PartitionStats::Collector [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> PartitionStats::CollectorOverload [GOOD] >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::Decimal+QueryService |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::UpdateWhereInSubquery |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |84.7%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestAttachTwice >> TStorageTenantTest::LsLs >> TStorageTenantTest::GenericCases >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::QueryCancelWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-04-07T13:49:28.531344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568000849267204:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.531385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013bb/r3tmp/tmp1cncCW/pdisk_1.dat 2025-04-07T13:49:28.950066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.950184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.958018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:28.963103Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 1 2025-04-07T13:49:29.063384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:29.063413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:29.063424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:29.063523Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:29.587630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.618827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:31.723207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568013734173794:2603], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.723370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.724019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568013734173815:2606], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.728390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:49:31.740334Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:49:31.742085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568013734173821:2607], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 16820, MsgBus: 21171 2025-04-07T13:49:33.311381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568022324103557:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.311427Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013bb/r3tmp/tmpBTCJ7a/pdisk_1.dat 2025-04-07T13:49:33.504755Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:33.529443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:33.529540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:33.537953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 2 2025-04-07T13:49:33.634448Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.634477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.634489Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.634580Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:34.135472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.143348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.157820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.088147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568039503977767:2603], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.088214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568039503977788:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.088285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.091252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.102662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568039503977807:2608], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } query_phases { duration_us: 100103 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 94423 affected_shards: 100 } compilation { duration_us: 108886 cpu_time_us: 103089 } process_cpu_time_us: 256 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":4,\"Sum\":317000,\"Max\":82000,\"Min\":77000},\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"ActiveMessageMs\":{\"Count\":4,\"Max\":87,\"Min\":7},\"FirstMessageMs\":{\"Count\":4,\"Sum\":30,\"Max\":8,\"Min\":7},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":313000,\"Max\":80000,\"Min\":76000}},\"Name\":\"4\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"FirstMessageMs\":{\"Count\":4,\"Sum\":29,\"Max\":8,\"Min\":6},\"ActiveMessageMs\":{\"Count\":4,\"Max\":87,\"Min\":6},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"PauseMessageMs\":{\"Count\":4,\"Sum\":3,\"Max\":1,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":314000,\"Max\":81000,\"Min\":76000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":320032,\"Max\":83894,\"Min\":75968},\"WaitPeriods\":{\"Count\":4,\"Sum\":87,\"Max\":24,\"Min\":19}}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576},\"Tasks\":4,\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"StageDurationUs\":82000,\"BaseTimeMs\":1744033777281,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":318198,\"Max\":83315,\"Min\":75443},\"OutputBytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"CpuTimeUs\":{\"Count\":4,\"Sum\":20504,\"Max\":7258,\"Min\":2865},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"ActiveMessageMs\":{\"Count\":4,\"Max\":87,\"Min\":6},\"FirstMessageMs\":{\"Count\":4,\"Sum\":29,\"Max\":8,\"Min\":6},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":314000,\"Max\":81000,\"Min\":76000}},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":343,\"Max\":87,\"Min\":84},\"FirstMessageMs\":{\"Count\":4,\"Sum\":29,\"Ma
x\":8,\"Min\":6},\"ActiveMessageMs\":{\"Count\":4,\"Max\":87,\"Min\":6},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"PauseMessageMs\":{\"Count\":4,\"Sum\":3,\"Max\":1,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":314000,\"Max\":81000,\"Min\":76000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":321245,\"Max\":84365,\"Min\":76070},\"WaitPeriods\":{\"Count\":4,\"Sum\":82,\"Max\":24,\"Min\":18}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":17,\"Max\":17,\"Min\":17},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":93,\"Max\":93,\"Min\":93},\"ActiveMessageMs\":{\"Count\":1,\"Max\":93,\"Min\":35},\"FirstMessageMs\":{\"Count\":1,\"Sum\":35,\"Max\":35,\"Min\":35},\"Bytes\":{\"Count\":1,\"Sum\":7671,\"Max\":7671,\"Min\":7671},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":58000,\"Max\":58000,\"Min\":58000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":93,\"Max\":93,\"Min\":93},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":87,\"Max\":87,\"Min\":87},\"FirstMessageMs\":{\"Count\":1,\"Sum\":9,\"Max\":9,\"Min\":9},\"ActiveMessageMs\":{\"Count\":1,\"Max\":93,\"Min\":9},\"Bytes\":{\"Count\":1,\"Sum\":7671,\"Max\":7671,\"Min\":7671},\"PauseMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":84000,\"Max\":84000,\"Min\":84000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":52409,\"Max\":52409,\"Min\":52409},\"WaitPeriods\":{\"Count\":1,\"Sum\":16,\"Max\":16,\"Min\":16},\"WaitMessageMs\":{\"Count\":1,\"Max\":87,\"Min\":7}}}],\"DurationUs\":{\"Count\":1,\"Sum\":88000,\"Max\":88000,\"Min\":88000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7671,\"Max\":7671,\"Min\":7671},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":88000,\"BaseTimeMs\":1744033777281,\"WaitInputTimeUs\":{\"Count\":1,\"Sum\":50912,\"Max\":50912,\"Min\":50912},\"OutputBytes\":{\"Count\":1,\"Sum\":7671,\"Max\":7671,\"Min\":7671},\"CpuTimeUs\":{\"Count\":1,\"Sum\":40250,\"Max\":40250,\"Min\":40250},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":92,\"Max\":92,\"Min\":92},\"ActiveMessageMs\":{\"Count\":1,\"Max\":92,\"Min\":7},\"FirstMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":85000,\"Max\":85000,\"Min\":85000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":92,\"Max\":92,\"Min\":92},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":87,\"Max\":87,\"Min\":87},\"FirstMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"ActiveMessageMs\":{\"Count\":1,\"Max\":92,\"Min\":7},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"PauseMess
ageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":85000,\"Max\":85000,\"Min\":85000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":13016,\"Max\":13016,\"Min\":13016},\"WaitPeriods\":{\"Count\":1,\"Sum\":18,\"Max\":18,\"Min\":18},\"WaitMessageMs\":{\"Count\":1,\"Max\":87,\"Min\":2}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":108886,\"CpuTimeUs\":103089},\"ProcessCpuTimeUs\":256,\"TotalDurationUs\":296182,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":81515},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'353) \'(\'\"_id\" \'\"68e562e9-e049b7ed-b222041a-94bd171e\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'365) \'(\'\"_id\" \'\"4ef59b86-8815acf8-56ec94d3-ee8d6ec8\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 296182 total_cpu_time_us: 197768 Trying to start YDB, gRPC: 16820, MsgBus: 21172 2025-04-07T13:49:38.649270Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568043798939960:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.649311Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013bb/r3tmp/tmpp54wHN/pdisk_1.dat 2025-04-07T13:49:38.761884Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.773331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.773415Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.775852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 3 2025-04-07T13:49:38.905284Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.905305Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.905312Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.905442Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:39.484883Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:39.504024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:42.320944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568060978814129:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.321070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.321479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568060978814170:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.324681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:49:42.335159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568060978814173:2612], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> TStorageTenantTest::Boot >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> TStorageTenantTest::DeclareAndDefine >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> KqpQuery::RowsLimit [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> THealthCheckTest::Basic >> THealthCheckTest::StorageLimit95 >> THealthCheckTest::Issues100GroupsListing >> DataShardSnapshots::MvccSnapshotLockedWrites [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart >> THealthCheckTest::ShardsLimit999 >> THealthCheckTest::SpecificServerless >> THealthCheckTest::Issues100Groups100VCardListing >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RowsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 3018, MsgBus: 20155 2025-04-07T13:49:28.550950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567998082831496:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.551170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b7/r3tmp/tmpawYm8d/pdisk_1.dat 2025-04-07T13:49:28.971885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:28.980313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.980389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.988969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3018, node 1 2025-04-07T13:49:29.086470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:29.086491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:29.086498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:29.086603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20155 TClient is connected to server localhost:20155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:29.580049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.591434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:29.602687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.708270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.852978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.944686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:31.809450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568010967734959:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.819651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.871495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.920656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.950465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:32.017819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:32.068478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:32.111065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:32.168836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568015262702755:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:32.170169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:32.176816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568015262702760:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:32.183033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:32.196787Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:32.204483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568015262702762:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:33.522443Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567998082831496:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.522484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7821, MsgBus: 32557 2025-04-07T13:49:34.537252Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568023554789961:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:34.540167Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b7/r3tmp/tmpqy3yhH/pdisk_1.dat 2025-04-07T13:49:34.706546Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:34.728972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:34.729053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:34.730474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7821, node 2 2025-04-07T13:49:34.837254Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:34.837277Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:34.837285Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:34.837384Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32557 TClient is connected to server localhost:32557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:35.303235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.310852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:35.318175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.386690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.568345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.655727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.123848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568040734660834:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.123953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.189032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.254328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.287851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.317467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.356616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.416256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.514538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568040734661335:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.514624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.515090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568040734661340:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:38.519035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:38.533044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568040734661343:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:39.541001Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568023554789961:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:39.541100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28942, MsgBus: 8033 2025-04-07T13:49:40.419824Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568051304281033:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:40.419890Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b7/r3tmp/tmpEqA1Rw/pdisk_1.dat 2025-04-07T13:49:40.486362Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28942, node 3 2025-04-07T13:49:40.521438Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:40.521530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:40.524028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:40.576071Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:40.576094Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:40.576101Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:40.576205Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8033 TClient is connected to server localhost:8033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:41.062177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.075935Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:41.093990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.185409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.388347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.489699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:43.858475Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568064189184628:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:43.858543Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:43.901736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:43.953121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.032473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.080519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.135190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.221267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.324989Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568068484152435:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:44.325095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:44.326974Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568068484152440:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:44.338165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:44.360181Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:49:44.360657Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568068484152442:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:45.421383Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568051304281033:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.421443Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 30211, MsgBus: 15464 2025-04-07T13:49:32.132729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568016625404369:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:32.132786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139c/r3tmp/tmp9zIRc7/pdisk_1.dat 2025-04-07T13:49:32.584096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:32.586816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:32.586931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:32.591464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30211, node 1 2025-04-07T13:49:32.781281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.781301Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.781310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.781451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15464 TClient is connected to server localhost:15464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:33.467306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:33.484988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:33.492100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.652998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.800087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.875929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.802360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568029510307950:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.802509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.018826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.051889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.113755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.146755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.200563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.238217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.288701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568033805275742:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.288751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.288888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568033805275747:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.291961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:36.302415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568033805275749:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:37.134524Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568016625404369:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.134592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:37.566848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25430, MsgBus: 28532 2025-04-07T13:49:38.648797Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568040326487964:2131];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139c/r3tmp/tmpTeTkNk/pdisk_1.dat 2025-04-07T13:49:38.745709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:38.826370Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25430, node 2 2025-04-07T13:49:38.903561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.903641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.922773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:38.977135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.977158Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.977166Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.977268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28532 TClient is connected to server localhost:28532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:39.464792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:39.478972Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:41.744516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568053211390387:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.744524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568053211390395:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.744583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.747831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:49:41.769716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568053211390401:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:49:41.897954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:42.144378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16067, MsgBus: 6859 2025-04-07T13:49:43.467085Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568062612333284:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139c/r3tmp/tmpXf3HPy/pdisk_1.dat 2025-04-07T13:49:43.526806Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:43.615282Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:43.615365Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:43.617329Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:43.633890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16067, node 3 2025-04-07T13:49:43.706036Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:43.706058Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:43.706067Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:43.706159Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6859 TClient is connected to server localhost:6859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.090680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:44.110060Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:46.852957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568075497235647:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.853050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.853342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568075497235659:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.857229Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:49:46.872279Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:49:46.872444Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568075497235661:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:49:47.002687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.223707Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568079792203141:2328], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-04-07T13:49:47.224989Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTljZWY2MDUtMzQ2OWNiNGItOTkwYmVmODQtNmU2OTVhMjI=, ActorId: [3:7490568079792203139:2327], ActorState: ExecuteState, TraceId: 01jr887b9r9nsgn7dda3hex54r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> RemoteTopicReader::ReadTopic [GOOD] >> YdbMonitoring::SelfCheck [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying >> THealthCheckTest::OneIssueListing >> KqpExplain::IdxFullscan [GOOD] >> KqpQuery::DictJoin [GOOD] |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |84.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |84.8%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |84.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> KqpQuery::UpdateWhereInSubquery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] Test command err: 2025-04-07T13:48:47.585527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567822245526680:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:47.585589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a2c/r3tmp/tmpCXyj5m/pdisk_1.dat 2025-04-07T13:48:48.382191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:48.382271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:48.428143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6808, node 1 2025-04-07T13:48:48.538484Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:48.538510Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:48.624707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T13:48:48.649585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T13:48:48.656911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:48.789109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:48.821245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046644480 2025-04-07T13:48:48.875588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:48.876324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:48.876503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:48:48.900469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.901049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:48.901072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.901119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:48:48.901172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.972952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:48.972979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:48.972987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:48.973098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:49.473878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.482582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:49.483954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.506234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:49.506549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:49.506568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:48:49.509203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:49.509236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:49.510809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.515577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033729557, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:49.515614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:49.515858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:49.516787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:49.520873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:49.521038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:49.521083Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:49.523684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:49.523741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:49.523857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:49.532895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:49.532954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:49.532977Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:49.533429Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:49.655680Z node 1 :TICKET_PARSER DEBUG: Ticket 024260243142FA8669EDAFD9096AF5B61206CC27 (ipv6:[::1]:51952) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-07T13:48:49.764946Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:51966) has now valid token of root@builtin 2025-04-07T13:48:49.936728Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-07T13:48:55.104519Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490567855161055922:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:55.104560Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a2c/r3tmp/tmpe3veJ2/pdisk_1.dat 2025-04-07T13:48:55.347400Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2868, node 4 2025-04-07T13:48:55.451671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:55.451780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:55.546315Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:55.633466Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:55.633485Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:55.633492Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:55.633629Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:48:55.872397Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:55.872753Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:55.872772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... 
chemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:49:37.624213Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:49:37.624251Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:49:37.624339Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 E0407 13:49:37.723221390 58976 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.747319695 59134 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.785413711 59134 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.803048652 59134 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.838218451 58975 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.864963631 59382 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.901363705 59381 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.940989381 58976 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:37.977152911 59134 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:38.001838135 58976 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:38.044837573 59451 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:38.070232644 58976 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-04-07T13:49:43.260406Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7490568061656791108:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:43.260507Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a2c/r3tmp/tmpNA2rIG/pdisk_1.dat 2025-04-07T13:49:43.503563Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:43.560617Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:43.560798Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:43.569178Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4221, node 28 2025-04-07T13:49:43.766466Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:43.766492Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:43.766505Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:43.766660Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.188129Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.188534Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:49:44.188563Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:44.190920Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:49:44.191117Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:49:44.191131Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:49:44.193060Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:49:44.193090Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:49:44.193424Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:44.194857Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.199015Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033784241, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:49:44.199049Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:49:44.199325Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:49:44.201154Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:49:44.201564Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:49:44.201635Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:49:44.201769Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:49:44.201812Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:49:44.201862Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:49:44.203789Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:49:44.203835Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:49:44.203857Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:49:44.203933Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 E0407 13:49:44.338289852 61809 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.378899439 62016 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0407 13:49:44.432266390 62016 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.465577925 61467 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.506190319 61809 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.528580363 62098 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.586042637 62098 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.605913791 61465 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.642261370 61810 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.663752342 61465 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.693175666 61465 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 13:49:44.713823349 61467 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
>> KqpExplain::ReadTableRangesFullScan [GOOD] |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |84.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-04-07T13:49:36.693341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568033811373385:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.707091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018ba/r3tmp/tmpyBOLYM/pdisk_1.dat 2025-04-07T13:49:38.097021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:38.194714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.253232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.253367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.285494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:38.626892Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.232450s 2025-04-07T13:49:38.626989Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.232603s TClient is connected to server localhost:21232 TServer::EnableGrpc on GrpcPort 17065, node 1 2025-04-07T13:49:39.697399Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:39.697792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:39.697842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:39.697963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:40.694339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.602511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:49:41.695623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568033811373385:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:41.695682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:41.788900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568055286210728:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.788986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568055286210717:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.789239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.796048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-07T13:49:41.808163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568055286210731:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:49:44.288904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.833768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:45.370078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:49:45.880102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.383225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.424450Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Handshake: worker# [1:7490568050991243198:2297] 2025-04-07T13:49:47.453240Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Create read session: session# [1:7490568081056015281:2296] 2025-04-07T13:49:47.453554Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:49:47.507125Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 9b Codec: RAW }] } } 2025-04-07T13:49:47.509228Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:49:47.608002Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015279:2800] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 9b Codec: RAW }] } } 2025-04-07T13:49:47.705598Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015368:2835] Handshake: worker# [1:7490568050991243198:2297] 2025-04-07T13:49:47.707197Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015368:2835] Create read session: session# [1:7490568081056015370:2296] 2025-04-07T13:49:47.710863Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015368:2835] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:49:47.742778Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490568081056015368:2835] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 9b Codec: RAW }] } } >> THealthCheckTest::StaticGroupIssue >> KqpQuery::QueryCancelWrite [GOOD] >> TStorageTenantTest::LsLs [GOOD] |84.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 9660, MsgBus: 29875 2025-04-07T13:49:31.178801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568011701320165:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:31.178854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b1/r3tmp/tmp9YBKly/pdisk_1.dat 2025-04-07T13:49:31.542935Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9660, node 1 2025-04-07T13:49:31.615191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:31.615271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:31.647634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:31.666973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:31.667000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:31.667008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:31.667171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29875 TClient is connected to server localhost:29875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:32.188394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.232954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.400879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:32.557827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.626511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.445238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568024586223566:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:34.445371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:34.747987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.774296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.813647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.848119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.927308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.997641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.063437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568028881191364:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.063518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.063720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568028881191369:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.066997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.084487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568028881191371:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:36.184154Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568011701320165:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.184216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:36.299861Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568033176158991:2466], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-07T13:49:36.301320Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmMzZmOTQtNTc4ZDc5ZTktYTE1YjkwNGEtMzYyM2RiYjU=, ActorId: [1:7490568033176158983:2461], ActorState: ExecuteState, TraceId: 01jr8870mpfekcve7mqvhfc69d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-07T13:49:36.351125Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568033176159004:2469], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-07T13:49:36.352502Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmMzZmOTQtNTc4ZDc5ZTktYTE1YjkwNGEtMzYyM2RiYjU=, ActorId: [1:7490568033176158983:2461], ActorState: ExecuteState, TraceId: 01jr8870p2b3dea7hgyh8wt099, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-07T13:49:36.462309Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568033176159013:2473], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-04-07T13:49:36.462472Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmMzZmOTQtNTc4ZDc5ZTktYTE1YjkwNGEtMzYyM2RiYjU=, ActorId: [1:7490568033176158983:2461], ActorState: ExecuteState, TraceId: 01jr8870qd66tk26e33rgqd1hw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 2301, MsgBus: 15134 2025-04-07T13:49:37.360541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568038410287024:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.364056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b1/r3tmp/tmpnWfJ00/pdisk_1.dat 2025-04-07T13:49:37.504866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:37.504941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:37.505130Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:37.521390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2301, node 2 2025-04-07T13:49:37.651323Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:37.651355Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:37.651366Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:37.651467Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15134 TClient is connected to server localhost:15134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 Child ... _TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.464343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.535914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.802403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568051295190605:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:40.802492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:40.835652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:40.917829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:40.987885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.021327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.045274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.113713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.160218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568055590158404:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.160302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.160502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568055590158409:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.163402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:41.177982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568055590158411:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:42.360417Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568038410287024:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:42.360486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7244, MsgBus: 1185 2025-04-07T13:49:43.511253Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568063287448821:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b1/r3tmp/tmpvvp4At/pdisk_1.dat 2025-04-07T13:49:43.562191Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:43.625905Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:43.634035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:43.634129Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7244, node 3 2025-04-07T13:49:43.639243Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:43.711933Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:43.711959Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:43.711968Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:43.712081Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1185 TClient is connected to server localhost:1185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.364573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.430339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:44.513928Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.713682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.797187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:47.073311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568080467319513:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.073405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.115044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.200997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.285944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.337649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.413744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.472570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.547849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568080467320017:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.547973Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.548278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568080467320022:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.552599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:47.565553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568080467320024:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:48.506775Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568063287448821:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:48.506868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 14050, MsgBus: 13650 2025-04-07T13:49:31.383678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568013600716285:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:31.383722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ac/r3tmp/tmpFQBkb3/pdisk_1.dat 2025-04-07T13:49:31.798630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14050, node 1 2025-04-07T13:49:31.914871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:31.914977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:31.917565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:31.973190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:31.973233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:31.973245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:31.973327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13650 TClient is connected to server localhost:13650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:32.619972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:49:32.663746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.841516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:33.008369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.084774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.125286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568026485619647:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.140499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.164298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.193862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.240057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.290341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.318434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.373389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.439088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568030780587444:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.439152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.439191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568030780587449:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.442068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.449909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568030780587451:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:36.383897Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568013600716285:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.383976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 8708, MsgBus: 13254 2025-04-07T13:49:37.540688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568037372844927:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.540725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ac/r3tmp/tmpBaWU93/pdisk_1.dat 2025-04-07T13:49:37.659453Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:37.671438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:37.671515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:49:37.673559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8708, node 2 2025-04-07T13:49:37.720509Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:37.720527Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:37.720533Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:37.720622Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13254 TClient is connected to server localhost:13254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 Children ... :[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2025-04-07T13:49:42.544767Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568037372844927:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:42.544841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21666, MsgBus: 13285 2025-04-07T13:49:43.249160Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568064507025401:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:43.249204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ac/r3tmp/tmpHpCDtA/pdisk_1.dat 2025-04-07T13:49:43.335197Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:43.352624Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 21666, node 3 2025-04-07T13:49:43.355395Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:43.357519Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:43.453783Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:43.453805Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:43.453815Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:43.453930Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:13285 TClient is connected to server localhost:13285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.085843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.097151Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:44.111197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:44.173636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.357362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.423580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.641337Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568077391928980:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.641417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.721921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.762188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.840923Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.885225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.949882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.028586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.089098Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568081686896778:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.089176Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.089478Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568081686896783:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.093465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:47.109757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568081686896785:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:48.278086Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568064507025401:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:48.278154Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:48.525094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:48.892719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.945289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["id"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["id"]}],"Node 
Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> KqpParams::Decimal+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 15307, MsgBus: 26168 2025-04-07T13:49:31.590216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568010886398428:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:31.590320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a6/r3tmp/tmpmjZIiu/pdisk_1.dat 2025-04-07T13:49:32.083779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:32.083876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15307, node 1 2025-04-07T13:49:32.091471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:32.119248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:32.179183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.179206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.179215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.179313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26168 TClient is connected to server localhost:26168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:32.851429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:32.863825Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:32.871312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.989525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.156250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.245833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.022918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568028066269298:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.023022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.346567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.428503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.478636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.528315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.576019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.654300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.723308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568028066269804:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.723389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.723718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568028066269809:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.727950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.743913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568028066269811:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:36.672905Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568010886398428:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.673137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:37.401604Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568036656204758:2470], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jr8871deap2ndhcm93g08k8w. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2M3YTE4MTItNDI1MzhmZTItNmM5MTYwZTUtM2E1MjY4YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2025-04-07T13:49:37.410142Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568036656204759:2471], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr8871deap2ndhcm93g08k8w. SessionId : ydb://session/3?node_id=1&id=Y2M3YTE4MTItNDI1MzhmZTItNmM5MTYwZTUtM2E1MjY4YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568036656204754:2462], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:37.411997Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M3YTE4MTItNDI1MzhmZTItNmM5MTYwZTUtM2E1MjY4YQ==, ActorId: [1:7490568036656204719:2462], ActorState: ExecuteState, TraceId: 01jr8871deap2ndhcm93g08k8w, Create QueryResponse for error on request, msg:
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 8254, MsgBus: 26928 2025-04-07T13:49:38.173552Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568041925527825:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a6/r3tmp/tmpv4JXUx/pdisk_1.dat 2025-04-07T13:49:38.258974Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:38.299460Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.318827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.318900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.320307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8254, node 2 2025-04-07T13:49:38.415881Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.415907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.415914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.416019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26928 TClient is connected to server localhost:26928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIs ... ard: 72057594046644480 waiting... 2025-04-07T13:49:39.378604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:39.461939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.485057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568054810431368:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.485273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.503268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.541309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.619498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.655276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.697293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.788723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.872047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568054810431863:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.872198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.872548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568054810431868:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:41.876432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:41.889099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568054810431871:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:43.172986Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568041925527825:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:43.173036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29415, MsgBus: 7488 2025-04-07T13:49:44.325958Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568066370171793:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a6/r3tmp/tmpHR5LKr/pdisk_1.dat 2025-04-07T13:49:44.407251Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:44.483017Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:44.504165Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:44.504267Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:44.515323Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29415, node 3 2025-04-07T13:49:44.599407Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:44.599422Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:44.599427Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:44.599497Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7488 TClient is connected to server localhost:7488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:45.180214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:45.190959Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:45.205969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.289818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.456624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.544021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:47.768891Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568079255075205:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.769046Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.818789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.863095Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.942857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.971418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.001683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.034573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.127247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568083550043000:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.127349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.133739Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568083550043005:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.137256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:48.158533Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568083550043008:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:49.318683Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568066370171793:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:49.318741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbQueryService::TestAttachTwice [GOOD] >> YdbQueryService::TestCreateDropAttachSession >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TStorageTenantTest::GenericCases [GOOD] >> KqpExplain::Predicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRangesFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 27014, MsgBus: 8244 2025-04-07T13:49:31.673136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568012636802296:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:31.673184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a0/r3tmp/tmpTl3YEo/pdisk_1.dat 2025-04-07T13:49:32.143157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:32.145094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:32.145177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:32.152431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27014, node 1 2025-04-07T13:49:32.265797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.265823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.265832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.265919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8244 TClient is connected to server localhost:8244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:32.906099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.925219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:32.938696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.053362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.214999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:33.317804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.104279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568029816673183:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.104361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.410491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.480971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.512018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.541274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.591862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.666143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.745764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568029816673684:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.745828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.745872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568029816673689:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.753029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.766139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568029816673691:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:36.716857Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568012636802296:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.716925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 11082, MsgBus: 13763 2025-04-07T13:49:37.861437Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568036812767857:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.861493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a0/r3tmp/tmpjSqixt/pdisk_1.dat 2025-04-07T13:49:38.052949Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11082, node 2 2025-04-07T13:49:38.117188Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.117210Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.117217Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.117320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:38.138142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.138238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.139945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13763 TClient is connected to server localhost:13763 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:38.602095Z ... E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":21,"Plans":[{"PlanNodeId":25,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"SortBy":"row.Key","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":4},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 30442, MsgBus: 19532 2025-04-07T13:49:43.949820Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568064018669312:2092];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a0/r3tmp/tmpiZbplD/pdisk_1.dat 2025-04-07T13:49:44.023490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:44.132571Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:44.152444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:44.152530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:44.158570Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30442, node 3 2025-04-07T13:49:44.340990Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:44.341011Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:44.341020Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:44.341112Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19532 TClient is connected 
to server localhost:19532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.914799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.946907Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:44.967163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.066123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.242503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:45.307626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.833650Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568081198540157:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.833807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.872028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.918591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.987848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.026789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.105827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.169889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.253046Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568085493507958:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.253175Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.253495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568085493507963:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.257559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:48.275310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568085493507965:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:48.952764Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568064018669312:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:48.960214Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:49.460518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-04-07T13:49:45.593480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568072406189704:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.606797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:45.665548Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568072421137741:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.665598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001545/r3tmp/tmpHgPf0n/pdisk_1.dat 2025-04-07T13:49:46.195010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.195099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.196677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.196732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.201741Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:49:46.201886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:46.203174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:46.237408Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8466 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:46.533911Z node 1 :TX_PROXY DEBUG: actor# [1:7490568072406189712:2136] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:46.533971Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568076701157495:2449] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:46.534102Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568072406189768:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:46.534163Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568072406189768:2150], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:46.534341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:46.536144Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189419:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568076701157500:2450] 2025-04-07T13:49:46.536202Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568072406189419:2052] Subscribe: subscriber# [1:7490568076701157500:2450], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.536269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189422:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568076701157501:2450] 2025-04-07T13:49:46.536294Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568072406189422:2055] Subscribe: subscriber# [1:7490568076701157501:2450], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.536315Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189425:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568076701157502:2450] 2025-04-07T13:49:46.536327Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568072406189425:2058] Subscribe: subscriber# [1:7490568076701157502:2450], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.536388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157500:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568072406189419:2052] 2025-04-07T13:49:46.536419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157501:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568072406189422:2055] 2025-04-07T13:49:46.536448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157502:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568072406189425:2058] 2025-04-07T13:49:46.536503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076701157497:2450] 2025-04-07T13:49:46.536530Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076701157498:2450] 2025-04-07T13:49:46.536612Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568076701157496:2450][/dc-1] Set up state: owner# [1:7490568072406189768:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.536741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076701157499:2450] 2025-04-07T13:49:46.536785Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568076701157496:2450][/dc-1] Path was already updated: owner# [1:7490568072406189768:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.536820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157500:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157497:2450], cookie# 1 2025-04-07T13:49:46.536834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157501:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157498:2450], cookie# 1 2025-04-07T13:49:46.536845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157502:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157499:2450], cookie# 1 2025-04-07T13:49:46.536966Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189419:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568076701157500:2450] 2025-04-07T13:49:46.537008Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189419:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157500:2450], cookie# 1 2025-04-07T13:49:46.537032Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189422:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568076701157501:2450] 2025-04-07T13:49:46.537061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189422:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157501:2450], cookie# 1 2025-04-07T13:49:46.537084Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189425:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568076701157502:2450] 2025-04-07T13:49:46.537098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568072406189425:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568076701157502:2450], cookie# 1 2025-04-07T13:49:46.539629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157500:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568072406189419:2052], cookie# 1 2025-04-07T13:49:46.539670Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157501:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568072406189422:2055], cookie# 1 2025-04-07T13:49:46.539682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568076701157502:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568072406189425:2058], cookie# 1 2025-04-07T13:49:46.539713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076701157497:2450], cookie# 1 2025-04-07T13:49:46.539764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:46.539784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076701157498:2450], cookie# 1 2025-04-07T13:49:46.539803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:46.539849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076701157499:2450], cookie# 1 2025-04-07T13:49:46.539864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568076701157496:2450][/dc-1] Unexpected sync response: sender# [1:7490568076701157499:2450], cookie# 1 2025-04-07T13:49:46.594625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568072406189768:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { Reser ... 
.metadata/workload_manager/delayed_requests } 2025-04-07T13:49:50.923696Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072421137906:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.923821Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007135:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.923901Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568093895974478:2132], recipient# [2:7490568093895974477:2283], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.924086Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:50.997656Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072421137906:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.997798Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007149:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 
2025-04-07T13:49:50.997846Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007150:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.997966Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568093895974480:2133], recipient# [2:7490568089601007148:2281], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.998480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490568089601007148:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:51.072924Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072421137906:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.073048Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007149:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.073092Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007150:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.073179Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568098190941777:2134], recipient# [2:7490568089601007148:2281], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.073322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490568089601007148:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:51.168945Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072421137906:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.169074Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007149:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.169134Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072421137906:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568089601007150:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.169225Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568098190941778:2135], recipient# [2:7490568089601007148:2281], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.172445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490568089601007148:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 8960, MsgBus: 24364 2025-04-07T13:49:27.007906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567994729481681:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.007942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c7/r3tmp/tmpS5qIQO/pdisk_1.dat 2025-04-07T13:49:27.327996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8960, node 1 2025-04-07T13:49:27.395465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:27.395566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:27.399832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:27.412432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:27.412452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:27.412458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:27.412544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24364 TClient is connected to server localhost:24364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:27.904113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:27.952926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:27.969554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:28.138458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:28.285824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:28.367429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:30.058571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568007614385259:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.058672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.250907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.297104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.324841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.400096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.426782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.498421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.582450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568007614385768:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.582600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.582804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568007614385773:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:30.586361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:30.596221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568007614385775:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:32.009660Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567994729481681:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:32.009762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10944, MsgBus: 21510 2025-04-07T13:49:37.071470Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568037707105766:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.071514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c7/r3tmp/tmpyziLR1/pdisk_1.dat 2025-04-07T13:49:37.182766Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:37.199961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:37.200043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:37.205323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10944, node 2 2025-04-07T13:49:37.257321Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:37.257347Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:37.257371Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:37.257492Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21510 TClient is connected to server localhost:21510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:37.673841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:37.687772Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:37.701746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:37.774196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:37.951446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.047492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.192159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568050592 ... DescribeSchema 2025-04-07T13:49:44.274209Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879694:3872], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.277358Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568067771879691:2612], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:49:44.277843Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjY5NjkyYWItMzExMmY1OGUtYTljYTA1ZDYtMzliNjc2MzQ=, ActorId: [2:7490568067771879687:2610], ActorState: ExecuteState, TraceId: 01jr8878dx2wc4swggnmwv9pyy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:49:44.294771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.365964Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879721:3884], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.365984Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879721:3884], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.367425Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568067771879718:2622], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:49:44.367905Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzM1NDQ0NmUtNGEwNDdmYzYtM2JlZDhkZWQtNjFmZWJlYmM=, ActorId: [2:7490568067771879714:2620], ActorState: ExecuteState, TraceId: 01jr8878h38kabf8csb0tp9p98, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:49:44.429655Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879738:3888], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.429681Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879738:3888], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.431871Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568067771879735:2631], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:49:44.433225Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkyNmRkOWQtZWIzMjUxMzYtNjJiZTFjMmMtOWU1ODg2ZWQ=, ActorId: [2:7490568067771879731:2629], ActorState: ExecuteState, TraceId: 01jr8878k11y2c334p5jad0bv8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:49:44.526871Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879757:3894], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.526899Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7490568067771879757:3894], for# user0@builtin, access# DescribeSchema 2025-04-07T13:49:44.529279Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568067771879754:2640], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:49:44.531033Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWZmZGM5NzgtZDk4ZjA3MDYtNjQyMTFjYmQtMzQ2OWJlNTA=, ActorId: [2:7490568067771879750:2638], ActorState: ExecuteState, TraceId: 01jr8878nk482qjsrg0ffbqyez, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 7528, MsgBus: 17124 2025-04-07T13:49:45.365182Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568073088413409:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.365230Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c7/r3tmp/tmpwTny6J/pdisk_1.dat 2025-04-07T13:49:45.482651Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7528, node 3 2025-04-07T13:49:45.508610Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:45.508715Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:45.510591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:45.553205Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:45.553225Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:45.553231Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:45.553320Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17124 TClient is connected to server localhost:17124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:45.977532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.997635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:46.059564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.218243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.281949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:48.770896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568085973316973:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.770980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.833390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.884146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.923868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.960971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.019734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.087037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.172799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568090268284765:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.172900Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.176792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568090268284770:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.182378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:49.216907Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568090268284772:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:50.370920Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568073088413409:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:50.371436Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16722, MsgBus: 30128 2025-04-07T13:49:31.531974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568010786099244:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:31.532020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ab/r3tmp/tmpw24Uno/pdisk_1.dat 2025-04-07T13:49:31.924146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16722, node 1 2025-04-07T13:49:31.986997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:31.987758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:31.989405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:32.050147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.050169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.050174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.050244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30128 TClient is connected to server localhost:30128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:32.660231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.674132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:32.685751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:49:32.866388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.048651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.131818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.907001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568023671002683:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:34.907110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.196561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.231944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.258233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.281287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.373982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.449744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.545845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568027965970486:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.545947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.546234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568027965970492:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.550424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.562948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568027965970494:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:36.532770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568010786099244:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:36.532841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25631, MsgBus: 3930 2025-04-07T13:49:37.917063Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568036457959024:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.917164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ab/r3tmp/tmpSlk8Q4/pdisk_1.dat 2025-04-07T13:49:38.049437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.063748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.063818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.065540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25631, node 2 2025-04-07T13:49:38.177129Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.177152Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.177162Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.177262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3930 TClient is connected to server localhost:3930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:38.637085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.663657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:38.760884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:38.942025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:39.003456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.209839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568053637829884:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... eration part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:41.586814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568053637830389:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:42.924752Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568036457959024:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:42.924832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5194, MsgBus: 22024 2025-04-07T13:49:44.124795Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568067008998636:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:44.125213Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ab/r3tmp/tmpodT6zg/pdisk_1.dat 2025-04-07T13:49:44.286906Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:44.308573Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:44.308643Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:44.315749Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5194, node 3 2025-04-07T13:49:44.373113Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:44.373133Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:44.373143Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:44.373252Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22024 TClient is connected to server localhost:22024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:44.917024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.928094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:45.022293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.236723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:45.319656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:47.812859Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568079893902214:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.812938Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:47.854210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:47.921971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.002126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.037614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.094192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.147358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:48.230081Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568084188870013:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.230164Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.230502Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568084188870018:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:48.237044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:48.249391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568084188870020:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:49.153551Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568067008998636:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:49.153610Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:49.561905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:50.390671Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568092778805196:2500], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-07T13:49:50.392513Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODU2ODJmYy04NDI0YWU4MS03OWYyYTljZS1iMmI4MTBmOQ==, ActorId: [3:7490568092778805194:2499], ActorState: ExecuteState, TraceId: 01jr887ecs0vyngj8cwxnj5n5e, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:49:50.511494Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDU2YTJjMDUtYzcwNzBjYTctZGJiZGJkNjMtN2IyYzI5MmY=, ActorId: [3:7490568092778805201:2502], ActorState: ExecuteState, TraceId: 01jr887ee408thecck7vm74pbp, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1233: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-07T13:49:50.549431Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568092778805246:2508], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T13:49:50.549658Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTMwNDMyM2EtMjg2ZGVmMzUtZjIxMzc0ZmMtZDM2NDYyYmM=, ActorId: [3:7490568092778805244:2507], ActorState: ExecuteState, TraceId: 01jr887ej68128rn523fqvmyye, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:49:50.598901Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568092778805257:2513], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T13:49:50.601086Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTQzNjI5NWMtY2QwOTVjYTUtYjRhMzBiYjYtODNhZjMzN2E=, ActorId: [3:7490568092778805255:2512], ActorState: ExecuteState, TraceId: 01jr887ek7dett8jpbam3bx9ar, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2025-04-07T13:49:46.563641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568076041178368:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:46.563679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001514/r3tmp/tmpggPUs9/pdisk_1.dat 2025-04-07T13:49:47.104588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:47.104692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:47.113025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:47.134314Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22360 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:47.366505Z node 1 :TX_PROXY DEBUG: actor# [1:7490568076041178620:2130] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:47.366568Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568080336146349:2429] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:47.366709Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568076041178699:2164], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:47.366757Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568076041178699:2164], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:47.366942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:47.368705Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178294:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080336146354:2430] 2025-04-07T13:49:47.368766Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076041178294:2050] Subscribe: subscriber# [1:7490568080336146354:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:47.368816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178297:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080336146355:2430] 2025-04-07T13:49:47.368839Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076041178297:2053] Subscribe: subscriber# [1:7490568080336146355:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# 
AckNotifications: true 2025-04-07T13:49:47.368861Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178300:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080336146356:2430] 2025-04-07T13:49:47.368876Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076041178300:2056] Subscribe: subscriber# [1:7490568080336146356:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:47.368931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146354:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076041178294:2050] 2025-04-07T13:49:47.368958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146355:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076041178297:2053] 2025-04-07T13:49:47.368985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146356:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076041178300:2056] 2025-04-07T13:49:47.369027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080336146351:2430] 2025-04-07T13:49:47.369083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080336146352:2430] 2025-04-07T13:49:47.369131Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568080336146350:2430][/dc-1] Set up state: owner# [1:7490568076041178699:2164], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:47.369215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080336146353:2430] 2025-04-07T13:49:47.369254Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568080336146350:2430][/dc-1] Path was already updated: owner# [1:7490568076041178699:2164], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:47.369287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146354:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146351:2430], cookie# 1 2025-04-07T13:49:47.369301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146355:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146352:2430], cookie# 1 2025-04-07T13:49:47.369315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7490568080336146356:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146353:2430], cookie# 1 2025-04-07T13:49:47.369336Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178294:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080336146354:2430] 2025-04-07T13:49:47.369378Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178294:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146354:2430], cookie# 1 2025-04-07T13:49:47.369399Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178297:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080336146355:2430] 2025-04-07T13:49:47.369412Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178297:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146355:2430], cookie# 1 2025-04-07T13:49:47.369426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178300:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080336146356:2430] 2025-04-07T13:49:47.369438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178300:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080336146356:2430], cookie# 1 2025-04-07T13:49:47.373342Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146354:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076041178294:2050], cookie# 1 2025-04-07T13:49:47.373391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146355:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076041178297:2053], cookie# 1 2025-04-07T13:49:47.373406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080336146356:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076041178300:2056], cookie# 1 2025-04-07T13:49:47.373436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568080336146351:2430], cookie# 1 2025-04-07T13:49:47.373455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:47.373469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568080336146352:2430], cookie# 1 2025-04-07T13:49:47.373488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:47.373512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568080336146353:2430], cookie# 1 2025-04-07T13:49:47.373550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080336146350:2430][/dc-1] Unexpected sync response: sender# [1:7490568080336146353:2430], cookie# 1 2025-04-07T13:49:47.427906Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:47.428256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 
quests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7490568093221049036:3025] 2025-04-07T13:49:50.174674Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568093221049028:3025][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7490568076041178699:2164], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:50.174691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568093221049028:3025][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7490568093221049037:3025] 2025-04-07T13:49:50.174709Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568093221049028:3025][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7490568076041178699:2164], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:50.174726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178294:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049032:3024] 2025-04-07T13:49:50.174739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178294:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049038:3025] 2025-04-07T13:49:50.174752Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178297:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049033:3024] 2025-04-07T13:49:50.174765Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178297:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049039:3025] 2025-04-07T13:49:50.174778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178300:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049034:3024] 2025-04-07T13:49:50.174789Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076041178300:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568093221049040:3025] 2025-04-07T13:49:50.174824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T13:49:50.174879Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7490568093221049027:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:50.174947Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568076041178699:2164], cacheItem# { Subscriber: { Subscriber: [1:7490568093221049027:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.174968Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-07T13:49:50.175004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568076041178699:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7490568093221049028:3025] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:50.175039Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568076041178699:2164], cacheItem# { Subscriber: { Subscriber: [1:7490568093221049028:3025] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.175102Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568093221049041:3026], recipient# [1:7490568093221049026:2304], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.578564Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568076041178699:2164], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.578668Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568076041178699:2164], cacheItem# { Subscriber: { Subscriber: [1:7490568080336146373:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: 
ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.578733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568093221049046:3027], recipient# [1:7490568093221049045:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.172738Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568076041178699:2164], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.172858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568076041178699:2164], cacheItem# { Subscriber: { Subscriber: [1:7490568093221049018:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.172934Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568097516016359:3031], recipient# [1:7490568097516016358:2306], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.564059Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568076041178368:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:51.564112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:51.581481Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568076041178699:2164], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.581607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568076041178699:2164], cacheItem# { Subscriber: { Subscriber: 
[1:7490568080336146373:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.581704Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568097516016366:3034], recipient# [1:7490568097516016365:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2025-04-07T13:49:45.559146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568073643716431:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.559510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00153d/r3tmp/tmpEVNIFQ/pdisk_1.dat 2025-04-07T13:49:46.114502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:46.135151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.135221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.149302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23299 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:46.471510Z node 1 :TX_PROXY DEBUG: actor# [1:7490568073643716518:2113] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:46.471551Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568077938684277:2429] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:46.471645Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:46.471680Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568073643716559:2133], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:46.471823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:46.477266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716224:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568077938684282:2430] 2025-04-07T13:49:46.477332Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716224:2050] Subscribe: subscriber# [1:7490568077938684282:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.477393Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716230:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568077938684284:2430] 2025-04-07T13:49:46.477410Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716230:2056] Subscribe: subscriber# [1:7490568077938684284:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.477446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684282:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568073643716224:2050] 2025-04-07T13:49:46.477472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684284:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568073643716230:2056] 2025-04-07T13:49:46.477502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568077938684279:2430] 2025-04-07T13:49:46.477534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568077938684281:2430] 2025-04-07T13:49:46.477577Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568077938684278:2430][/dc-1] Set up state: owner# [1:7490568073643716559:2133], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.477689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684282:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7490568077938684279:2430], cookie# 1 2025-04-07T13:49:46.477708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684283:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568077938684280:2430], cookie# 1 2025-04-07T13:49:46.477720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684284:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568077938684281:2430], cookie# 1 2025-04-07T13:49:46.477740Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716224:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568077938684282:2430] 2025-04-07T13:49:46.477759Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716224:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568077938684282:2430], cookie# 1 2025-04-07T13:49:46.477777Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716230:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568077938684284:2430] 2025-04-07T13:49:46.477787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716230:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568077938684284:2430], cookie# 1 2025-04-07T13:49:46.481046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716227:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568077938684283:2430] 2025-04-07T13:49:46.481102Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716227:2053] Subscribe: subscriber# [1:7490568077938684283:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.481158Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716227:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568077938684283:2430], cookie# 1 2025-04-07T13:49:46.481211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684282:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568073643716224:2050], cookie# 1 2025-04-07T13:49:46.481239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684284:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568073643716230:2056], cookie# 1 2025-04-07T13:49:46.481261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684283:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568073643716227:2053] 2025-04-07T13:49:46.481277Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568077938684283:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568073643716227:2053], cookie# 1 2025-04-07T13:49:46.481304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568077938684279:2430], cookie# 1 2025-04-07T13:49:46.481327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:46.481374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568077938684281:2430], cookie# 1 
2025-04-07T13:49:46.481391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:46.481428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568077938684280:2430] 2025-04-07T13:49:46.481511Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568077938684278:2430][/dc-1] Path was already updated: owner# [1:7490568073643716559:2133], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.481541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568077938684280:2430], cookie# 1 2025-04-07T13:49:46.481555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568077938684278:2430][/dc-1] Unexpected sync response: sender# [1:7490568077938684280:2430], cookie# 1 2025-04-07T13:49:46.481720Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716227:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568077938684283:2430] 2025-04-07T13:49:46.519209Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568073643716559:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:46.519492Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568073643716559:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... d event# 269550080, Sender [2:7490568088756208347:2287], Recipient [1:7490568095118554424:3126] 2025-04-07T13:49:50.183323Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-07T13:49:50.183376Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568095118554424:3126] txid# 281474976710668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-07T13:49:50.183429Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [2:7490568088756208529:2301], Recipient [1:7490568095118554424:3126] 2025-04-07T13:49:50.183440Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-07T13:49:50.183459Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568095118554424:3126] txid# 281474976710668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2025-04-07T13:49:50.183685Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568095118554424:3126] txid# 281474976710668 MergeResult ExecComplete TDataReq marker# P17 2025-04-07T13:49:50.183783Z node 1 :TX_PROXY INFO: Actor# [1:7490568095118554424:3126] txid# 281474976710668 RESPONSE Status# ExecComplete prepare time: 0.012712s execute time: 0.011504s total time: 0.024216s marker# P13 2025-04-07T13:49:50.187983Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.188074Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568073643716559:2133], cacheItem# { Subscriber: { Subscriber: [1:7490568090823586611:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.188124Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568095118554435:3135], recipient# [1:7490568095118554434:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.215941Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716224:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 
}: sender# [2:7490568080166273511:2100] 2025-04-07T13:49:50.215971Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716224:2050] Unsubscribe: subscriber# [2:7490568080166273511:2100], path# /dc-1/USER_0 2025-04-07T13:49:50.215994Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716227:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7490568080166273512:2100] 2025-04-07T13:49:50.216005Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716227:2053] Unsubscribe: subscriber# [2:7490568080166273512:2100], path# /dc-1/USER_0 2025-04-07T13:49:50.216021Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568073643716230:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7490568080166273513:2100] 2025-04-07T13:49:50.216031Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568073643716230:2056] Unsubscribe: subscriber# [2:7490568080166273513:2100], path# /dc-1/USER_0 2025-04-07T13:49:50.216154Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-07T13:49:50.217180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:49:50.538581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568073643716431:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:50.538650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:50.599650Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:50.599767Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568073643716559:2133], cacheItem# { Subscriber: { Subscriber: [1:7490568077938684290:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:50.599835Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568095118554444:3140], recipient# [1:7490568095118554443:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.197386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.197481Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568073643716559:2133], cacheItem# { Subscriber: { Subscriber: [1:7490568090823586611:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.197540Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568099413521759:3146], recipient# [1:7490568099413521758:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.541133Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.541262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568073643716559:2133], cacheItem# { Subscriber: { Subscriber: [1:7490568077938684290:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.541356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568099413521764:3147], recipient# [1:7490568099413521763:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.600973Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568073643716559:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:51.601068Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568073643716559:2133], cacheItem# { Subscriber: { Subscriber: [1:7490568077938684290:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:51.601131Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568099413521766:3148], recipient# [1:7490568099413521765:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 5447, MsgBus: 28553 2025-04-07T13:49:33.251167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568021031223750:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.251219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001385/r3tmp/tmpoVj77h/pdisk_1.dat 2025-04-07T13:49:33.645174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:33.645286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:33.647907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:33.663501Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5447, node 1 2025-04-07T13:49:33.751873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.751893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.751904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.751981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28553 TClient is connected to server localhost:28553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:34.400141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.421737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.558887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.734828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.818127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.515785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568033916127329:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.515870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.857653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.886826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.915941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.938708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.965065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.037716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.104872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038211095125:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.104990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.105437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038211095130:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.109324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.123588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568038211095132:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:38.255952Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568021031223750:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.256056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"[{column0: 1,column1: 2,column2: 3},{column0: 4,column1: 5,column2: 6}]","Name":"Iterator"}],"Node Type":"ConstantExpr"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 7039, MsgBus: 24528 2025-04-07T13:49:39.036156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568048165761464:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:39.040716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001385/r3tmp/tmpgNNbs8/pdisk_1.dat 2025-04-07T13:49:39.261874Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:39.283100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:39.283180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:39.286665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7039, node 2 2025-04-07T13:49:39.393189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:39.393214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:39.393221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:39.393313Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24528 TClient is connected to server localhost:24528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:39.946860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:39.969978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.065538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.241849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.327892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation ... elf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:42.822165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568061050665547:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.822231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568061050665552:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.822250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:42.825302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:42.835739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568061050665554:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:43.923891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:44.030290Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568048165761464:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:44.030344Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:44.273946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:49:44.322268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 28571, MsgBus: 2793 2025-04-07T13:49:45.587745Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568073360968258:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.587827Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001385/r3tmp/tmp9zhJuz/pdisk_1.dat 2025-04-07T13:49:45.692128Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:45.705851Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:45.705920Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:45.707809Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 28571, node 3 2025-04-07T13:49:45.753187Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:45.753210Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:45.753216Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:45.753311Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2793 TClient is connected to server localhost:2793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:46.263494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.270140Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:46.276576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.383833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.583681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:46.706270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:49.210656Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568090540839118:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.210737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.261568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.315507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.369757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.435101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.509502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.594410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:49.699636Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568090540839619:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.699715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.700077Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568090540839624:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:49.704297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:49.715962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568090540839626:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:50.603145Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568073360968258:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:50.603482Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:50.897508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> ObjectStorageListingTest::ListingNoFilter >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> DataShardSnapshots::MvccSnapshotLockedWritesRestart [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> TExtSubDomainTest::DeclareAndLs >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> KqpLimits::AffectedShardsLimit [GOOD] >> TExtSubDomainTest::DeclareAndDrop >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::GenericCases >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-04-07T13:49:46.196001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568076439393234:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:46.196040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001528/r3tmp/tmpvMGYj8/pdisk_1.dat 2025-04-07T13:49:46.814724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:46.841099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.841205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.844898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:26333 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:47.071473Z node 1 :TX_PROXY DEBUG: actor# [1:7490568076439393325:2097] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:47.071512Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568080734361108:2424] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:47.071715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568076439393366:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:47.071755Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568076439393366:2123], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:47.071967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:47.073860Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393063:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080734361113:2425] 2025-04-07T13:49:47.073925Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076439393063:2049] Subscribe: subscriber# [1:7490568080734361113:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:47.073987Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393069:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080734361115:2425] 2025-04-07T13:49:47.074018Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076439393069:2055] Subscribe: subscriber# [1:7490568080734361115:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:47.074082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361113:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076439393063:2049] 2025-04-07T13:49:47.074102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361115:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076439393069:2055] 2025-04-07T13:49:47.074155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080734361110:2425] 2025-04-07T13:49:47.074190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080734361112:2425] 2025-04-07T13:49:47.074240Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568080734361109:2425][/dc-1] Set up state: owner# [1:7490568076439393366:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:47.074354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361113:2425][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361110:2425], cookie# 1 2025-04-07T13:49:47.074377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361114:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361111:2425], cookie# 1 2025-04-07T13:49:47.074398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361115:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361112:2425], cookie# 1 2025-04-07T13:49:47.074439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393063:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080734361113:2425] 2025-04-07T13:49:47.074461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393063:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361113:2425], cookie# 1 2025-04-07T13:49:47.074494Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393069:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080734361115:2425] 2025-04-07T13:49:47.074520Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393069:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361115:2425], cookie# 1 2025-04-07T13:49:47.076180Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393066:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568080734361114:2425] 2025-04-07T13:49:47.076215Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568076439393066:2052] Subscribe: subscriber# [1:7490568080734361114:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:47.076259Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393066:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568080734361114:2425], cookie# 1 2025-04-07T13:49:47.076285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361113:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076439393063:2049], cookie# 1 2025-04-07T13:49:47.076294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361115:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076439393069:2055], cookie# 1 2025-04-07T13:49:47.076309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361114:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568076439393066:2052] 2025-04-07T13:49:47.076318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568080734361114:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568076439393066:2052], cookie# 1 2025-04-07T13:49:47.076341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568080734361110:2425], cookie# 1 2025-04-07T13:49:47.076354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:47.076369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 
Partial: 0 }: sender# [1:7490568080734361112:2425], cookie# 1 2025-04-07T13:49:47.076380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:47.076397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568080734361111:2425] 2025-04-07T13:49:47.076429Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568080734361109:2425][/dc-1] Path was already updated: owner# [1:7490568076439393366:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:47.076456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568080734361111:2425], cookie# 1 2025-04-07T13:49:47.076473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568080734361109:2425][/dc-1] Unexpected sync response: sender# [1:7490568080734361111:2425], cookie# 1 2025-04-07T13:49:47.076485Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568076439393066:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568080734361114:2425] 2025-04-07T13:49:47.135719Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568076439393366:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:47.136042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568076439393366:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... ain][4:7490568109317047043:2738][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490568109317047048:2738] 2025-04-07T13:49:54.945365Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7490568109317047043:2738][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Set up state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.945365Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7490568109317047044:2739][/dc-1/USER_0/.metadata/workload_manager/running_requests] Set up state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.945388Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047044:2739][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7490568109317047049:2739] 2025-04-07T13:49:54.945393Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047043:2738][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490568109317047050:2738] 2025-04-07T13:49:54.945417Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7490568109317047044:2739][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.945419Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7490568109317047043:2738][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.945470Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 } 2025-04-07T13:49:54.945541Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7490568109317047043:2738] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:54.945629Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[4:7490568096432144194:2105], cacheItem# { Subscriber: { Subscriber: [4:7490568109317047043:2738] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:54.945660Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 } 2025-04-07T13:49:54.945703Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7490568109317047044:2739] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:54.945766Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7490568096432144194:2105], cacheItem# { Subscriber: { Subscriber: [4:7490568109317047044:2739] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:54.945882Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490568109317047057:2740], recipient# [4:7490568109317047041:2344], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:54.951369Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490568109317047041:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:54.951796Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490568096432144194:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:54.951850Z node 4 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [4:7490568096432144194:2105], path# /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480 2025-04-07T13:49:54.952006Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:54.952534Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7490568109317047060:2741] 2025-04-07T13:49:54.952581Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7490568109317047061:2741] 2025-04-07T13:49:54.952613Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.952639Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7490568109317047062:2741] 2025-04-07T13:49:54.952684Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7490568109317047059:2741][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7490568096432144194:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:54.952738Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 0 } 2025-04-07T13:49:54.952812Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7490568096432144194:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7490568109317047059:2741] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:54.952891Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7490568096432144194:2105], cacheItem# { Subscriber: { Subscriber: [4:7490568109317047059:2741] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:54.952958Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490568109317047066:2742], recipient# [4:7490568109317047058:2345], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:54.953836Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 1415, MsgBus: 2112 2025-04-07T13:49:32.259103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568014940675507:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:32.259133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139b/r3tmp/tmptBvahb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1415, node 1 2025-04-07T13:49:32.668366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:32.678478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:32.678761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:32.683622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:32.725768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:32.725788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:32.725794Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:32.725877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2112 TClient is connected to server localhost:2112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:33.265191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.321246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.461745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:33.624595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:33.692046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.634768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568027825579076:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.642929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.682594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.725533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.762871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.813787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.851031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.919788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:35.973649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568027825579573:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.973732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.973997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568027825579578:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:35.977107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:35.988371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568027825579580:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:37.047495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:37.267246Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568014940675507:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.267306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:37.342903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.379719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key [1, 4)","Key [42, 42]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 25852, MsgBus: 27179 2025-04-07T13:49:38.675381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568040002326173:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.675424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139b/r3tmp/tmpzulbGs/pdisk_1.dat 2025-04-07T13:49:38.860179Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.872713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.872786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.874053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25852, node 2 2025-04-07T13:49:38.926589Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:38.926608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:38.926615Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:38.926725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27179 TClient is connected to server localhost:27179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } ... :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:49:45.466827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:49:45.466848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6912, MsgBus: 22896 2025-04-07T13:49:47.618944Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568078477576632:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139b/r3tmp/tmpMCrehR/pdisk_1.dat 2025-04-07T13:49:47.692044Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:47.754715Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:47.790440Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:47.790524Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:47.792005Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6912, node 3 2025-04-07T13:49:47.997236Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:47.997259Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:47.997267Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:47.997395Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22896 TClient is connected to server localhost:22896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:49:48.773222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:48.799964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:48.898125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:49.105764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:49.205046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:51.790359Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568095657447360:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:51.790440Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:51.845323Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:51.942540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:51.995586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:52.027619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:52.063929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:52.104426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:52.176581Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568099952415150:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:52.176694Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:52.176918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568099952415155:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:52.182832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:52.203764Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568099952415157:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:52.526492Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568078477576632:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:52.526581Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-07T13:49:53.458143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.738936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:54.913621Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7490568108542352125:2589] TxId: 281474976715674. Ctx: { TraceId: 01jr887jddfh8mrx1xz6w37z55, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGFhY2Y4OGUtNzUyZGRmY2EtMTRkMzhmMzUtYjdmNDJmMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2025-04-07T13:49:54.920639Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGFhY2Y4OGUtNzUyZGRmY2EtMTRkMzhmMzUtYjdmNDJmMzA=, ActorId: [3:7490568108542351684:2589], ActorState: ExecuteState, TraceId: 01jr887jddfh8mrx1xz6w37z55, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::SimpleFeatureFlags >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> TExtSubDomainTest::DeclareAndLs [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] >> EntityId::Order >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-04-07T13:49:55.597756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568116531607373:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:55.597809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a6d/r3tmp/tmpAhFpJt/pdisk_1.dat 2025-04-07T13:49:56.317037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:56.319590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:56.319661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:56.342359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26906 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:56.632251Z node 1 :TX_PROXY DEBUG: actor# [1:7490568116531607545:2100] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:56.632304Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568120826575116:2253] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:56.632392Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568116531607570:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:56.632426Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568116531607570:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:56.632564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:56.634661Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607267:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568120826575121:2254] 2025-04-07T13:49:56.634678Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607270:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568120826575122:2254] 2025-04-07T13:49:56.634730Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568116531607267:2049] Subscribe: subscriber# [1:7490568120826575121:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:56.634731Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568116531607270:2052] Subscribe: subscriber# [1:7490568120826575122:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:56.634787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607273:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568120826575123:2254] 2025-04-07T13:49:56.634841Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568116531607273:2055] Subscribe: subscriber# [1:7490568120826575123:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:56.634852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575121:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568116531607267:2049] 2025-04-07T13:49:56.634876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575122:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568116531607270:2052] 2025-04-07T13:49:56.634892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575123:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568116531607273:2055] 2025-04-07T13:49:56.634917Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568116531607570:2113], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:56.634969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120826575118:2254] 2025-04-07T13:49:56.634998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120826575119:2254] 2025-04-07T13:49:56.635003Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607267:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568120826575121:2254] 2025-04-07T13:49:56.635038Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607270:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568120826575122:2254] 2025-04-07T13:49:56.635060Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607273:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568120826575123:2254] 2025-04-07T13:49:56.635080Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568120826575117:2254][/dc-1] Set up state: owner# [1:7490568116531607570:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:56.635109Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568116531607570:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:56.635203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120826575120:2254] 2025-04-07T13:49:56.635243Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568120826575117:2254][/dc-1] Path was already updated: owner# [1:7490568116531607570:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:56.635275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575121:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575118:2254], cookie# 1 2025-04-07T13:49:56.635290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575122:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575119:2254], cookie# 1 2025-04-07T13:49:56.635303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575123:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7490568120826575120:2254], cookie# 1 2025-04-07T13:49:56.639594Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607267:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575121:2254], cookie# 1 2025-04-07T13:49:56.639660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607270:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575122:2254], cookie# 1 2025-04-07T13:49:56.639679Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607273:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575123:2254], cookie# 1 2025-04-07T13:49:56.639721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575121:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116531607267:2049], cookie# 1 2025-04-07T13:49:56.639737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575122:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116531607270:2052], cookie# 1 2025-04-07T13:49:56.639747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575123:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116531607273:2055], cookie# 1 2025-04-07T13:49:56.639783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120826575118:2254], cookie# 1 2025-04-07T13:49:56.639823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:56.639839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120826575119:2254], cookie# 1 2025-04-07T13:49:56.639860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:56.639877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120826575120:2254], cookie# 1 2025-04-07T13:49:56.639905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Unexpected sync response: sender# [1:7490568120826575120:2254], cookie# 1 2025-04-07T13:49:56.687659Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568116531607570:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 
2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 ... dleNotify: self# [1:7490568116531607570:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-07T13:49:56.969586Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568116531607570:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568120826575183:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797009 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:56.969652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568116531607570:2113], cacheItem# { Subscriber: { Subscriber: [1:7490568120826575183:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797009 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:56.969761Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568120826575190:2301], recipient# [1:7490568120826575182:2299], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:56.969803Z node 1 :TX_PROXY INFO: Actor# [1:7490568120826575182:2299] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-07T13:49:56.972604Z node 1 :TX_PROXY DEBUG: actor# [1:7490568116531607545:2100] Handle TEvNavigate describe path /dc-1 2025-04-07T13:49:56.972640Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568120826575192:2303] HANDLE EvNavigateScheme /dc-1 2025-04-07T13:49:56.972858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568116531607570:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 
2025-04-07T13:49:56.972942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568116531607570:2113], cookie# 4 2025-04-07T13:49:56.972993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575121:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575118:2254], cookie# 4 2025-04-07T13:49:56.973014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575122:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575119:2254], cookie# 4 2025-04-07T13:49:56.973038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575123:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575120:2254], cookie# 4 2025-04-07T13:49:56.973075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607267:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575121:2254], cookie# 4 2025-04-07T13:49:56.973121Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607270:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575122:2254], cookie# 4 2025-04-07T13:49:56.973147Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116531607273:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568120826575123:2254], cookie# 4 2025-04-07T13:49:56.973172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575121:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568116531607267:2049], cookie# 4 2025-04-07T13:49:56.973194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575122:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568116531607270:2052], cookie# 4 2025-04-07T13:49:56.973223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568120826575123:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568116531607273:2055], cookie# 4 2025-04-07T13:49:56.973261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120826575118:2254], cookie# 4 2025-04-07T13:49:56.973282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:56.973295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120826575119:2254], cookie# 4 2025-04-07T13:49:56.973308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:56.973361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120826575120:2254], cookie# 4 2025-04-07T13:49:56.973374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568120826575117:2254][/dc-1] Unexpected sync response: sender# [1:7490568120826575120:2254], cookie# 4 2025-04-07T13:49:56.973411Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
HandleNotify: self# [1:7490568116531607570:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:56.973494Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568116531607570:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568120826575117:2254] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033796974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:56.973558Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568116531607570:2113], cacheItem# { Subscriber: { Subscriber: [1:7490568120826575117:2254] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033796974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-07T13:49:56.973691Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568120826575193:2304], recipient# [1:7490568120826575192:2303], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:56.973725Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568120826575192:2303] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:56.973782Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568120826575192:2303] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:56.974255Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568120826575192:2303] Handle TEvDescribeSchemeResult Forward to# [1:7490568120826575191:2302] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033796974 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033796974 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744033797009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057... (TRUNCATED) >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> ObjectStorageListingTest::ListingNoFilter [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit87 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-07T13:49:55.696848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568114787347718:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:55.696900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a5b/r3tmp/tmpNfyOgI/pdisk_1.dat 2025-04-07T13:49:56.738028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:56.752803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:56.761920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:56.762015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:56.769707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24526 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:57.095408Z node 1 :TX_PROXY DEBUG: actor# [1:7490568114787347941:2101] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:57.095451Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283040:2440] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:57.095570Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568119082315270:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.095681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568119082315270:2123], cookie# 1 2025-04-07T13:49:57.096940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315631:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315628:2327], cookie# 1 2025-04-07T13:49:57.096979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315632:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315629:2327], cookie# 1 2025-04-07T13:49:57.096997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315633:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315630:2327], cookie# 1 2025-04-07T13:49:57.097045Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347674:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315631:2327], cookie# 1 2025-04-07T13:49:57.097068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347677:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315632:2327], cookie# 1 2025-04-07T13:49:57.097083Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347680:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315633:2327], cookie# 1 2025-04-07T13:49:57.097109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315631:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568114787347674:2049], cookie# 1 2025-04-07T13:49:57.097124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315632:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568114787347677:2052], cookie# 1 2025-04-07T13:49:57.097136Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315633:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568114787347680:2055], cookie# 1 2025-04-07T13:49:57.097166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119082315628:2327], cookie# 1 2025-04-07T13:49:57.097203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.097222Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490568119082315629:2327], cookie# 1 2025-04-07T13:49:57.097240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.097261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119082315630:2327], cookie# 1 2025-04-07T13:49:57.097273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Unexpected sync response: sender# [1:7490568119082315630:2327], cookie# 1 2025-04-07T13:49:57.097340Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568119082315270:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.107436Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568119082315270:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568119082315610:2327] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.107572Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568119082315270:2123], cacheItem# { Subscriber: { Subscriber: [1:7490568119082315610:2327] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:57.111369Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568123377283041:2441], recipient# [1:7490568123377283040:2440], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.111482Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283040:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.158142Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283040:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.160217Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283040:2440] Handle TEvDescribeSchemeResult Forward to# [1:7490568123377283039:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:49:57.185105Z node 1 :TX_PROXY DEBUG: actor# [1:7490568114787347941:2101] Handle TEvProposeTransaction 2025-04-07T13:49:57.185134Z node 1 :TX_PROXY DEBUG: actor# [1:7490568114787347941:2101] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:57.185208Z node 1 :TX_PROXY DEBUG: actor# [1:7490568114787347941:2101] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7490568123377283048:2447] 2025-04-07T13:49:57.275370Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283048:2447] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:57.275480Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283048:2447] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:57.275596Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568119082315270:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath Re ... 
Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-07T13:49:57.905468Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568119082315270:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568123377283227:2592] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797380 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.905543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568119082315270:2123], cacheItem# { Subscriber: { Subscriber: [1:7490568123377283227:2592] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797380 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-07T13:49:57.905703Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568123377283370:2676], recipient# [1:7490568123377283369:2675], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.905739Z node 1 :TX_PROXY INFO: Actor# [1:7490568123377283369:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-07T13:49:57.916733Z node 1 :TX_PROXY DEBUG: actor# [1:7490568114787347941:2101] Handle TEvNavigate describe path /dc-1 2025-04-07T13:49:57.916782Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283372:2678] HANDLE EvNavigateScheme /dc-1 2025-04-07T13:49:57.916876Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568119082315270:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.916972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568119082315270:2123], cookie# 4 2025-04-07T13:49:57.917024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315631:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: 
/dc-1 }: sender# [1:7490568119082315628:2327], cookie# 4 2025-04-07T13:49:57.917049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315632:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315629:2327], cookie# 4 2025-04-07T13:49:57.917067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315633:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315630:2327], cookie# 4 2025-04-07T13:49:57.917097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347674:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315631:2327], cookie# 4 2025-04-07T13:49:57.917123Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347677:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315632:2327], cookie# 4 2025-04-07T13:49:57.917137Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568114787347680:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568119082315633:2327], cookie# 4 2025-04-07T13:49:57.917181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315631:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568114787347674:2049], cookie# 4 2025-04-07T13:49:57.917197Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315632:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568114787347677:2052], cookie# 4 2025-04-07T13:49:57.917209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568119082315633:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568114787347680:2055], cookie# 4 2025-04-07T13:49:57.917234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568119082315628:2327], cookie# 4 2025-04-07T13:49:57.917268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.917285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568119082315629:2327], cookie# 4 2025-04-07T13:49:57.917301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.917336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568119082315630:2327], cookie# 4 2025-04-07T13:49:57.917361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568119082315610:2327][/dc-1] Unexpected sync response: sender# [1:7490568119082315630:2327], cookie# 4 2025-04-07T13:49:57.917402Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568119082315270:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.917463Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568119082315270:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { 
Subscriber: [1:7490568119082315610:2327] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797352 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.917558Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568119082315270:2123], cacheItem# { Subscriber: { Subscriber: [1:7490568119082315610:2327] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797352 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-07T13:49:57.917684Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568123377283373:2679], recipient# [1:7490568123377283372:2678], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.917720Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283372:2678] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.917816Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283372:2678] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.918444Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568123377283372:2678] Handle TEvDescribeSchemeResult Forward to# [1:7490568123377283371:2677] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797352 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 
} DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797352 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744033797380 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depric... (TRUNCATED) >> KqpLimits::ReplySizeExceeded [GOOD] >> Cache::Test5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-04-07T13:49:55.972744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568116734296034:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:55.972985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a55/r3tmp/tmp5sZiAp/pdisk_1.dat 2025-04-07T13:49:56.823839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:56.823947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:56.825250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:56.862044Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23390 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:57.077059Z node 1 :TX_PROXY DEBUG: actor# [1:7490568121029263428:2100] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:57.077125Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231018:2257] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:57.077258Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568121029263451:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.077412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568121029263451:2113], cookie# 1 2025-04-07T13:49:57.078844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263707:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263704:2250], cookie# 1 2025-04-07T13:49:57.078892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263708:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263705:2250], cookie# 1 2025-04-07T13:49:57.078907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263709:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263706:2250], cookie# 1 2025-04-07T13:49:57.078991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295855:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263707:2250], cookie# 1 2025-04-07T13:49:57.079012Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295858:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263708:2250], cookie# 1 2025-04-07T13:49:57.079024Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295861:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263709:2250], cookie# 1 2025-04-07T13:49:57.079088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263707:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116734295855:2049], cookie# 1 2025-04-07T13:49:57.079106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263708:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116734295858:2052], cookie# 1 2025-04-07T13:49:57.079118Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263709:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568116734295861:2055], cookie# 1 2025-04-07T13:49:57.079141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568121029263704:2250], cookie# 1 2025-04-07T13:49:57.079160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.079186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490568121029263705:2250], cookie# 1 2025-04-07T13:49:57.079203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.079229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568121029263706:2250], cookie# 1 2025-04-07T13:49:57.079243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Unexpected sync response: sender# [1:7490568121029263706:2250], cookie# 1 2025-04-07T13:49:57.079307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568121029263451:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.089992Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568121029263451:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568121029263703:2250] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.090093Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568121029263451:2113], cacheItem# { Subscriber: { Subscriber: [1:7490568121029263703:2250] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:57.092097Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568125324231019:2258], recipient# [1:7490568125324231018:2257], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.092147Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231018:2257] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.157028Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231018:2257] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.164507Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231018:2257] Handle TEvDescribeSchemeResult Forward to# [1:7490568125324231016:2255] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:49:57.184546Z node 1 :TX_PROXY DEBUG: actor# [1:7490568121029263428:2100] Handle TEvProposeTransaction 2025-04-07T13:49:57.184570Z node 1 :TX_PROXY DEBUG: actor# [1:7490568121029263428:2100] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:57.184641Z node 1 :TX_PROXY DEBUG: actor# [1:7490568121029263428:2100] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568125324231025:2263] 2025-04-07T13:49:57.260447Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231025:2263] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:57.260508Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231025:2263] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:57.260590Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568121029263451:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.260704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568121029263451:2113], cookie ... 2] Version: 18446744073709551615 }: sender# [1:7490568121029263698:2246], cookie# 281474976710659 2025-04-07T13:49:57.326225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-04-07T13:49:57.326237Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7490568121029263697:2245] Ack for unknown update (already acked?): sender# [1:7490568121029263698:2246], cookie# 281474976710659 2025-04-07T13:49:57.326243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:49:57.326250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710659:0 2025-04-07T13:49:57.326305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T13:49:57.326486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-07T13:49:57.326511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-07T13:49:57.326551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:49:57.326572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:49:57.326600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:49:57.326757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T13:49:57.326785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, 
LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:49:57.326819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:49:57.327237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T13:49:57.327267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 2025-04-07T13:49:57.329300Z node 1 :TX_PROXY DEBUG: actor# [1:7490568121029263428:2100] Handle TEvNavigate describe path /dc-1 2025-04-07T13:49:57.329372Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231095:2317] HANDLE EvNavigateScheme /dc-1 2025-04-07T13:49:57.329436Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568121029263451:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.329497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568121029263451:2113], cookie# 4 2025-04-07T13:49:57.329559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263707:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263704:2250], cookie# 4 2025-04-07T13:49:57.329579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263708:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263705:2250], cookie# 4 2025-04-07T13:49:57.329592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263709:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263706:2250], cookie# 4 2025-04-07T13:49:57.329615Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295855:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263707:2250], cookie# 4 2025-04-07T13:49:57.329634Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295858:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263708:2250], cookie# 4 2025-04-07T13:49:57.329666Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568116734295861:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568121029263709:2250], cookie# 4 2025-04-07T13:49:57.329687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263707:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568116734295855:2049], cookie# 4 2025-04-07T13:49:57.329699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263708:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568116734295858:2052], cookie# 4 2025-04-07T13:49:57.329709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568121029263709:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568116734295861:2055], 
cookie# 4 2025-04-07T13:49:57.329742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568121029263704:2250], cookie# 4 2025-04-07T13:49:57.329757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.329769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568121029263705:2250], cookie# 4 2025-04-07T13:49:57.329782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.329798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7490568121029263706:2250], cookie# 4 2025-04-07T13:49:57.329819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568121029263703:2250][/dc-1] Unexpected sync response: sender# [1:7490568121029263706:2250], cookie# 4 2025-04-07T13:49:57.329850Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568121029263451:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.329903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568121029263451:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568121029263703:2250] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797317 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.329953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568121029263451:2113], cacheItem# { Subscriber: { Subscriber: [1:7490568121029263703:2250] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797317 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-07T13:49:57.330063Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568125324231096:2318], recipient# [1:7490568125324231095:2317], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.330098Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231095:2317] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.330160Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231095:2317] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.330643Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568125324231095:2317] Handle TEvDescribeSchemeResult Forward to# [1:7490568125324231094:2316] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797317 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797317 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution:... (TRUNCATED) >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |84.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-04-07T13:49:58.141967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:58.142272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:58.142483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001257/r3tmp/tmpQ3o0gW/pdisk_1.dat 2025-04-07T13:49:58.523579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:58.563894Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:58.614426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:58.614608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:58.628266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:58.745599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:58.784171Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:58.784443Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:58.827543Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:58.827647Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:58.829233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:58.829362Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:58.829420Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:58.829721Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:58.857624Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:58.857862Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:58.857997Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:58.858044Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:58.858082Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:58.858128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:58.858617Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:58.858710Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:58.859116Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:49:58.859224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:58.859290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:58.859335Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:58.859383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:58.859568Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:58.859829Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:58.859920Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:58.861716Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:58.872513Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:58.872637Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:59.057557Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:49:59.070079Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:49:59.070191Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:59.071180Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:59.071241Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:49:59.071287Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:49:59.071550Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:49:59.071713Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:49:59.072170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:59.072225Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:49:59.073997Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:49:59.074307Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:59.075489Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:49:59.075526Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:59.075963Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:49:59.076028Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:49:59.076084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:59.076935Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:59.076967Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:59.077007Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:49:59.077055Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:49:59.077115Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:49:59.077175Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:59.079768Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:59.081543Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:49:59.081627Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:49:59.081678Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:49:59.088800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:59.088910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:59.088975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:59.093344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:49:59.098369Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:59.292065Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:59.295351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:49:59.679340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr887pxf2v41nb7a9qs3w4y0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5ZWQyYzMtYjZhODA2MzMtODQ0MWYyMmUtNjAwNzJiYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:49:59.686314Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:49:59.686636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:59.699537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:59.699680Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:59.703549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:49:59.703813Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-07T13:49:59.704052Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-04-07T13:49:59.704279Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-04-07T13:49:56.804957Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568120557601377:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:56.805603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a42/r3tmp/tmpPXmTII/pdisk_1.dat 2025-04-07T13:49:57.337191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:57.337283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:57.341188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:57.347079Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6600 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:57.649567Z node 1 :TX_PROXY DEBUG: actor# [1:7490568120557601462:2112] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:57.649621Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124852569222:2427] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:57.649747Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568120557601485:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.649778Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568120557601485:2125], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:57.649943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:57.651565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601169:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568124852569227:2428] 2025-04-07T13:49:57.651619Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568120557601169:2049] Subscribe: subscriber# [1:7490568124852569227:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:57.651665Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601172:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568124852569228:2428] 2025-04-07T13:49:57.651674Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568120557601172:2052] Subscribe: subscriber# [1:7490568124852569228:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:57.651684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601175:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568124852569229:2428] 2025-04-07T13:49:57.651691Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568120557601175:2055] Subscribe: subscriber# [1:7490568124852569229:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:57.651718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569227:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120557601169:2049] 2025-04-07T13:49:57.651740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569228:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120557601172:2052] 2025-04-07T13:49:57.651762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569229:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568120557601175:2055] 2025-04-07T13:49:57.651789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568124852569224:2428] 2025-04-07T13:49:57.651821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568124852569225:2428] 2025-04-07T13:49:57.651860Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568124852569223:2428][/dc-1] Set up state: owner# [1:7490568120557601485:2125], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:57.651924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568124852569226:2428] 2025-04-07T13:49:57.651957Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568124852569223:2428][/dc-1] Path was already updated: owner# [1:7490568120557601485:2125], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:57.652001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569227:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569224:2428], cookie# 1 2025-04-07T13:49:57.652011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569228:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569225:2428], cookie# 1 2025-04-07T13:49:57.652018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569229:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569226:2428], cookie# 1 2025-04-07T13:49:57.652034Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601169:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568124852569227:2428] 2025-04-07T13:49:57.652055Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601169:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569227:2428], cookie# 1 2025-04-07T13:49:57.652069Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601172:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568124852569228:2428] 2025-04-07T13:49:57.652078Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601172:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569228:2428], cookie# 1 2025-04-07T13:49:57.652088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601175:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568124852569229:2428] 2025-04-07T13:49:57.652096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601175:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569229:2428], cookie# 1 2025-04-07T13:49:57.652720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569227:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120557601169:2049], cookie# 1 2025-04-07T13:49:57.652738Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569228:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120557601172:2052], cookie# 1 2025-04-07T13:49:57.652749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569229:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568120557601175:2055], cookie# 1 2025-04-07T13:49:57.652797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124852569224:2428], cookie# 1 2025-04-07T13:49:57.652846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.652879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124852569225:2428], cookie# 1 2025-04-07T13:49:57.652897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.652919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124852569226:2428], cookie# 1 2025-04-07T13:49:57.652935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Unexpected sync response: sender# [1:7490568124852569226:2428], cookie# 1 2025-04-07T13:49:57.696441Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568120557601485:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:57.696831Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568120557601485:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-07T13:49:58.541157Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568120557601485:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568129147536717:2575] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797982 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:58.541223Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568120557601485:2125], cacheItem# { Subscriber: { Subscriber: [1:7490568129147536717:2575] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744033797982 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-07T13:49:58.541383Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568129147536859:2668], recipient# [1:7490568129147536858:2667], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:58.541409Z node 1 :TX_PROXY INFO: Actor# [1:7490568129147536858:2667] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-07T13:49:58.547901Z node 1 :TX_PROXY DEBUG: actor# [1:7490568120557601462:2112] Handle TEvNavigate describe path /dc-1 2025-04-07T13:49:58.547939Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129147536861:2670] HANDLE EvNavigateScheme /dc-1 2025-04-07T13:49:58.548027Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568120557601485:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:58.548092Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568120557601485:2125], cookie# 4 2025-04-07T13:49:58.548162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569227:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569224:2428], cookie# 4 2025-04-07T13:49:58.548177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569228:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569225:2428], cookie# 4 2025-04-07T13:49:58.548191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569229:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569226:2428], cookie# 4 2025-04-07T13:49:58.548215Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601169:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569227:2428], cookie# 4 2025-04-07T13:49:58.548238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601172:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569228:2428], cookie# 4 2025-04-07T13:49:58.548254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568120557601175:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124852569229:2428], cookie# 4 2025-04-07T13:49:58.548287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569227:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120557601169:2049], cookie# 4 2025-04-07T13:49:58.548302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569228:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120557601172:2052], cookie# 4 2025-04-07T13:49:58.548314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124852569229:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568120557601175:2055], cookie# 4 2025-04-07T13:49:58.548360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568124852569224:2428], cookie# 4 2025-04-07T13:49:58.548377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:58.548390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568124852569225:2428], cookie# 4 2025-04-07T13:49:58.548406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:58.548453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7490568124852569226:2428], cookie# 4 2025-04-07T13:49:58.548465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124852569223:2428][/dc-1] Unexpected sync response: sender# [1:7490568124852569226:2428], cookie# 4 2025-04-07T13:49:58.548499Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568120557601485:2125], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:58.548567Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568120557601485:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568124852569223:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797940 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:58.548630Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568120557601485:2125], cacheItem# { Subscriber: { Subscriber: [1:7490568124852569223:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744033797940 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-07T13:49:58.548775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568129147536862:2671], recipient# [1:7490568129147536861:2670], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:58.548801Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129147536861:2670] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:58.548878Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129147536861:2670] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:58.549423Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129147536861:2670] Handle TEvDescribeSchemeResult Forward to# [1:7490568129147536860:2669] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797940 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { 
SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033797940 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744033797982 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depric... (TRUNCATED) |84.9%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} >> SplitterBasic::LimitExceed [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 9825, MsgBus: 12650 2025-04-07T13:49:33.039162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568020048707573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.039200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001398/r3tmp/tmpLKk5DN/pdisk_1.dat 2025-04-07T13:49:33.420407Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9825, node 1 2025-04-07T13:49:33.477176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:33.477277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:33.478921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:33.537464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.537489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.537499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.537594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12650 TClient is connected to server localhost:12650 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:34.138085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.171118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.302753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:34.462460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:34.556630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.431170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568032933610952:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.431276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.777637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.802247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.833033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.859596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.887458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.964388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.063062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568037228578754:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.063123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.066680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568037228578759:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.070081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.081649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568037228578761:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:38.040176Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568020048707573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.040237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:38.128974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:41.825575Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTY5OWM5ODItNTIwNWU0NGQtYzZiMDlhM2YtNTgyZTk1Yzg=, ActorId: [1:7490568050113481880:2573], ActorState: ExecuteState, TraceId: 01jr88756v5pwkkqb8bhywhknt, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001703 > 50331648), code: 2013 Trying to start YDB, gRPC: 13778, MsgBus: 5765 2025-04-07T13:49:42.859951Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568060295365838:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:42.860041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001398/r3tmp/tmpLsEmMk/pdisk_1.dat 2025-04-07T13:49:43.046695Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:43.066465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:43.066546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:43.067863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13778, node 2 2025-04-07T13:49:43.240327Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:43.240359Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:43.240374Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:43.240499Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5765 TClient is connected to server localhost:5765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:43.879802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:43.890522Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:43.897917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:43.981918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:44.174864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESch ... ol default not found or you don't have access permissions } 2025-04-07T13:49:46.425918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.485669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.547079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.610918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.688324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.771125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.850630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:46.944026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568077475237205:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.944099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.944297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568077475237210:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:46.950677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:46.963211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568077475237212:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:47.862408Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568060295365838:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:47.862475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:47.988337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 4180, MsgBus: 64408 2025-04-07T13:49:49.840497Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568088224294975:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:49.840533Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001398/r3tmp/tmp5PNTKT/pdisk_1.dat 2025-04-07T13:49:50.022496Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:50.045733Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:50.045818Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:50.047543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4180, node 3 2025-04-07T13:49:50.120130Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:50.120153Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:50.120161Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:50.120304Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64408 TClient is connected to server localhost:64408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:50.706056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:50.715112Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:50.724458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:50.821852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:51.023399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:51.134067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:53.537608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568105404165820:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:53.537768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:53.569644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.620196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.656230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.705684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.767189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.839360Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:53.909213Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568105404166315:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:53.909299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:53.909690Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568105404166320:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:53.913780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:53.935473Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568105404166322:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:54.841267Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568088224294975:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:54.841325Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:55.383656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:59.852924Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjA4OWQ5YjgtY2MxZmNiOTgtMTM4ZmU4M2ItOTEyNzFjNTU=, ActorId: [3:7490568113994101231:2461], ActorState: ExecuteState, TraceId: 01jr887q8a26epyaeh6xcsjjf6, Create QueryResponse for error on request, msg: |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-04-07T13:49:45.681843Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568070218724566:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.681972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:45.746202Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568072018722162:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.746287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001536/r3tmp/tmpbZvsrf/pdisk_1.dat 2025-04-07T13:49:46.623567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.623654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.625707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.625763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.633532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:46.634070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:46.634913Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:49:46.641521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:46.773193Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} 
ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.119816s 2025-04-07T13:49:46.773272Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.119910s TClient is connected to server localhost:9967 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:47.120993Z node 1 :TX_PROXY DEBUG: actor# [1:7490568070218724577:2139] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:47.121045Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568078808659639:2458] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:47.121177Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568070218724601:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:47.121284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568070218724601:2153], cookie# 1 2025-04-07T13:49:47.124110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692277:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692274:2410], cookie# 1 2025-04-07T13:49:47.124174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692278:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692275:2410], cookie# 1 2025-04-07T13:49:47.124199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692279:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692276:2410], cookie# 1 2025-04-07T13:49:47.124253Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568070218724243:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692277:2410], cookie# 1 2025-04-07T13:49:47.124290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568070218724246:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692278:2410], cookie# 1 2025-04-07T13:49:47.124310Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568070218724249:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568074513692279:2410], cookie# 1 2025-04-07T13:49:47.124350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692277:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568070218724243:2052], cookie# 1 2025-04-07T13:49:47.124364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692278:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568070218724246:2055], cookie# 1 2025-04-07T13:49:47.124381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568074513692279:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568070218724249:2058], cookie# 1 2025-04-07T13:49:47.124411Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568074513692274:2410], cookie# 1 2025-04-07T13:49:47.124433Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [main][1:7490568074513692272:2410][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:47.124450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568074513692275:2410], cookie# 1 2025-04-07T13:49:47.124475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:47.124495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568074513692276:2410], cookie# 1 2025-04-07T13:49:47.124513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568074513692272:2410][/dc-1] Unexpected sync response: sender# [1:7490568074513692276:2410], cookie# 1 2025-04-07T13:49:47.124592Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568070218724601:2153], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:47.143624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568070218724601:2153], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568074513692272:2410] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:47.143730Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568070218724601:2153], cacheItem# { Subscriber: { Subscriber: [1:7490568074513692272:2410] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:47.145323Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568078808659640:2459], recipient# [1:7490568078808659639:2458], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:47.145394Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568078808659639:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:47.200770Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568078808659639:2458] SEND to# 72057594046644480 shardToRequest 
NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:47.203882Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568078808659639:2458] Handle TEvDescribeSchemeResult Forward to# [1:7490568078808659638:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:49:47.220238Z node 1 :TX_PROXY DEBUG: actor# [1:7490568070218724577:2139] Handle TEvProposeTransaction 2025-04-07T13:49:47.220262Z node 1 :TX_PROXY DEBUG: actor# [1:7490568070218724577:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:47.220366Z node 1 :TX_PROXY DEBUG: actor# [ ... 
em# { Subscriber: { Subscriber: [3:7490568116413895049:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.119785Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568133593764290:2218], recipient# [3:7490568133593764288:2513], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.119813Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568133593764291:2219], recipient# [3:7490568133593764289:2514], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.408561Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568094939058385:2169], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.408701Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568094939058385:2169], cacheItem# { Subscriber: { Subscriber: [3:7490568120708862356:2199] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.408764Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568094939058385:2169], cacheItem# { Subscriber: { Subscriber: [3:7490568120708862357:2200] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { 
Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.408846Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568133593764300:2220], recipient# [3:7490568133593764299:2520], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.772821Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568094939058385:2169], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.772954Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568094939058385:2169], cacheItem# { Subscriber: { Subscriber: [3:7490568120708862365:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.773055Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568133593764302:2221], recipient# [3:7490568133593764301:2521], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.861672Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072018722391:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.861904Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[2:7490568072018722391:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568076313689714:2109] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.862026Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568132148264657:2142], recipient# [2:7490568132148264656:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.875698Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072018722391:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.875844Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072018722391:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568076313689714:2109] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:49:59.875925Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568132148264659:2143], recipient# [2:7490568132148264658:2306], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:00.103070Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568072018722391:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:00.103299Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568072018722391:2104], cacheItem# { Subscriber: { Subscriber: [2:7490568102083493518:2119] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:00.103442Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568136443231957:2144], recipient# [2:7490568136443231956:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] Test command err: 2025-04-07T13:49:06.707502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:06.707751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:06.707980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00109f/r3tmp/tmpvn57GH/pdisk_1.dat 2025-04-07T13:49:07.120247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.180555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:07.237386Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:07.238397Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:07.238570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:07.238706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:07.253856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:07.375687Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:07.375750Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:07.375911Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:07.483700Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:07.484159Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:07.484215Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:07.484396Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:07.484529Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:07.484604Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:07.484831Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:07.485849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.486474Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:07.486531Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:07.523101Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:07.524106Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:07.524492Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:07.532443Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:07.594700Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:07.595395Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:07.595524Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:07.597280Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:07.597359Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:07.597413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:07.597718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:07.652570Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:07.652798Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:07.652932Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:07.652979Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:07.653021Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:07.653059Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:07.653446Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.653534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.653725Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:07.653816Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:07.654223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.654269Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.654311Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:07.654366Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:07.654428Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:07.654471Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:07.654504Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:07.654538Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:07.654569Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:07.654615Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:07.654830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:07.654895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:07.654984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:07.655196Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:07.655252Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:07.655334Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:07.655457Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:07.655501Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:07.655537Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:07.655587Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.655840Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:07.655877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:07.655910Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:07.655941Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.655996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:07.656027Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:07.656058Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:07.656103Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:07.656140Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:07.660573Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:07.660649Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.660700Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.660753Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:49:07.660837Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:07.663104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:07.663158Z n ... 88, tableId# 2, last full compaction# 1970-01-01T00:00:04.039122Z 2025-04-07T13:50:00.731596Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:721:2601], Recipient [7:632:2537]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T13:50:00.731673Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T13:50:00.731730Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-07T13:50:00.731821Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T13:50:00.731896Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T13:50:00.731981Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:632:2537], Recipient [7:721:2601]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T13:50:00.732010Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T13:50:00.732035Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-07T13:50:00.732079Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T13:50:00.732109Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-07T13:50:00.732172Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:721:2601], Recipient [7:632:2537]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 
72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T13:50:00.732200Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T13:50:00.732224Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-07T13:50:00.732257Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T13:50:00.732404Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [7:945:2746], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:50:00.732607Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:632:2537], Recipient [7:721:2601]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T13:50:00.732635Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T13:50:00.732829Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-07T13:50:00.732878Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T13:50:00.732971Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [7:945:2746], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:50:00.733363Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:00.733617Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1482 } } ComputeActorStats { } 2025-04-07T13:50:00.734085Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 813 } } ComputeActorStats { } 2025-04-07T13:50:00.735553Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T13:50:00.735679Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:632:2537], Recipient [7:721:2601]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:50:00.735717Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:00.735771Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 
txId 281474976715663 2025-04-07T13:50:00.761122Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-07T13:50:00.761538Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:721:2601], Recipient [7:632:2537]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-07T13:50:00.761605Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:00.761654Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-07T13:50:01.085826Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T13:50:01.085913Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-04-07T13:50:01.086778Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr887rhscmfswxge4qzt386z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjU1NWRlZGUtYTJmOGE1YTUtMTQyYWYyYTEtZjI3ZGQ3OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-04-07T13:50:01.091724Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1069:2866], Recipient [7:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T13:50:01.091833Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T13:50:01.091904Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-04-07T13:50:01.091964Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-04-07T13:50:01.092049Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-07T13:50:01.092162Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T13:50:01.092217Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-07T13:50:01.092269Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:50:01.092316Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:50:01.092366Z node 7 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-07T13:50:01.092412Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T13:50:01.092438Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:50:01.092459Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 
2025-04-07T13:50:01.092483Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T13:50:01.092616Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T13:50:01.092981Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1069:2866], 0} after executionsCount# 1 2025-04-07T13:50:01.093050Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1069:2866], 0} sends rowCount# 2, bytes# 72, quota rows left# 999, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:50:01.093146Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1069:2866], 0} finished in read 2025-04-07T13:50:01.093226Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T13:50:01.093255Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T13:50:01.093281Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:01.093324Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:01.093372Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T13:50:01.093393Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:01.093421Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T13:50:01.093480Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T13:50:01.093597Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T13:50:01.094488Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1069:2866], Recipient [7:632:2537]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T13:50:01.094560Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::GenericCases [GOOD] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> OperationMapping::IndexBuildSuccess [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> YdbOlapStore::LogLast50 >> YdbImport::Simple >> 
TTableProfileTests::DescribeTableWithPartitioningPolicy |84.9%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> YdbTableBulkUpsert::ValidRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-07T13:49:56.986399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568119912602944:2149];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:56.991352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a36/r3tmp/tmp5mdIDo/pdisk_1.dat 2025-04-07T13:49:57.816217Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:57.826892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:57.826977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:57.855176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16286 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:58.107323Z node 1 :TX_PROXY DEBUG: actor# [1:7490568124207570357:2111] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:58.107367Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538146:2437] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:58.107636Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:58.107748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568124207570381:2124], cookie# 1 2025-04-07T13:49:58.123145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570837:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124207570834:2432], cookie# 1 2025-04-07T13:49:58.123201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570838:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124207570835:2432], cookie# 1 2025-04-07T13:49:58.123216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570839:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124207570836:2432], cookie# 1 2025-04-07T13:49:58.123241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602774:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124207570837:2432], cookie# 1 2025-04-07T13:49:58.123263Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602777:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: 
/dc-1 }: sender# [1:7490568124207570838:2432], cookie# 1 2025-04-07T13:49:58.123277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602780:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124207570839:2432], cookie# 1 2025-04-07T13:49:58.123302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570837:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119912602774:2050], cookie# 1 2025-04-07T13:49:58.123318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570838:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119912602777:2053], cookie# 1 2025-04-07T13:49:58.123329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124207570839:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119912602780:2056], cookie# 1 2025-04-07T13:49:58.123360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124207570834:2432], cookie# 1 2025-04-07T13:49:58.123380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:58.123392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124207570835:2432], cookie# 1 2025-04-07T13:49:58.123407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:58.123427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124207570836:2432], cookie# 1 2025-04-07T13:49:58.123442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/dc-1] Unexpected sync response: sender# [1:7490568124207570836:2432], cookie# 1 2025-04-07T13:49:58.123493Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568124207570381:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:58.130117Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568124207570381:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568124207570833:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:58.130219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568124207570833:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:58.132333Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568128502538149:2440], recipient# [1:7490568128502538146:2437], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:58.132392Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538146:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:58.177702Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538146:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:58.180031Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538146:2437] Handle TEvDescribeSchemeResult Forward to# [1:7490568128502538145:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 
Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:49:58.244976Z node 1 :TX_PROXY DEBUG: actor# [1:7490568124207570357:2111] Handle TEvProposeTransaction 2025-04-07T13:49:58.245005Z node 1 :TX_PROXY DEBUG: actor# [1:7490568124207570357:2111] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:58.245135Z node 1 :TX_PROXY DEBUG: actor# [1:7490568124207570357:2111] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568128502538154:2444] 2025-04-07T13:49:58.389808Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538154:2444] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:58.389894Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568128502538154:2444] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:58.389998Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:58.390077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124207570833:2432][/d ... 7490568119912602777:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568141387440844:3014] 2025-04-07T13:50:01.807501Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568141387440832:3014] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:01.807513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602777:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568141387440850:3015] 2025-04-07T13:50:01.807543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568124207570381:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-07T13:50:01.807556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602780:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568141387440839:3013] 2025-04-07T13:50:01.807580Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602780:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568141387440845:3014] 2025-04-07T13:50:01.807602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119912602780:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7490568141387440851:3015] 2025-04-07T13:50:01.807603Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# 
[1:7490568124207570381:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7490568141387440833:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:01.807660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568141387440833:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:01.807689Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568141387440852:3016], recipient# [1:7490568141387440823:2303], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:01.807758Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568141387440853:3017], recipient# [1:7490568141387440830:2308], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:01.986073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568119912602944:2149];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:01.986170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:02.024597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.024742Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: 
[1:7490568124207570841:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.024832Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568145682408157:3021], recipient# [1:7490568145682408156:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.809316Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.809469Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568141387440831:3013] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.809571Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568145682408173:3024], recipient# [1:7490568145682408172:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.992969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.993090Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568124207570841:2434] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.993164Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568145682408183:3030], recipient# [1:7490568145682408182:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:03.027909Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124207570381:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:03.028043Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124207570381:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568124207570841:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:03.028129Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568149977375481:3031], recipient# [1:7490568149977375480:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> OperationMapping::IndexBuildCanceled [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-04-07T13:49:45.724283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568071118404987:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001530/r3tmp/tmpAWwyHj/pdisk_1.dat 2025-04-07T13:49:45.876316Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:46.191530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:46.206270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:46.206356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:46.217630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5009 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:46.465760Z node 1 :TX_PROXY DEBUG: actor# [1:7490568071118405020:2129] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:46.465792Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568075413372800:2431] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:46.465892Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568071118405084:2143], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:46.465930Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568071118405084:2143], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:46.466051Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:46.467245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404744:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568075413372805:2432] 2025-04-07T13:49:46.467289Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568071118404744:2051] Subscribe: subscriber# [1:7490568075413372805:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.467321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404747:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568075413372806:2432] 2025-04-07T13:49:46.467333Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568071118404747:2054] Subscribe: subscriber# [1:7490568075413372806:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.467345Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404750:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568075413372807:2432] 2025-04-07T13:49:46.467353Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568071118404750:2057] Subscribe: subscriber# [1:7490568075413372807:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:46.467435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372805:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568071118404744:2051] 2025-04-07T13:49:46.467448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372806:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1] Version: 2 }: sender# [1:7490568071118404747:2054] 2025-04-07T13:49:46.467456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372807:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568071118404750:2057] 2025-04-07T13:49:46.467480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568075413372802:2432] 2025-04-07T13:49:46.467496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568075413372803:2432] 2025-04-07T13:49:46.467542Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568075413372801:2432][/dc-1] Set up state: owner# [1:7490568071118405084:2143], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.467612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568075413372804:2432] 2025-04-07T13:49:46.467644Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568075413372801:2432][/dc-1] Path was already updated: owner# [1:7490568071118405084:2143], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:46.467666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372805:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372802:2432], cookie# 1 2025-04-07T13:49:46.467674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372806:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372803:2432], cookie# 1 2025-04-07T13:49:46.467680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372807:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372804:2432], cookie# 1 2025-04-07T13:49:46.467697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404744:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568075413372805:2432] 2025-04-07T13:49:46.467711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404744:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372805:2432], cookie# 1 2025-04-07T13:49:46.467724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404747:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568075413372806:2432] 2025-04-07T13:49:46.467731Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404747:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372806:2432], cookie# 1 
2025-04-07T13:49:46.467738Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404750:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568075413372807:2432] 2025-04-07T13:49:46.467744Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568071118404750:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568075413372807:2432], cookie# 1 2025-04-07T13:49:46.468754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372805:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568071118404744:2051], cookie# 1 2025-04-07T13:49:46.468775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372806:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568071118404747:2054], cookie# 1 2025-04-07T13:49:46.468789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568075413372807:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568071118404750:2057], cookie# 1 2025-04-07T13:49:46.468830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568075413372802:2432], cookie# 1 2025-04-07T13:49:46.468854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:46.468869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568075413372803:2432], cookie# 1 2025-04-07T13:49:46.468890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:46.468926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568075413372804:2432], cookie# 1 2025-04-07T13:49:46.468940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568075413372801:2432][/dc-1] Unexpected sync response: sender# [1:7490568075413372804:2432], cookie# 1 TClient::Ls response: 2025-04-07T13:49:46.513682Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568071118405084:2143], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables 
{ TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:46.514008Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568071118405084:2143], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { Sche ... criber: [2:7490568139914370567:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.389387Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568144209337975:2349], recipient# [2:7490568144209337974:2535], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.438652Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568105554632083:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.438801Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568105554632083:2235], cacheItem# { Subscriber: { Subscriber: [2:7490568139914370567:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.438868Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568105554632083:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.438941Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568105554632083:2235], cacheItem# { Subscriber: { Subscriber: [2:7490568139914370567:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.438998Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568144209337978:2350], recipient# [2:7490568144209337976:2536], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.439055Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568144209337979:2351], recipient# [2:7490568144209337977:2537], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.570807Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568105554632083:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.570953Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568105554632083:2235], cacheItem# { Subscriber: { Subscriber: [2:7490568139914370582:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.571053Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568144209337981:2352], recipient# [2:7490568144209337980:2538], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676367Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568079796688337:2224], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676507Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568079796688337:2224], cacheItem# { Subscriber: { Subscriber: [3:7490568096976557655:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.676585Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568079796688337:2224], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676652Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568079796688337:2224], cacheItem# { Subscriber: { Subscriber: [3:7490568096976557655:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.676721Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568079796688337:2224], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676793Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568079796688337:2224], cacheItem# { Subscriber: { Subscriber: [3:7490568096976557688:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:02.676807Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568144221206782:5643], recipient# [3:7490568144221206779:3659], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676896Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568144221206783:5644], recipient# [3:7490568144221206780:3660], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:02.676902Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568144221206784:5645], recipient# [3:7490568144221206781:3661], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsNoLimit |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> SplitPathTests::WithDatabaseShouldFail >> SplitPathTests::WithDatabaseShouldFail [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> OperationMapping::IndexBuildRejected [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> THealthCheckTest::TestBootingTabletIsNotDead >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |85.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> Viewer::SimpleFeatureFlags [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> TGRpcYdbTest::MakeListRemoveDirectory |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |85.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound |85.0%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> YdbYqlClient::TestReadTableOneBatch >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] |85.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |85.0%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::TestYqlIssues >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts >> THealthCheckTest::StorageLimit87 [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> THealthCheckTest::StorageLimit80 >> GroupWriteTest::ByTableName [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> TGRpcNewCoordinationClient::CheckUnauthorized >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |85.0%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |85.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |85.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 8332081399463613866 2025-04-07T13:49:34.732629Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.153120s 2025-04-07T13:49:34.737981Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.171335s 2025-04-07T13:49:36.758797Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T13:49:36.989057Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T13:49:36.989156Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T13:49:37.006542Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received TEvBlockResult {Status# OK} 2025-04-07T13:49:37.117082Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T13:49:37.139499Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T13:50:02.801685Z 6 00h01m23.910512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:5:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2716 2025-04-07T13:50:09.461415Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2025-04-07T13:50:09.461532Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T13:50:09.461591Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2025-04-07T13:50:09.461632Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T13:50:09.523769Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-07T13:50:09.523881Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received
TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: 2025-04-07T13:48:17.771251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:17.771553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:17.771673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 9387, node 1 TClient is connected to server localhost:3109 2025-04-07T13:48:40.182902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:285:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:40.183146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:40.183227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19165, node 2 TClient is connected to server localhost:6185 2025-04-07T13:48:53.680091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:297:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:53.680563Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:53.680641Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13847, node 3 TClient is connected to server localhost:11662 2025-04-07T13:49:04.491597Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:04.491847Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:04.492019Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6585, node 4 TClient is connected to server localhost:27842 2025-04-07T13:49:14.415101Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:284:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:14.415397Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:14.415545Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2724, node 5 TClient is connected to server localhost:9872 2025-04-07T13:49:26.056021Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:297:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:26.056417Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:26.056506Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 3081, node 6 TClient is connected to server localhost:61718 2025-04-07T13:49:37.767247Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:37.767711Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:37.767796Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8027, node 7 TClient is connected to server localhost:2984 2025-04-07T13:49:51.434826Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:455:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:51.435316Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:51.435505Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:49:51.971419Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:52.150574Z node 8 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:49:52.183936Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:49:52.992160Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 26873, node 8 TClient is connected to server localhost:2055 2025-04-07T13:49:53.629698Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:53.629800Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:53.629871Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:53.630330Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:58.188098Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7490568127029788660:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:58.269620Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:49:59.049249Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:59.061618Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:59.061827Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:59.067660Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27966, node 11 2025-04-07T13:49:59.381737Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:59.381774Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:59.381790Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:59.382035Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11493 2025-04-07T13:50:03.171095Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7490568127029788660:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:03.171172Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 1225, MsgBus: 25981 2025-04-07T13:49:33.815156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568021517180774:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.815202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001382/r3tmp/tmpnlcyX9/pdisk_1.dat 2025-04-07T13:49:34.224733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:34.268724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:34.268897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1225, node 1 2025-04-07T13:49:34.276048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:34.364978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:34.365002Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:34.365009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:34.365088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25981 TClient is connected to server localhost:25981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:35.058189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.078647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.233270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:35.403295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:35.483848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:37.297505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038697051438:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.297635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.553604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.588228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.621754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.652777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.693438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.763266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.847153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038697051944:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.847202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.847414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038697051949:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.850095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.857276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568038697051951:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:38.822359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568021517180774:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.822425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24783, MsgBus: 62945 2025-04-07T13:49:40.207890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568048285594297:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001382/r3tmp/tmpzCbR8H/pdisk_1.dat 2025-04-07T13:49:40.254140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:40.346302Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:40.349064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:40.349125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:40.350702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24783, node 2 2025-04-07T13:49:40.405261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:40.405291Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:40.405298Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:40.405409Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62945 TClient is connected to server localhost:62945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:40.859085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:40.872531Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:40.879163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:40.956607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:41.170224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:49:41.257972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:43.398073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568061170497717:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ha ... # peer# 2025-04-07T13:50:07.305356Z node 5 :GRPC_SERVER DEBUG: [0x51b000425d80] received request Name# TopicService/DropTopic ok# false data# peer# 2025-04-07T13:50:07.305551Z node 5 :GRPC_SERVER DEBUG: [0x51b00000a180] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-07T13:50:07.305559Z node 5 :GRPC_SERVER DEBUG: [0x51b00042b880] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-07T13:50:07.305719Z node 5 :GRPC_SERVER DEBUG: [0x51b00042bf80] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-07T13:50:07.305723Z node 5 :GRPC_SERVER DEBUG: [0x51b00042b180] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-07T13:50:07.305894Z node 5 :GRPC_SERVER DEBUG: [0x51b00042c680] received request Name# CreateDatabase ok# false data# peer# 2025-04-07T13:50:07.305904Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a5480] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-07T13:50:07.306054Z node 5 :GRPC_SERVER DEBUG: [0x51b000407380] received request Name# AlterDatabase ok# false data# peer# 2025-04-07T13:50:07.306119Z node 5 :GRPC_SERVER DEBUG: [0x51b0003e9780] received request Name# ListDatabases ok# false data# peer# 2025-04-07T13:50:07.306251Z node 5 :GRPC_SERVER DEBUG: [0x51b0001bf980] received request Name# RemoveDatabase ok# false data# peer# 2025-04-07T13:50:07.306293Z node 5 :GRPC_SERVER DEBUG: [0x51b000060480] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-07T13:50:07.306413Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d0880] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-07T13:50:07.306462Z node 5 :GRPC_SERVER DEBUG: [0x51b0003ea580] received request Name# ListEndpoints ok# false data# peer# 2025-04-07T13:50:07.306545Z node 5 :GRPC_SERVER DEBUG: [0x51b00032d580] received request Name# WhoAmI ok# false data# peer# 2025-04-07T13:50:07.306568Z node 5 :GRPC_SERVER DEBUG: [0x51b000060b80] received request Name# NodeRegistration ok# false data# peer# 2025-04-07T13:50:07.306699Z node 5 :GRPC_SERVER DEBUG: [0x51b000049f80] received request Name# Scan ok# false data# peer# 2025-04-07T13:50:07.306758Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d5c80] received request Name# GetShardLocations ok# false data# peer# 2025-04-07T13:50:07.306870Z node 5 :GRPC_SERVER DEBUG: [0x51b0003a0680] received request Name# DescribeTable ok# false data# peer# 2025-04-07T13:50:07.306967Z node 5 :GRPC_SERVER DEBUG: [0x51b000270c80] received request Name# CreateSnapshot ok# false data# peer# 2025-04-07T13:50:07.307090Z node 5 :GRPC_SERVER DEBUG: [0x51b000159380] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-07T13:50:07.307120Z node 5 :GRPC_SERVER DEBUG: [0x51b00015cb80] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-07T13:50:07.307254Z node 5 :GRPC_SERVER DEBUG: [0x51b000049880] received request Name# List ok# false data# peer# 2025-04-07T13:50:07.307265Z node 5 :GRPC_SERVER DEBUG: [0x51b00037f980] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-07T13:50:07.307390Z node 5 :GRPC_SERVER DEBUG: [0x51b000445580] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-07T13:50:07.307430Z node 5 :GRPC_SERVER DEBUG: [0x51b000058d80] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-07T13:50:07.307566Z node 5 :GRPC_SERVER DEBUG: [0x51b000089680] received request Name# 
RateLimiter/ListResources ok# false data# peer# 2025-04-07T13:50:07.307601Z node 5 :GRPC_SERVER DEBUG: [0x51b000154d80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-04-07T13:50:07.307737Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bf080] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-07T13:50:07.307775Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bf780] received request Name# CreateStream ok# false data# peer# 2025-04-07T13:50:07.307912Z node 5 :GRPC_SERVER DEBUG: [0x51b00037eb80] received request Name# ListStreams ok# false data# peer# 2025-04-07T13:50:07.307942Z node 5 :GRPC_SERVER DEBUG: [0x51b00039d580] received request Name# DeleteStream ok# false data# peer# 2025-04-07T13:50:07.308094Z node 5 :GRPC_SERVER DEBUG: [0x51b00037f280] received request Name# DescribeStream ok# false data# peer# 2025-04-07T13:50:07.308096Z node 5 :GRPC_SERVER DEBUG: [0x51b00033ed80] received request Name# ListShards ok# false data# peer# 2025-04-07T13:50:07.308261Z node 5 :GRPC_SERVER DEBUG: [0x51b000439f80] received request Name# SetWriteQuota ok# false data# peer# 2025-04-07T13:50:07.308263Z node 5 :GRPC_SERVER DEBUG: [0x51b00010f480] received request Name# UpdateStream ok# false data# peer# 2025-04-07T13:50:07.308426Z node 5 :GRPC_SERVER DEBUG: [0x51b00004a680] received request Name# PutRecord ok# false data# peer# 2025-04-07T13:50:07.308469Z node 5 :GRPC_SERVER DEBUG: [0x51b00039ce80] received request Name# PutRecords ok# false data# peer# 2025-04-07T13:50:07.308624Z node 5 :GRPC_SERVER DEBUG: [0x51b000086580] received request Name# GetRecords ok# false data# peer# 2025-04-07T13:50:07.309231Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b8780] received request Name# GetShardIterator ok# false data# peer# 2025-04-07T13:50:07.309522Z node 5 :GRPC_SERVER DEBUG: [0x51b00004ad80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-07T13:50:07.312937Z node 5 :GRPC_SERVER DEBUG: [0x51b00015bd80] received request Name# DescribeLimits ok# false data# peer# 2025-04-07T13:50:07.314160Z node 5 :GRPC_SERVER DEBUG: [0x51b00015d980] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-07T13:50:07.314438Z node 5 :GRPC_SERVER DEBUG: [0x51b00015a180] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-07T13:50:07.314636Z node 5 :GRPC_SERVER DEBUG: [0x51b000312380] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-07T13:50:07.314813Z node 5 :GRPC_SERVER DEBUG: [0x51b00015b680] received request Name# UpdateShardCount ok# false data# peer# 2025-04-07T13:50:07.314988Z node 5 :GRPC_SERVER DEBUG: [0x51b000312a80] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-07T13:50:07.315148Z node 5 :GRPC_SERVER DEBUG: [0x51b000158c80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-07T13:50:07.315311Z node 5 :GRPC_SERVER DEBUG: [0x51b000271380] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-07T13:50:07.315486Z node 5 :GRPC_SERVER DEBUG: [0x51b0002d5680] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-07T13:50:07.315638Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d5580] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-07T13:50:07.315800Z node 5 :GRPC_SERVER DEBUG: [0x51b0002d0280] received request Name# AddTagsToStream ok# false data# peer# 2025-04-07T13:50:07.315983Z node 5 :GRPC_SERVER DEBUG: [0x51b00032c780] received request Name# DisableEnhancedMonitoring ok# 
false data# peer# 2025-04-07T13:50:07.316152Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d0f80] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-07T13:50:07.316312Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d4080] received request Name# ListTagsForStream ok# false data# peer# 2025-04-07T13:50:07.316471Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d1680] received request Name# MergeShards ok# false data# peer# 2025-04-07T13:50:07.316645Z node 5 :GRPC_SERVER DEBUG: [0x51b0003d2480] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-07T13:50:07.316884Z node 5 :GRPC_SERVER DEBUG: [0x51b000438380] received request Name# SplitShard ok# false data# peer# 2025-04-07T13:50:07.316972Z node 5 :GRPC_SERVER DEBUG: [0x51b000442480] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-07T13:50:07.317064Z node 5 :GRPC_SERVER DEBUG: [0x51b00010ed80] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-07T13:50:07.317241Z node 5 :GRPC_SERVER DEBUG: [0x51b000439180] received request Name# SelfCheck ok# false data# peer# 2025-04-07T13:50:07.317244Z node 5 :GRPC_SERVER DEBUG: [0x51b00043c280] received request Name# NodeCheck ok# false data# peer# 2025-04-07T13:50:07.317439Z node 5 :GRPC_SERVER DEBUG: [0x51b000111e80] received request Name# CreateSession ok# false data# peer# 2025-04-07T13:50:07.317449Z node 5 :GRPC_SERVER DEBUG: [0x51b000112580] received request Name# DeleteSession ok# false data# peer# 2025-04-07T13:50:07.317623Z node 5 :GRPC_SERVER DEBUG: [0x51b00010d880] received request Name# AttachSession ok# false data# peer# 2025-04-07T13:50:07.317646Z node 5 :GRPC_SERVER DEBUG: [0x51b00010ca80] received request Name# BeginTransaction ok# false data# peer# 2025-04-07T13:50:07.317813Z node 5 :GRPC_SERVER DEBUG: [0x51b00010c380] received request Name# CommitTransaction ok# false data# peer# 2025-04-07T13:50:07.317830Z node 5 :GRPC_SERVER DEBUG: [0x51b000113a80] received request Name# RollbackTransaction ok# false data# peer# 2025-04-07T13:50:07.317998Z node 5 :GRPC_SERVER DEBUG: [0x51b000015780] received request Name# ExecuteQuery ok# false data# peer# 2025-04-07T13:50:07.318009Z node 5 :GRPC_SERVER DEBUG: [0x51b000446380] received request Name# ExecuteScript ok# false data# peer# 2025-04-07T13:50:07.325329Z node 5 :GRPC_SERVER DEBUG: [0x51b00012fa80] received request Name# FetchScriptResults ok# false data# peer# 2025-04-07T13:50:07.325578Z node 5 :GRPC_SERVER DEBUG: [0x51b000114180] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-07T13:50:07.325793Z node 5 :GRPC_SERVER DEBUG: [0x51b000112c80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-07T13:50:07.325983Z node 5 :GRPC_SERVER DEBUG: [0x51b000111080] received request Name# RestartTablet ok# false data# peer# 2025-04-07T13:50:07.326234Z node 5 :GRPC_SERVER DEBUG: [0x51b000111780] received request Name# CreateLogStore ok# false data# peer# 2025-04-07T13:50:07.326426Z node 5 :GRPC_SERVER DEBUG: [0x51b000110980] received request Name# DescribeLogStore ok# false data# peer# 2025-04-07T13:50:07.326611Z node 5 :GRPC_SERVER DEBUG: [0x51b000110280] received request Name# DropLogStore ok# false data# peer# 2025-04-07T13:50:07.326805Z node 5 :GRPC_SERVER DEBUG: [0x51b000033a80] received request Name# AlterLogStore ok# false data# peer# 2025-04-07T13:50:07.326983Z node 5 :GRPC_SERVER DEBUG: [0x51b000024580] received request Name# CreateLogTable ok# false data# peer# 2025-04-07T13:50:07.327180Z node 5 :GRPC_SERVER DEBUG: [0x51b000035680] 
received request Name# DescribeLogTable ok# false data# peer# 2025-04-07T13:50:07.327379Z node 5 :GRPC_SERVER DEBUG: [0x51b000023780] received request Name# DropLogTable ok# false data# peer# 2025-04-07T13:50:07.327578Z node 5 :GRPC_SERVER DEBUG: [0x51b000027680] received request Name# AlterLogTable ok# false data# peer# 2025-04-07T13:50:07.327831Z node 5 :GRPC_SERVER DEBUG: [0x51b000036b80] received request Name# Login ok# false data# peer# 2025-04-07T13:50:07.328032Z node 5 :GRPC_SERVER DEBUG: [0x51b000025380] received request Name# DescribeReplication ok# false data# peer# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-07T13:49:56.626218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568119976771085:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:56.632313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a3f/r3tmp/tmpwnCVdp/pdisk_1.dat 2025-04-07T13:49:57.334569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:57.351993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:57.352083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:57.365299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28408 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:57.690942Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119976771164:2113] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:57.690991Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738943:2437] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:57.691092Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568119976771208:2132], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.691191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568119976771208:2132], cookie# 1 2025-04-07T13:49:57.692483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738928:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738925:2430], cookie# 1 2025-04-07T13:49:57.692509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738929:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738926:2430], cookie# 1 2025-04-07T13:49:57.692523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738930:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738927:2430], cookie# 1 2025-04-07T13:49:57.692554Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119976770868:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738928:2430], cookie# 1 2025-04-07T13:49:57.692575Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119976770871:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738929:2430], cookie# 1 2025-04-07T13:49:57.692595Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119976770874:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124271738930:2430], cookie# 1 2025-04-07T13:49:57.692616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738928:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119976770868:2050], cookie# 1 2025-04-07T13:49:57.692629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738929:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119976770871:2053], cookie# 1 2025-04-07T13:49:57.692640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124271738930:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119976770874:2056], cookie# 1 2025-04-07T13:49:57.692683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124271738925:2430], cookie# 1 2025-04-07T13:49:57.692700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-07T13:49:57.692712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender#
[1:7490568124271738926:2430], cookie# 1 2025-04-07T13:49:57.692732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-07T13:49:57.692759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124271738927:2430], cookie# 1 2025-04-07T13:49:57.692770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/dc-1] Unexpected sync response: sender# [1:7490568124271738927:2430], cookie# 1 2025-04-07T13:49:57.692812Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568119976771208:2132], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.701717Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568119976771208:2132], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568124271738924:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.702412Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568119976771208:2132], cacheItem# { Subscriber: { Subscriber: [1:7490568124271738924:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:57.704445Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568124271738944:2438], recipient# [1:7490568124271738943:2437], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.704489Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738943:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.755425Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738943:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.757282Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738943:2437] Handle TEvDescribeSchemeResult Forward to# [1:7490568124271738942:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:49:57.800961Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119976771164:2113] Handle TEvProposeTransaction 2025-04-07T13:49:57.800992Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119976771164:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:57.801062Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119976771164:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568124271738949:2442] 2025-04-07T13:49:57.887619Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738949:2442] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:57.888129Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124271738949:2442] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:57.888209Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568119976771208:2132], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.888306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124271738924:2430][/d ... ies# AckNotifications: true 2025-04-07T13:50:06.494457Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568148716918583:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T13:50:06.494473Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568148716918589:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568161601821969:2985] 2025-04-07T13:50:06.494488Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568148716918589:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7490568161601821963:2984] 2025-04-07T13:50:06.494496Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568148716918589:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T13:50:06.494498Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568148716918583:2050] Subscribe: subscriber# [3:7490568161601821961:2984], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:06.494519Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568148716918589:2056] Subscribe: subscriber# [3:7490568161601821963:2984], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:06.494546Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568161601821962:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568148716918586:2053] 2025-04-07T13:50:06.494561Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490568148716918897:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-07T13:50:06.494567Z node 3 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][3:7490568161601821961:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568148716918583:2050] 2025-04-07T13:50:06.494587Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568161601821963:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568148716918589:2056] 2025-04-07T13:50:06.494617Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568161601821956:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568161601821959:2984] 2025-04-07T13:50:06.494658Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568161601821956:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568161601821958:2984] 2025-04-07T13:50:06.494658Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490568148716918897:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490568161601821957:2985] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:06.494684Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490568161601821956:2984][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7490568148716918897:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:50:06.494708Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568161601821956:2984][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568161601821960:2984] 2025-04-07T13:50:06.494730Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490568161601821956:2984][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7490568148716918897:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:50:06.494748Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568148716918586:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568161601821962:2984] 2025-04-07T13:50:06.494763Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568148716918583:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568161601821961:2984] 2025-04-07T13:50:06.494764Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568148716918897:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568161601821957:2985] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:06.494777Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568148716918589:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568161601821963:2984] 2025-04-07T13:50:06.494794Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490568148716918897:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T13:50:06.494848Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490568148716918897:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490568161601821956:2984] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:06.494894Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568148716918897:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568161601821956:2984] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:06.494984Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568161601821970:2986], recipient# [3:7490568161601821955:2290], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:07.216824Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568148716918897:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:07.216962Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568148716918897:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568153011887016:2726] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:07.217068Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568165896789283:2990], recipient# [3:7490568165896789282:2291], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:07.485693Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568148716918897:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:07.485824Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568148716918897:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568161601821941:2982] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:07.485921Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568165896789288:2991], recipient# [3:7490568165896789287:2292], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> YdbLogStore::LogStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] Test command err: 2025-04-07T13:49:06.769054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:06.769340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:06.769606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ac/r3tmp/tmpPGXRuS/pdisk_1.dat 2025-04-07T13:49:07.229226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.288507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:07.346948Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:07.347834Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:07.348025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:07.348170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:07.361137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:07.488239Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:07.488298Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:07.488462Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:07.608303Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:07.608824Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:07.608897Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:07.609233Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:07.609439Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:07.609568Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:07.609899Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:07.611258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.612231Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:07.612314Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:07.643010Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:07.644124Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:07.644578Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:07.645252Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:07.689687Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:07.690408Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:07.690526Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:07.692187Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:07.692272Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:07.692336Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:07.692632Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:07.727962Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:07.728164Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:07.728275Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:07.728364Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:07.728414Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:07.728511Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:07.732902Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.732971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.733216Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:07.733300Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:07.733779Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.733818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.734208Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:07.734300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:07.734362Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:07.734416Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:07.734451Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:07.734480Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:07.734518Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:07.734608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:07.735058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:07.735101Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:07.735198Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:07.735438Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:07.735495Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:07.735583Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:07.735639Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:07.735682Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:07.735716Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:07.735747Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.736003Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:07.736032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:07.736068Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:07.736096Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.736155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:07.736185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:07.736213Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:07.736239Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:07.736263Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:07.737003Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:07.737094Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.737126Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.737162Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:49:07.737221Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:07.739419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:07.739463Z n ... on unit StoreAndSendOutRS 2025-04-07T13:50:08.110854Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit StoreAndSendOutRS 2025-04-07T13:50:08.110876Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2025-04-07T13:50:08.110897Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit StoreAndSendOutRS 2025-04-07T13:50:08.110916Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit PrepareKqpDataTxInRS 2025-04-07T13:50:08.110942Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit PrepareKqpDataTxInRS 2025-04-07T13:50:08.110968Z node 7 :TX_DATASHARD TRACE: Prepare InReadsets from 72075186224037889 to 72075186224037888 2025-04-07T13:50:08.110994Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2025-04-07T13:50:08.111012Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit PrepareKqpDataTxInRS 2025-04-07T13:50:08.111046Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-07T13:50:08.111068Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-07T13:50:08.111099Z node 7 :TX_DATASHARD TRACE: Prepare for loading readset for [900:281474976715668] at 72075186224037888 source=72075186224037889 target=72075186224037888 2025-04-07T13:50:08.111133Z node 7 :TX_DATASHARD TRACE: Expected 1 readsets for [900:281474976715668] at 72075186224037888 2025-04-07T13:50:08.111205Z node 7 :TX_DATASHARD TRACE: Filled readset for [900:281474976715668] from=72075186224037889 to=72075186224037888origin=72075186224037889 2025-04-07T13:50:08.111233Z node 7 :TX_DATASHARD TRACE: Remain 0 read sets for [900:281474976715668] at 72075186224037888 2025-04-07T13:50:08.111285Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2025-04-07T13:50:08.111310Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit LoadAndWaitInRS 
2025-04-07T13:50:08.111334Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T13:50:08.111371Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T13:50:08.111416Z node 7 :TX_DATASHARD TRACE: Operation [900:281474976715668] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191860 2025-04-07T13:50:08.111590Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T13:50:08.111626Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:50:08.111650Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T13:50:08.111673Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompleteOperation 2025-04-07T13:50:08.111694Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2025-04-07T13:50:08.111833Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is DelayComplete 2025-04-07T13:50:08.111857Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompleteOperation 2025-04-07T13:50:08.111885Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:08.111909Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:08.111934Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2025-04-07T13:50:08.111953Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:08.111978Z node 7 :TX_DATASHARD TRACE: Execution plan for [900:281474976715668] at 72075186224037888 has finished 2025-04-07T13:50:08.112000Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:50:08.112033Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T13:50:08.112072Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:50:08.112098Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:50:08.112534Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:50:08.112604Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-07T13:50:08.112865Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [7:960:2752], exec latency: 652 ms, propose latency: 652 ms 2025-04-07T13:50:08.112979Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-07T13:50:08.113046Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 
72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-07T13:50:08.113082Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:50:08.113721Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:738:2605], Recipient [7:1074:2852]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-07T13:50:08.113782Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:08.113839Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2025-04-07T13:50:08.114051Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:08.114082Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T13:50:08.114122Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [7:960:2752], exec latency: 202 ms, propose latency: 202 ms 2025-04-07T13:50:08.114173Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:50:08.114208Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:08.114427Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2605]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:50:08.114454Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:08.114482Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-07T13:50:08.115750Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:08.115801Z node 7 :TX_DATASHARD TRACE: Complete execution for [800:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-07T13:50:08.115841Z node 7 :TX_DATASHARD DEBUG: Complete [800 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [7:998:2786], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:50:08.115887Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2025-04-07T13:50:08.115912Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 { items { int32_value: 1 } items { int32_value: 10 } } { items { int32_value: 2 } items { int32_value: 20 } } 2025-04-07T13:50:08.119328Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2605]: {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 
2025-04-07T13:50:08.120317Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:08.120778Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715666 2025-04-07T13:50:08.121170Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:08.121242Z node 7 :TX_DATASHARD TRACE: Complete execution for [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2025-04-07T13:50:08.121351Z node 7 :TX_DATASHARD DEBUG: Complete [900 : 281474976715668] from 72075186224037888 at tablet 72075186224037888 send result to client [7:1033:2813], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:50:08.121451Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2025-04-07T13:50:08.121516Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:08.122032Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2605]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 7 Status: STATUS_NOT_FOUND 2025-04-07T13:50:08.124074Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2605]: {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2025-04-07T13:50:08.124109Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:50:08.124151Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715668 2025-04-07T13:50:08.124242Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2605]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 2 } items { int32_value: 20 } } 2025-04-07T13:50:08.125485Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2605]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715667 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 2 } items { int32_value: 20 } } >> TGRpcYdbTest::CreateTableBadRequest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-07T13:49:56.837102Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568119942783204:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:56.837149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a4f/r3tmp/tmpQzUHdE/pdisk_1.dat 2025-04-07T13:49:57.576171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:57.576254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:57.583650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:57.623687Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12943 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:57.893379Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119942783448:2128] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:57.893416Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751209:2434] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:57.893514Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124237750794:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:57.893589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568124237750794:2151], cookie# 1 2025-04-07T13:49:57.894855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124237751194:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751191:2429], cookie# 1 2025-04-07T13:49:57.894880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124237751195:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751192:2429], cookie# 1 2025-04-07T13:49:57.894893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124237751196:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751193:2429], cookie# 1 2025-04-07T13:49:57.894925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119942783133:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751194:2429], cookie# 1 2025-04-07T13:49:57.894949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119942783136:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751195:2429], cookie# 1 2025-04-07T13:49:57.894965Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568119942783139:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568124237751196:2429], cookie# 1 2025-04-07T13:49:57.894987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124237751194:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119942783133:2050], cookie# 1 2025-04-07T13:49:57.895004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7490568124237751195:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119942783136:2053], cookie# 1 2025-04-07T13:49:57.895030Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568124237751196:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568119942783139:2056], cookie# 1 2025-04-07T13:49:57.895061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124237751191:2429], cookie# 1 2025-04-07T13:49:57.895080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:57.895093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124237751192:2429], cookie# 1 2025-04-07T13:49:57.895108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:57.895128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124237751193:2429], cookie# 1 2025-04-07T13:49:57.895140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/dc-1] Unexpected sync response: sender# [1:7490568124237751193:2429], cookie# 1 2025-04-07T13:49:57.895183Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568124237750794:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:57.903930Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568124237750794:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568124237751190:2429] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:57.904052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568124237750794:2151], cacheItem# { Subscriber: { Subscriber: [1:7490568124237751190:2429] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:57.906260Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568124237751211:2435], recipient# [1:7490568124237751209:2434], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:57.906316Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751209:2434] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:57.934026Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751209:2434] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:57.941703Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751209:2434] Handle TEvDescribeSchemeResult Forward to# [1:7490568124237751208:2433] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:49:57.956382Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119942783448:2128] Handle TEvProposeTransaction 2025-04-07T13:49:57.956411Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119942783448:2128] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:57.956490Z node 1 :TX_PROXY DEBUG: actor# [1:7490568119942783448:2128] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568124237751220:2443] 2025-04-07T13:49:58.062140Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751220:2443] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:58.062208Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568124237751220:2443] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:58.062275Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568124237750794:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:58.062341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568124237751190:2429][/d ... pool_classifiers 2025-04-07T13:50:08.280456Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568150866278604:2056] Subscribe: subscriber# [3:7490568172341116520:2921], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:08.280491Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568172341116519:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568150866278601:2053] 2025-04-07T13:50:08.280511Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568172341116520:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568150866278604:2056] 2025-04-07T13:50:08.280545Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568172341116501:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568172341116516:2921] 2025-04-07T13:50:08.280586Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568172341116501:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568172341116517:2921] 2025-04-07T13:50:08.280626Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490568172341116501:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7490568155161246199:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:50:08.280649Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568150866278601:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568172341116519:2921] 2025-04-07T13:50:08.280679Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568150866278604:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568172341116520:2921] 2025-04-07T13:50:08.280720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490568155161246199:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-07T13:50:08.280799Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490568155161246199:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490568172341116501:2921] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:08.280870Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568155161246199:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568172341116501:2921] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:08.280989Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568150866278598:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7490568172341116518:2921] 2025-04-07T13:50:08.280999Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568150866278598:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-07T13:50:08.281031Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568150866278598:2050] Subscribe: subscriber# [3:7490568172341116518:2921], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:08.281087Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568172341116521:2923], recipient# [3:7490568172341116498:2289], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:08.281128Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568172341116518:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568150866278598:2050] 2025-04-07T13:50:08.281153Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568172341116501:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490568172341116515:2921] 2025-04-07T13:50:08.281178Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490568172341116501:2921][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7490568155161246199:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:50:08.281195Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568150866278598:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568172341116518:2921] 2025-04-07T13:50:08.294172Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568155161246199:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:08.294287Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568155161246199:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568159456214317:2715] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:08.294359Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568172341116523:2924], recipient# [3:7490568172341116522:2290], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.282377Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568155161246199:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.282503Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568155161246199:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568172341116501:2921] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } 
Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.282577Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568176636083841:2930], recipient# [3:7490568176636083840:2291], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.299167Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568155161246199:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.299288Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568155161246199:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568159456214317:2715] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.299354Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568176636083843:2931], recipient# [3:7490568176636083842:2292], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> YdbImport::Simple [GOOD] >> YdbImport::EmptyData >> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD] >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> 
YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::ZeroRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-07T13:49:59.105757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568130155580340:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:59.105822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1e/r3tmp/tmpmVM2M2/pdisk_1.dat 2025-04-07T13:49:59.450060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:59.494629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:59.494928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:59.501111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22642 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T13:49:59.767168Z node 1 :TX_PROXY DEBUG: actor# [1:7490568130155580559:2137] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:59.767203Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568130155580978:2423] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:59.767282Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568130155580582:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.767306Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568130155580582:2150], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T13:49:59.767457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:59.769192Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612934:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568130155580983:2424] 2025-04-07T13:49:59.769253Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568125860612934:2050] Subscribe: subscriber# [1:7490568130155580983:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:59.769298Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612940:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568130155580985:2424] 2025-04-07T13:49:59.769327Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568125860612940:2056] Subscribe: subscriber# [1:7490568130155580985:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:59.769365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580983:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568125860612934:2050] 
2025-04-07T13:49:59.769389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580985:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568125860612940:2056] 2025-04-07T13:49:59.769421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568130155580980:2424] 2025-04-07T13:49:59.769458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568130155580982:2424] 2025-04-07T13:49:59.769511Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490568130155580979:2424][/dc-1] Set up state: owner# [1:7490568130155580582:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:59.769614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580983:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580980:2424], cookie# 1 2025-04-07T13:49:59.769628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580984:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580981:2424], cookie# 1 2025-04-07T13:49:59.769639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580985:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580982:2424], cookie# 1 2025-04-07T13:49:59.771844Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612937:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490568130155580984:2424] 2025-04-07T13:49:59.771885Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568125860612937:2053] Subscribe: subscriber# [1:7490568130155580984:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:49:59.771942Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612937:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580984:2424], cookie# 1 2025-04-07T13:49:59.771975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612934:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568130155580983:2424] 2025-04-07T13:49:59.771991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612934:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580983:2424], cookie# 1 2025-04-07T13:49:59.772005Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612940:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568130155580985:2424] 2025-04-07T13:49:59.772016Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612940:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568130155580985:2424], cookie# 1 2025-04-07T13:49:59.772042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580984:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568125860612937:2053] 
2025-04-07T13:49:59.772068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580984:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568125860612937:2053], cookie# 1 2025-04-07T13:49:59.772101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580983:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568125860612934:2050], cookie# 1 2025-04-07T13:49:59.772139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568130155580985:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568125860612940:2056], cookie# 1 2025-04-07T13:49:59.772174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490568130155580981:2424] 2025-04-07T13:49:59.772237Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490568130155580979:2424][/dc-1] Path was already updated: owner# [1:7490568130155580582:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:49:59.772263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568130155580981:2424], cookie# 1 2025-04-07T13:49:59.772283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:59.772296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568130155580980:2424], cookie# 1 2025-04-07T13:49:59.772312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:59.772332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568130155580982:2424], cookie# 1 2025-04-07T13:49:59.772345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568130155580979:2424][/dc-1] Unexpected sync response: sender# [1:7490568130155580982:2424], cookie# 1 2025-04-07T13:49:59.772358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568125860612937:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490568130155580984:2424] 2025-04-07T13:49:59.840022Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568130155580582:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T13:49:59.840382Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568130155580582:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... PrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:08.998992Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568158489036481:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568171373939315:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.000988Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036175:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7490568171373939323:2779] 2025-04-07T13:50:09.001004Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036175:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T13:50:09.001061Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036175:2050] Subscribe: subscriber# [3:7490568171373939323:2779], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:09.001096Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036178:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7490568171373939325:2779] 2025-04-07T13:50:09.001103Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036178:2053] Upsert description: path# 
/dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T13:50:09.001122Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036178:2053] Subscribe: subscriber# [3:7490568171373939325:2779], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:09.001146Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036181:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7490568171373939326:2779] 2025-04-07T13:50:09.001175Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036181:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T13:50:09.001196Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490568158489036181:2056] Subscribe: subscriber# [3:7490568171373939326:2779], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T13:50:09.001238Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568171373939323:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568158489036175:2050] 2025-04-07T13:50:09.001261Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568171373939325:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568158489036178:2053] 2025-04-07T13:50:09.001295Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490568171373939326:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568158489036181:2056] 2025-04-07T13:50:09.001336Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568171373939314:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568171373939316:2779] 2025-04-07T13:50:09.001369Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568171373939314:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568171373939317:2779] 2025-04-07T13:50:09.001399Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490568171373939314:2779][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7490568158489036481:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T13:50:09.001420Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490568171373939314:2779][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490568171373939318:2779] 2025-04-07T13:50:09.001441Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490568171373939314:2779][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7490568158489036481:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2025-04-07T13:50:09.001493Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490568158489036481:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T13:50:09.001564Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490568158489036481:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490568171373939314:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:50:09.001647Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568158489036481:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568171373939314:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.001768Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568175668906624:2781], recipient# [3:7490568171373939313:2287], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.001815Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036175:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568171373939323:2779] 2025-04-07T13:50:09.001830Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036178:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568171373939325:2779] 2025-04-07T13:50:09.001842Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490568158489036181:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490568171373939326:2779] 2025-04-07T13:50:09.719865Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568158489036481:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.720004Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568158489036481:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568162784004529:2664] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.720108Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568175668906638:2782], recipient# [3:7490568175668906637:2288], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.988799Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568158489036481:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:09.988928Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568158489036481:2124], cacheItem# { Subscriber: { Subscriber: [3:7490568171373939301:2777] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:09.989012Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568175668906646:2786], recipient# [3:7490568175668906645:2289], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TGRpcYdbTest::SdkUuid >> YdbYqlClient::TestYqlWrongTable >> YdbYqlClient::TestColumnOrder >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> YdbScripting::BasicV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] Test command err: 2025-04-07T13:49:06.703512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:06.703812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:06.704067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001090/r3tmp/tmp458yzs/pdisk_1.dat 2025-04-07T13:49:07.152554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.191789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:07.247498Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:07.248386Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:07.248525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:07.248624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:07.261990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:07.386848Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:07.386928Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:07.387132Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:07.493938Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:07.494630Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:07.494728Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:07.495086Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:07.495308Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:07.495432Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:07.495741Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:07.497352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:07.498471Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:07.498555Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:07.532626Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:07.533753Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:07.534242Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:07.534589Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:07.576576Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:07.577455Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:07.577605Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:07.579321Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:07.579419Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:07.579492Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:07.579845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:07.610936Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:07.611169Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:07.611301Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:07.611373Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:07.611438Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:07.611480Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:07.611949Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.612010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:07.612226Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:07.612333Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:07.612811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.612917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:07.612975Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:07.613048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:07.613098Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:07.613147Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:07.613183Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:07.613220Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:07.613254Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:07.613306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:07.613584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:07.613634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:07.613736Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:07.614009Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:07.614079Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:07.614176Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:07.614224Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:07.614265Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:07.614304Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:07.614360Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.614632Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:07.614672Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:07.614705Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:07.614736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.614794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:07.614827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:07.614859Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:07.614932Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:07.614971Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:07.615726Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:07.615779Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:07.615810Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:07.615861Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:49:07.615946Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:07.618363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:07.618426Z n ... T13:50:07.968568Z node 8 :KQP_EXECUTER DEBUG: ActorId: [8:1948:3222] TxId: 281474976715669. Ctx: { TraceId: 01jr887zbj5sy6q4b2a90tb8an, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002168s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 2025-04-07T13:50:07.968805Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:50:07.969038Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, txInfo Status: Committed Kind: ReadOnly TotalDuration: 25.695 ServerDuration: 25.588 QueriesCount: 2 2025-04-07T13:50:07.969191Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:50:07.969686Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:50:07.969770Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, EndCleanup, isFinal: 0 2025-04-07T13:50:07.969890Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YThiZmIxYTctNjg3MDM5MjAtYjMxMWQ0NGQtODE1OTZiYjQ=, ActorId: [8:1938:3222], ActorState: ExecuteState, TraceId: 01jr887zbj5sy6q4b2a90tb8an, Sent query response back to proxy, proxyRequestId: 12, proxyId: [8:162:2148] { items { uint32_value: 1 } items { uint32_value: 10 } 
} 2025-04-07T13:50:08.113914Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [8:2003:3243], Recipient [8:2005:3244]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:50:08.119987Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [8:2003:3243], Recipient [8:2005:3244]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:50:08.120276Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [8:2003:3243], Recipient [8:2005:3244]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:50:08.129425Z node 8 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [8:2005:3244] 2025-04-07T13:50:08.129747Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:50:08.136237Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:50:08.137869Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:50:08.141396Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:50:08.141527Z node 8 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:50:08.141644Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:50:08.142125Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:50:08.142209Z node 8 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-04-07T13:50:08.142329Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 0 siblings to be activated: wait to activation from: 2025-04-07T13:50:08.142389Z node 8 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:50:08.142531Z node 8 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [8:2037:3269] 2025-04-07T13:50:08.142584Z node 8 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:50:08.142637Z node 8 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-04-07T13:50:08.142684Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:08.142769Z node 8 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-07T13:50:08.144189Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [8:2005:3244], Recipient [8:2005:3244]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:08.144272Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:08.145163Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvRegisterTablet TabletId# 72075186224037888 ProcessingParams { Version: 1 PlanResolution: 100 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 }} 2025-04-07T13:50:08.145322Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72075186224037888 2025-04-07T13:50:08.145401Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Sender# [8:2005:3244] {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 
2025-04-07T13:50:08.145592Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-04-07T13:50:08.145852Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 276168711, Sender [8:2038:3270], Recipient [8:2005:3244]: NKikimr::NDataShard::TEvChangeExchange::TEvSplitAck 2025-04-07T13:50:08.146187Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:50:08.146244Z node 8 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-04-07T13:50:08.146304Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:08.147377Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [8:25:2072], Recipient [8:2005:3244]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 2025-04-07T13:50:08.147424Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T13:50:08.148252Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-04-07T13:50:08.148350Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T13:50:08.150069Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Execute, at tablet# 72075186224037888 2025-04-07T13:50:08.150168Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Complete, at tablet# 72075186224037888 2025-04-07T13:50:08.155079Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 1000 NextAcquireStep: 1000 2025-04-07T13:50:08.155275Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [8:25:2072], Recipient [8:2005:3244]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 1000 NextReadStep# 1000 ReadStep# 1000 } 2025-04-07T13:50:08.155325Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T13:50:08.155385Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1000 next step 1000 2025-04-07T13:50:08.285887Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 1100 2025-04-07T13:50:08.286145Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 1100 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 8 bucket 0 2025-04-07T13:50:08.286531Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1100} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 0 ... blocked step 1100 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 1 ... 
blocked step 1100 at node 9 bucket 1 2025-04-07T13:50:09.110895Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 3000 in 1.000000s at 2.950000s 2025-04-07T13:50:09.115453Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 2000 2025-04-07T13:50:09.115675Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 2000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 8 bucket 0 2025-04-07T13:50:09.116124Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 0 ... blocked step 2000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 1 ... blocked step 2000 at node 9 bucket 1 2025-04-07T13:50:09.714562Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 1.000000s at 3.950000s 2025-04-07T13:50:09.719298Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 3000 2025-04-07T13:50:09.719535Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 3000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 8 bucket 0 2025-04-07T13:50:09.719884Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 0 ... blocked step 3000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 1 ... blocked step 3000 at node 9 bucket 1 ... 
upsert finished before unblocking node 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] Test command err: 2025-04-07T13:48:47.225519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567822904289675:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:47.225763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1d/r3tmp/tmpIjgIbt/pdisk_1.dat 2025-04-07T13:48:47.755545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:47.785923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:47.786005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:47.799471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12612, node 1 2025-04-07T13:48:48.237266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:48.237284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:48.237294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:48.237374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:48.955750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.966822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:48.974873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.981571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:48.981761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:48.981775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:48:48.988445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:48.988480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:48.991662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:48.993489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.009019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033729046, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:49.009056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:49.009394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:49.012032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:49.012212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:49.012260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:49.012371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:49.012407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:49.012472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:49.014570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:49.014615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:49.014632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:49.014697Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:9663 2025-04-07T13:48:49.410800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:48:49.411275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.411766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:48:49.411807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T13:48:49.411873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:48:49.411934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T13:48:49.411981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T13:48:49.412042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T13:48:49.412331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-07T13:48:49.413997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:49.414262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:49.414284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.414415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:48:49.414443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 6 2025-04-07T13:48:49.417490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-07T13:48:49.417650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-07T13:48:49.417851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:49.417864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:48:49.417993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] waiting... 2025-04-07T13:48:49.418060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:49.418080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490567822904290130:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-07T13:48:49.418100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490567822904290130:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-07T13:48:49.418133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.418167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-07T13:48:49.418763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedCh ... 
tion complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:49:57.819602Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490568101001274062:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:57.819685Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:00.371982Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7490568135537438435:2122];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1d/r3tmp/tmpoYiIJ9/pdisk_1.dat 2025-04-07T13:50:00.489512Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:00.588839Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32453, node 16 2025-04-07T13:50:00.673801Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:00.673920Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:00.705873Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:00.759524Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:00.759551Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:00.759563Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:00.759710Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:01.206114Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:01.206630Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:01.206662Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:01.210063Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:01.210340Z node 16 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:01.210361Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:50:01.212961Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:01.213004Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:01.214985Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:01.218203Z node 16 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:01.219704Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033801265, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:01.219743Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:01.220066Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:01.222258Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:01.222470Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:01.222536Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:01.222703Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:01.222755Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:01.222807Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:01.227032Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:01.227104Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:01.227128Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T13:50:01.227232Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:05.356788Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7490568135537438435:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:05.356883Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:05.761460Z node 16 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:50:05.762598Z node 16 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-07T13:50:05.763439Z node 16 :KQP_PROXY DEBUG: TraceId: "01jr887s2scee858n0z0ffavx2", Request has 18445000039903.788220s seconds to be completed 2025-04-07T13:50:05.766242Z node 16 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA= 2025-04-07T13:50:05.766340Z node 16 :KQP_PROXY DEBUG: TraceId: "01jr887s2scee858n0z0ffavx2", Created new session, sessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, workerId: [16:7490568157012275898:2298], database: , longSession: 1, local sessions count: 1 2025-04-07T13:50:05.766395Z node 16 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-07T13:50:05.766578Z node 16 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr887s2scee858n0z0ffavx2 2025-04-07T13:50:05.766728Z node 16 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:50:05.766807Z node 16 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:50:05.766843Z node 16 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:50:05.766878Z node 16 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:50:05.768321Z node 16 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:50:05.768453Z node 16 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:50:05.768486Z node 16 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T13:50:05.768510Z node 16 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:50:05.833785Z node 16 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, rpc ctrl: [16:7490568157012275899:2299], sameNode: 1, trace_id: 2025-04-07T13:50:05.833853Z node 16 :KQP_PROXY TRACE: Attach local session: [16:7490568157012275898:2298] to rpc: [16:7490568157012275899:2299] on same node 2025-04-07T13:50:05.874212Z node 16 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:50:05.874307Z node 16 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:50:05.874359Z node 16 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:50:05.874415Z node 16 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:50:05.874530Z node 16 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, ActorId: [16:7490568157012275898:2298], ActorState: unknown state, Session actor destroyed 2025-04-07T13:50:05.874979Z node 16 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA=, workerId: [16:7490568157012275898:2298], local sessions count: 0 2025-04-07T13:50:05.898434Z node 16 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [16:7490568157012275902:2301], trace_id: 2025-04-07T13:50:05.898582Z node 16 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=16&id=ZTZhMWZhOTgtMmE3NTY3NzItMjQ2YzkwNmEtZWI5MTYyYTA= 2025-04-07T13:50:05.898676Z node 16 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [16:7490568157012275902:2301], selfId: [16:7490568135537438440:2127], source: [16:7490568135537438440:2127] >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::GetOperationBadRequest |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |85.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TGRpcNewClient::SimpleYqlQuery >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData |85.1%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |85.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> TopicService::OneConsumer_TheRangesDoNotOverlap >> DemoTx::Scenario_1 >> THealthCheckTest::ShardsNoLimit [GOOD] >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed >> TPersQueueTest::ReadFromSeveralPartitions >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateDropDescribe >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-04-07T13:49:52.312640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.313213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.313428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bcf/r3tmp/tmpFWElcm/pdisk_1.dat 2025-04-07T13:49:52.953242Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26362, node 1 TClient is connected to server localhost:1653 2025-04-07T13:49:53.469949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:53.470015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:53.470058Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:53.470577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:58.298798Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:255:2187], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:58.299024Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:58.299105Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bcf/r3tmp/tmpgbI2UO/pdisk_1.dat 2025-04-07T13:49:58.761684Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5991, node 3 TClient is connected to server localhost:1858 2025-04-07T13:49:59.340591Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:59.340644Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:59.341126Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:59.341909Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:03.819039Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.819266Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.819352Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bcf/r3tmp/tmp9yEniZ/pdisk_1.dat 2025-04-07T13:50:04.236951Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2360, node 6 TClient is connected to server localhost:7264 2025-04-07T13:50:04.715743Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:04.715792Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:04.715824Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:04.715981Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:13.372186Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:364:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:13.372740Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:13.372933Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.375035Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:641:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:13.375235Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.375400Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bcf/r3tmp/tmpnLYbtV/pdisk_1.dat 2025-04-07T13:50:13.798514Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3840, node 8 TClient is connected to server localhost:11922 2025-04-07T13:50:14.421764Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:14.421834Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:14.421887Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:14.422364Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> YdbImport::EmptyData [GOOD] >> YdbIndexTable::AlterIndexImplBySuperUser >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams >> YdbScripting::BasicV1 [GOOD] >> YdbScripting::MultiResults >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> TPersQueueTest::WriteExisting >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> 
TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::YqlQueryWithParams >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |85.1%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable >> YdbScripting::MultiResults [GOOD] >> YdbScripting::Params >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2025-04-07T13:49:48.606125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568084884399006:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:48.606179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbc/r3tmp/tmpNC9unq/pdisk_1.dat 2025-04-07T13:49:49.796123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:49.801157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:49.814372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:49.866617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:49.875869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:50.061939Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.147163s 2025-04-07T13:49:50.062015Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event 
processing took too much time Type# 2146435078 Duration# 0.147264s TServer::EnableGrpc on GrpcPort 4752, node 1 2025-04-07T13:49:50.452736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:50.452772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:50.452781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:50.452936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:51.045884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbc/r3tmp/tmpnWTE9C/pdisk_1.dat 2025-04-07T13:49:53.134740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:53.224166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:53.224244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:53.227039Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:53.266756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14414, node 2 2025-04-07T13:49:53.373299Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:53.373322Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:53.373329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:53.373415Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29849 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:53.570928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:03.958577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:640:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.959025Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.959157Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:03.961437Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:638:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.961741Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.961983Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbc/r3tmp/tmpMUrykD/pdisk_1.dat 2025-04-07T13:50:04.341159Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23808, node 3 TClient is connected to server localhost:18281 2025-04-07T13:50:04.854240Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:04.854314Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:04.854352Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:04.854550Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:13.812623Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:13.826839Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:13.827260Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.827468Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:13.830533Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.830657Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbc/r3tmp/tmpFnYXzC/pdisk_1.dat 2025-04-07T13:50:14.271678Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30119, node 5 TClient is connected to server localhost:22373 2025-04-07T13:50:14.817805Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:14.817870Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:14.817910Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:14.818385Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-07T13:50:21.938829Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:321:2253], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:21.939265Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:21.939371Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbc/r3tmp/tmpehvWBj/pdisk_1.dat 2025-04-07T13:50:22.783513Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15967, node 7 TClient is connected to server localhost:7101 2025-04-07T13:50:23.540472Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:23.540539Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:23.540578Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:23.540927Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExplainQuery >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogLast50ByResource >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1f/r3tmp/tmpyymYCb/pdisk_1.dat 2025-04-07T13:49:58.391499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:58.777747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:58.780693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:58.793110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:58.796043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24554 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:59.246335Z node 1 :TX_PROXY DEBUG: actor# [1:7490568125663376940:2110] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:59.246382Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129958344924:2440] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:59.246495Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568125663377164:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.246588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568125663377164:2124], cookie# 1 2025-04-07T13:49:59.247991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377237:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377234:2149], cookie# 1 2025-04-07T13:49:59.248028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377238:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377235:2149], cookie# 1 2025-04-07T13:49:59.248040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377239:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377236:2149], cookie# 1 2025-04-07T13:49:59.248072Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568121368409558:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377237:2149], cookie# 1 2025-04-07T13:49:59.248100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568121368409561:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377238:2149], cookie# 1 2025-04-07T13:49:59.248114Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568121368409564:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568125663377239:2149], cookie# 1 2025-04-07T13:49:59.248138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377237:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568121368409558:2050], cookie# 1 2025-04-07T13:49:59.248150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377238:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568121368409561:2053], cookie# 1 2025-04-07T13:49:59.248162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568125663377239:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568121368409564:2056], cookie# 1 2025-04-07T13:49:59.248191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568125663377234:2149], cookie# 1 2025-04-07T13:49:59.248265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:59.248281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490568125663377235:2149], cookie# 1 2025-04-07T13:49:59.248302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:59.248322Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568125663377236:2149], cookie# 1 2025-04-07T13:49:59.248356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568125663377233:2149][/dc-1] Unexpected sync response: sender# [1:7490568125663377236:2149], cookie# 1 2025-04-07T13:49:59.248415Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568125663377164:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:59.259146Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568125663377164:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568125663377233:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:59.259271Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568125663377164:2124], cacheItem# { Subscriber: { Subscriber: [1:7490568125663377233:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:59.261171Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568129958344925:2441], recipient# [1:7490568129958344924:2440], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:59.261222Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129958344924:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:59.314071Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129958344924:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:59.315919Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568129958344924:2440] Handle TEvDescribeSchemeResult Forward to# [1:7490568129958344923:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:49:59.364861Z node 1 :TX_PROXY DEBUG: actor# [1:7490568125663376940:2110] Handle TEvProposeTransaction 2025-04-07T13:49:59.364888Z node 1 :TX_PROXY DEBUG: actor# [1:7490568125663376940:2110] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:59.364971Z node 1 :TX_PROXY DEBUG: actor# [1:7490568125663376940:2110] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568129958344933:2448] 2025-04-07T13:49:59.390886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568125663377164:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.390983Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490568125663377164:2124], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-07T13:49:59.391175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568129958344935:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T13:49:59.391561Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568121368409558:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7490568129958344939:2449] 2025-04-07T13:49:59.391578Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490568121368409558:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-07T13:49:59.391636Z node 1 :SCHEME_BOARD_RE ... 
nfo }] } 2025-04-07T13:50:23.521108Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568205618828613:2980] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:23.521196Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568235683600528:3547], recipient# [3:7490568235683600527:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.419975Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.420147Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568188438958961:2662] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:24.420236Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568239978567842:3551], recipient# [3:7490568239978567841:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.450300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.450433Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: 
FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568188438958961:2662] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:24.450509Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568239978567846:3552], recipient# [3:7490568239978567843:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.525123Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:24.525265Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568205618828613:2980] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:24.525346Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568239978567848:3553], recipient# [3:7490568239978567847:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.428930Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.429062Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568188438958961:2662] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.429230Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568244273535162:3557], recipient# [3:7490568244273535161:2324], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.455404Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.455529Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568188438958961:2662] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.455606Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568244273535166:3558], recipient# [3:7490568244273535165:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.533508Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568184143990933:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.533650Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568184143990933:2131], cacheItem# { Subscriber: { Subscriber: [3:7490568205618828613:2980] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.533733Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568244273535168:3559], recipient# [3:7490568244273535167:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THealthCheckTest::ShardsLimit800 [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> TPersQueueTest::UpdatePartitionLocation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-07T13:49:58.076936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568128972671468:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:58.077483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a2b/r3tmp/tmpjkYMZ0/pdisk_1.dat 2025-04-07T13:49:58.800684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:58.884444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:58.884555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:58.893677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:59.033093Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.101932s 2025-04-07T13:49:59.033183Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.102055s TClient is connected to server localhost:26888 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T13:49:59.280817Z node 1 :TX_PROXY DEBUG: actor# [1:7490568128972671688:2113] Handle TEvNavigate describe path dc-1 2025-04-07T13:49:59.280874Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639459:2427] HANDLE EvNavigateScheme dc-1 2025-04-07T13:49:59.281010Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568128972671725:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:49:59.281116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490568128972671725:2131], cookie# 1 2025-04-07T13:49:59.282583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639407:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639404:2394], cookie# 1 2025-04-07T13:49:59.282612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639408:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639405:2394], cookie# 1 2025-04-07T13:49:59.282636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639409:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639406:2394], cookie# 1 2025-04-07T13:49:59.282671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568124677704096:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639407:2394], cookie# 1 2025-04-07T13:49:59.282696Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568124677704099:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639408:2394], cookie# 1 2025-04-07T13:49:59.282713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490568124677704102:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490568133267639409:2394], cookie# 1 2025-04-07T13:49:59.282738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639407:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124677704096:2050], cookie# 1 2025-04-07T13:49:59.282760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639408:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124677704099:2053], cookie# 1 2025-04-07T13:49:59.282789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490568133267639409:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568124677704102:2056], cookie# 1 2025-04-07T13:49:59.282818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568133267639404:2394], cookie# 1 2025-04-07T13:49:59.282838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T13:49:59.282863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490568133267639405:2394], cookie# 1 2025-04-07T13:49:59.282894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T13:49:59.282917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490568133267639406:2394], cookie# 1 2025-04-07T13:49:59.282930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490568133267639403:2394][/dc-1] Unexpected sync response: sender# [1:7490568133267639406:2394], cookie# 1 2025-04-07T13:49:59.282974Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490568128972671725:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T13:49:59.296058Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490568128972671725:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490568133267639403:2394] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:49:59.296174Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490568128972671725:2131], cacheItem# { Subscriber: { Subscriber: [1:7490568133267639403:2394] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T13:49:59.302383Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568133267639460:2428], recipient# [1:7490568133267639459:2427], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:49:59.302456Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639459:2427] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:59.357554Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639459:2427] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T13:49:59.359866Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639459:2427] Handle TEvDescribeSchemeResult Forward to# [1:7490568133267639458:2426] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:49:59.410626Z node 1 :TX_PROXY DEBUG: actor# [1:7490568128972671688:2113] Handle TEvProposeTransaction 2025-04-07T13:49:59.410655Z node 1 :TX_PROXY DEBUG: actor# [1:7490568128972671688:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-04-07T13:49:59.410725Z node 1 :TX_PROXY DEBUG: actor# [1:7490568128972671688:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7490568133267639465:2432] 2025-04-07T13:49:59.508426Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639465:2432] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-07T13:49:59.508491Z node 1 :TX_PROXY DEBUG: Actor# [1:7490568133267639465:2432] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:59.508570Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568128972671725:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet ... 
ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.000573Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568189173472988:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.000691Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568245008049946:4076], recipient# [3:7490568245008049945:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.947111Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.947258Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568206353343115:3172] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.947358Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568245008049963:4077], recipient# [3:7490568245008049962:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:25.977156Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-04-07T13:50:25.977279Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568189173472988:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:25.977346Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568245008049968:4081], recipient# [3:7490568245008049967:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.001869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.002018Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568189173472988:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:26.002115Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568249303017266:4082], recipient# [3:7490568249303017265:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.954712Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.954841Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for 
TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568206353343115:3172] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:26.954915Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568249303017286:4086], recipient# [3:7490568249303017285:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.978345Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:26.978471Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { Subscriber: [3:7490568189173472988:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:26.978544Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568249303017288:4087], recipient# [3:7490568249303017287:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:27.003316Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490568189173472555:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:27.003411Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490568189173472555:2138], cacheItem# { Subscriber: { 
Subscriber: [3:7490568189173472988:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:27.003517Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490568253597984586:4088], recipient# [3:7490568253597984585:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> DemoTx::Scenario_1 [GOOD] >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> TopicService::OneConsumer_TheRangesOverlap |85.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2025-04-07T13:49:55.640399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:55.656594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:55.657286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:55.657533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.658495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.658553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb8/r3tmp/tmpAFlDI5/pdisk_1.dat 2025-04-07T13:49:56.089790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21150, node 1 TClient is connected to server localhost:23201 2025-04-07T13:49:56.912169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:56.912220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:56.912262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:56.912809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:05.508540Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:640:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.509063Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:05.509196Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:05.511387Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:638:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.511710Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:05.511961Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb8/r3tmp/tmpuaEheo/pdisk_1.dat 2025-04-07T13:50:05.826277Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14312, node 3 TClient is connected to server localhost:25906 2025-04-07T13:50:06.264582Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.264632Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.264694Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.264838Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:14.797470Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:14.836493Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:14.841315Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.841612Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:14.844024Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.844145Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb8/r3tmp/tmp3O6ae6/pdisk_1.dat 2025-04-07T13:50:15.292412Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7477, node 5 TClient is connected to server localhost:22233 2025-04-07T13:50:15.695356Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.695413Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.695440Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.695965Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:27.372220Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:27.410547Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:27.411026Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:27.411196Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:27.417000Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:27.417319Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb8/r3tmp/tmpBP2B0e/pdisk_1.dat 2025-04-07T13:50:28.151132Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15060, node 7 TClient is connected to server localhost:21094 2025-04-07T13:50:28.785935Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:28.785991Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:28.786019Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:28.786384Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TPartitionWriterCacheActorTests::WriteReplyOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] Test command err: 2025-04-07T13:49:37.610644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:37.610981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:37.611261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001067/r3tmp/tmpVb0KRM/pdisk_1.dat 2025-04-07T13:49:37.984479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.025789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:38.081469Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:38.082460Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:38.082660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:38.082817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:38.097096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:38.232287Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:38.232358Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:38.232498Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:38.373133Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:38.373797Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:38.373913Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:38.374216Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:38.374413Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:38.374514Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:38.374804Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:38.376304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.377369Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:38.377475Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:38.407569Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:38.408562Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:38.409004Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:38.409301Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:38.459290Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:38.460005Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:38.460130Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:38.461790Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:38.461873Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:38.461934Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:38.462249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:38.520449Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:38.520655Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:38.520992Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:38.521056Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:38.521103Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:38.521147Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:38.521570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:38.521636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:38.521856Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:38.521953Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:38.522398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:38.522436Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:38.522490Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:38.522546Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:38.522595Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:38.522654Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:38.522691Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:38.522732Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:38.522771Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:38.522812Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:38.523001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:38.523044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:38.523152Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:38.523401Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:38.523464Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:38.523554Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:38.523619Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:38.523664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:38.523699Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:38.523730Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:38.523981Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:38.524013Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:38.524045Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:38.524080Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:38.524158Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:38.524188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:38.524224Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:38.524256Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:38.524287Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:38.525042Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:38.525138Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:38.525170Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:38.525209Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:49:38.525272Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:49:38.527471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:38.527519Z n ... ng: false } RuntimeSettings { TimeoutMs: 300000 ExecType: DATA UseSpilling: false StatsMode: DQ_STATS_MODE_NONE } } TxBody: cleared Tasks TxBody: injected Locks 2025-04-07T13:50:29.869563Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:988:2793], Recipient [7:632:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 988 RawX2: 30064773865 } TxBody: " \0018\000`\200\200\200\005jI\010\001\0329\n!\tY\001\000\000\000\000\000\000\021\000\000\001\000\000\020\000\001\030\001 \004)\000\001\205\000\000\000\000\0010\002\020\200\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2025-04-07T13:50:29.869648Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:50:29.869867Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:632:2537], Recipient [7:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:29.869912Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:29.870012Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:50:29.870284Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T13:50:29.870388Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T13:50:29.870514Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2025-04-07T13:50:29.870618Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2025-04-07T13:50:29.870688Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T13:50:29.870749Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:50:29.870795Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 
72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:50:29.870841Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-07T13:50:29.870921Z node 7 :TX_DATASHARD TRACE: Activated operation [0:281474976715669] at 72075186224037888 2025-04-07T13:50:29.870970Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2025-04-07T13:50:29.870990Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:50:29.871010Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T13:50:29.871032Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T13:50:29.871084Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-07T13:50:29.871162Z node 7 :TX_DATASHARD TRACE: Operation [0:281474976715669] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193400 2025-04-07T13:50:29.871268Z node 7 :TX_DATASHARD TRACE: KqpCommitLock LockId: 345 DataShard: 72075186224037888 Generation: 1 Counter: 4 SchemeShard: 72057594046644480 PathId: 2 2025-04-07T13:50:29.871329Z node 7 :TX_DATASHARD TRACE: Committing changes lockId# 345 in localTid# 1001 shard# 72075186224037888 2025-04-07T13:50:29.871547Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T13:50:29.871621Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:50:29.871656Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T13:50:29.871712Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:50:29.871785Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:29.871824Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2025-04-07T13:50:29.871861Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:50:29.871908Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:29.871949Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:29.872002Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2025-04-07T13:50:29.872026Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:29.872058Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2025-04-07T13:50:29.872924Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 
2025-04-07T13:50:29.872990Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:29.873051Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T13:50:29.873158Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715669 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 775 } } ComputeActorStats { } 2025-04-07T13:50:30.119413Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T13:50:30.119497Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715670 ProcessProposeKqpTransaction 2025-04-07T13:50:30.120464Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr888mzp83vwnxszkhqfapbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjgxMTUxNy0xMDg4YmU2ZS1lODZkMDkxZi1kYTZjNDkwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-04-07T13:50:30.134376Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1010:2818], Recipient [7:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T13:50:30.134514Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T13:50:30.134602Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-07T13:50:30.134663Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3000/18446744073709551615 2025-04-07T13:50:30.134748Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2025-04-07T13:50:30.134867Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2025-04-07T13:50:30.134947Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2025-04-07T13:50:30.135000Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:50:30.135050Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:50:30.135106Z node 7 :TX_DATASHARD TRACE: Activated operation [0:11] at 72075186224037888 2025-04-07T13:50:30.135156Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2025-04-07T13:50:30.135186Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:50:30.135207Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit 
ExecuteRead 2025-04-07T13:50:30.135229Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2025-04-07T13:50:30.135361Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T13:50:30.135677Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1010:2818], 0} after executionsCount# 1 2025-04-07T13:50:30.135754Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1010:2818], 0} sends rowCount# 3, bytes# 72, quota rows left# 998, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:50:30.135886Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1010:2818], 0} finished in read 2025-04-07T13:50:30.135969Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2025-04-07T13:50:30.135993Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T13:50:30.136017Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:30.136044Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:30.136097Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2025-04-07T13:50:30.136117Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:30.136148Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:11] at 72075186224037888 has finished 2025-04-07T13:50:30.136208Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T13:50:30.136340Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T13:50:30.137356Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1010:2818], Recipient [7:632:2537]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T13:50:30.137436Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 23 } }, { items { uint32_value: 3 } items { uint32_value: 31 } } >> DemoTx::Scenario_2 >> YdbIndexTable::CreateTableAddIndex [GOOD] >> YdbIndexTable::AlterTableAddIndex |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> YdbYqlClient::TestReadTableSnapshot [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TPersQueueTest::WriteExisting [GOOD] |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> TPersQueueTest::WriteExistingBigValue >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError |85.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> YdbYqlClient::TestYqlTypesFromPreparedQuery >> DataStreams::TestReservedResourcesMetering >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> DataStreams::TestControlPlaneAndMeteringData >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TGRpcYdbTest::ExplainQuery [GOOD] >> KqpStats::SysViewClientLost [FAIL] >> KqpNewEngine::PkSelect1 >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TGRpcYdbTest::KeepAlive >> KqpStats::SysViewCancelled >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-04-07T13:50:10.092137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568177256665437:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:10.092174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00199c/r3tmp/tmpiARch8/pdisk_1.dat 2025-04-07T13:50:10.811147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:10.826324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:10.826427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:10.861471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19529, node 1 2025-04-07T13:50:11.188210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:11.188236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-07T13:50:11.188248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:11.188340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:11.524877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.540010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:11.541357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.572942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:11.573185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:11.573220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:11.577596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:11.577630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:11.579155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.586060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033811625, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:11.586103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:11.586460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:11.587917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:11.589697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:11.591174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:11.591334Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:11.591432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:11.591471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:11.591512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:11.594398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:11.594443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:11.594460Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:11.594532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:15.714641Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490568200497012581:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:15.714691Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00199c/r3tmp/tmpqv67YJ/pdisk_1.dat 2025-04-07T13:50:15.907160Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:15.966102Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:15.966185Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 
20484, node 4 2025-04-07T13:50:15.993958Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:16.058822Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:16.058844Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:16.058850Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:16.058954Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:16.326652Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.327084Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:16.327113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.333579Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:16.333778Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:16.333797Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:16.341479Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:16.341512Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:16.343306Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.344252Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:16.346633Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033816392, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:16.346660Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:16.346916Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:16.348375Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:16.348515Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:16.348552Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:16.348617Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:16.348648Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:16.348696Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:16.349965Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdate ... 01Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:26.486241Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19692, node 10 2025-04-07T13:50:26.582417Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:26.582438Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:26.582447Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:26.582580Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6486 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:26.909754Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:26.910119Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:26.910148Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:26.917393Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:26.917581Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:26.917608Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:26.920125Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:26.920157Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:26.921099Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:26.922127Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:26.925548Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033826969, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:26.925587Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:26.925845Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:26.927536Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:26.927705Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:26.927747Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:26.927824Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:26.927862Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:26.927898Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:26.929916Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:26.929954Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:26.929971Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:26.930060Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:31.582273Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490568268155425854:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:31.582459Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00199c/r3tmp/tmpzyahz3/pdisk_1.dat 2025-04-07T13:50:31.874045Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:31.892387Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:31.892488Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:50:31.898473Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11007, node 13 2025-04-07T13:50:32.123969Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:32.123994Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:32.124003Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:32.125074Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:32.443692Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:32.444046Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:32.444068Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:32.453907Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-04-07T13:50:32.455884Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:32.455912Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:50:32.456294Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:32.462224Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:32.462253Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:32.464368Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:32.468613Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033832513, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:32.468659Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:32.468972Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:32.473794Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:32.473991Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:32.474042Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:32.474141Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:32.474189Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:32.474256Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:32.488872Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:32.488955Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:32.488974Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:32.489109Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> YdbYqlClient::TestConstraintViolation [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> THealthCheckTest::TestTabletIsDead [GOOD] >> TGRpcYdbTest::KeepAlive [GOOD] >> 
TGRpcNewClient::InMemoryTables >> THealthCheckTest::NoStoragePools [GOOD] >> YdbIndexTable::AlterTableAddIndex [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> KqpLimits::QueryExecTimeout >> TPersQueueTest::Init >> TTableProfileTests::DescribeTableOptions >> DataStreams::ChangeBetweenRetentionModes >> YdbTableBulkUpsert::AsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-04-07T13:50:09.664433Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568173599399465:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:09.664492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00199e/r3tmp/tmpF2B2pV/pdisk_1.dat 2025-04-07T13:50:10.377322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:10.422758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:10.422840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:10.436547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16146, node 1 2025-04-07T13:50:10.821490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:10.821510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:10.821518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:10.821599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:11.346429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.366878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:11.368184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.381233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:11.381626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:11.381640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:11.384819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:11.384846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2025-04-07T13:50:11.388005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.403865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:11.405608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033811450, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:11.405646Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:11.405870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:11.407832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:11.407966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:11.408014Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:11.408087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:11.408121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:11.408154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:11.414232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:11.414282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:11.414296Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:11.414375Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:14.025993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568195074236978:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:14.026089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:14.337361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:14.337772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:14.338239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:14.338263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:14.345267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:50:14.345466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:14.345635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:14.345702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:14.352322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.352368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.352389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:14.352622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.352642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.352650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:14.354598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:14.369292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:14.369366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:14.373511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:14.457579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:14.457608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:14.457673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:50:14.461535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:14.472269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033814516, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:14.472332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033814516 2025-04-07T13:50:14.472498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:50:14.474851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:14.475183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:14.475243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:14.477464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.477532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.477550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pat ... -07T13:50:35.706009Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.706027Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.706038Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:50:35.706146Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.706160Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.706170Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:50:35.706263Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.706277Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.706285Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:50:35.706384Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 
LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.706398Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.706406Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T13:50:35.709238Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:50:35.716700Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033835761, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:35.716745Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033835761, at schemeshard: 72057594046644480 2025-04-07T13:50:35.716841Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T13:50:35.716942Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033835761, at schemeshard: 72057594046644480 2025-04-07T13:50:35.716980Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2025-04-07T13:50:35.717019Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033835761, at schemeshard: 72057594046644480 2025-04-07T13:50:35.717066Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2025-04-07T13:50:35.717120Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1744033835761 2025-04-07T13:50:35.717522Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2025-04-07T13:50:35.719352Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:35.719830Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:35.719894Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2025-04-07T13:50:35.719965Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2025-04-07T13:50:35.720114Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T13:50:35.720145Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2025-04-07T13:50:35.720212Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2025-04-07T13:50:35.720239Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2025-04-07T13:50:35.720302Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2025-04-07T13:50:35.720329Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2025-04-07T13:50:35.720352Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:50:35.720380Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710659:1 2025-04-07T13:50:35.720393Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:50:35.720404Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2025-04-07T13:50:35.720420Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:50:35.724419Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.724459Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.724474Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:50:35.724682Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.724699Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.724709Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:50:35.724822Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.724835Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.724847Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:35.724946Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.724961Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.724970Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:50:35.725062Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:35.725077Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:35.725087Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:50:35.725122Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, 
txId: 281474976710659, subscribers: 1 2025-04-07T13:50:35.732598Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490568284679011027:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:50:35.802472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:35.802634Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:35.809775Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:35.832845Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7490568284679011117:2322], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:25: Error: At function: KiWriteTable!
<main>:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:35.834547Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MWIwN2E1YWEtMzY2MTUzNjgtODU4ZGJjNzgtZTViNzJkZmE=, ActorId: [10:7490568284679010854:2303], ActorState: ExecuteState, TraceId: 01jr888tnc9hpbngjwbh1sxx0h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:35.868594Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7490568284679011142:2328], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:35.870238Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MWIwN2E1YWEtMzY2MTUzNjgtODU4ZGJjNzgtZTViNzJkZmE=, ActorId: [10:7490568284679010854:2303], ActorState: ExecuteState, TraceId: 01jr888tt61xkcyb8ba6ga9hjz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] Test command err: 2025-04-07T13:49:55.313467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:55.328261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:55.328826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:55.329060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.329977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.330048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bce/r3tmp/tmpW4nDhl/pdisk_1.dat 2025-04-07T13:49:55.694051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1889, node 1 TClient is connected to server localhost:11984 2025-04-07T13:49:56.583788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:56.583842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:56.583883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:56.584238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:04.773724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:04.774009Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:04.774182Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:04.775409Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:04.775693Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:04.775806Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bce/r3tmp/tmpGBKoSI/pdisk_1.dat 2025-04-07T13:50:05.178411Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61105, node 3 TClient is connected to server localhost:30765 2025-04-07T13:50:05.759714Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:05.759779Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:05.759812Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:05.759978Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:14.731432Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:14.745515Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:14.745941Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.746158Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:14.748358Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.748496Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bce/r3tmp/tmpbf9og7/pdisk_1.dat 2025-04-07T13:50:15.256060Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27622, node 5 TClient is connected to server localhost:18334 2025-04-07T13:50:15.795681Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.795749Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.795793Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.796259Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-07T13:50:26.901869Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:26.917678Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:26.918172Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:26.918317Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:26.919884Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:26.920067Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bce/r3tmp/tmpZUNiy7/pdisk_1.dat 2025-04-07T13:50:27.319120Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10759, node 7 TClient is connected to server localhost:8708 2025-04-07T13:50:27.757068Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:27.757136Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:27.757179Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:27.757580Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 7 host: "::1" port: 12001 } 2025-04-07T13:50:34.865672Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:34.866020Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:34.866346Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bce/r3tmp/tmp7ftIVf/pdisk_1.dat 2025-04-07T13:50:35.400375Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9482, node 9 TClient is connected to server localhost:23259 2025-04-07T13:50:35.978836Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:35.978915Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:35.978967Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:35.979213Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration |85.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] Test command err: 2025-04-07T13:49:37.055872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:37.056159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:37.056393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001072/r3tmp/tmpUgCHOg/pdisk_1.dat 2025-04-07T13:49:37.408785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.453685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:37.508282Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:37.509320Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:37.509632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:37.509758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:37.521541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:37.637688Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:37.637799Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:37.638926Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:37.750884Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:37.751593Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:37.751691Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:37.752005Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:37.752216Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:37.752309Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:37.752612Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:37.754055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.755206Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:37.755302Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:37.784559Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:37.785736Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:37.786070Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:37.786334Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:37.818679Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:37.819294Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:37.819391Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:37.820676Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:37.820746Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:37.820789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:37.821077Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:37.846675Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:37.846839Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:37.846950Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:37.847019Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:37.847064Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:37.847100Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:37.847478Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:37.847537Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:37.847762Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:37.847858Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:37.848269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:37.848308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:37.848351Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:37.848405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:37.848450Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:37.848501Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:37.848533Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:37.848567Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:37.848599Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:37.848639Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:37.848815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:37.848857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:37.848939Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:37.849130Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:37.849180Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:37.849250Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:37.849298Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:37.849333Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:37.849361Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:37.849387Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:37.849621Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:37.849649Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:37.849677Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:37.849699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:37.849753Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:37.849775Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:37.849803Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:37.849828Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:37.849846Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:37.850851Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:37.850923Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:37.861687Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:37.861781Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:37.861817Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:37.861876Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 13:50:36.508150Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:50:36.508308Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:925:2743], Recipient [8:925:2743]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:36.508339Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:36.508435Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:50:36.508812Z node 8 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T13:50:36.508931Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T13:50:36.509051Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-04-07T13:50:36.509126Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T13:50:36.509208Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T13:50:36.509275Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:50:36.509353Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:50:36.509412Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2025-04-07T13:50:36.509491Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-04-07T13:50:36.509545Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T13:50:36.509571Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2025-04-07T13:50:36.509593Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T13:50:36.509614Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T13:50:36.509675Z node 8 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715661 not found 2025-04-07T13:50:36.509742Z node 8 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715661 expected 1:0 found 0:0 2025-04-07T13:50:36.509878Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-04-07T13:50:36.509951Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T13:50:36.509994Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T13:50:36.510018Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:50:36.510042Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:36.510076Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-04-07T13:50:36.510114Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:50:36.510165Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:36.510223Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:36.510268Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T13:50:36.510305Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:36.510343Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2025-04-07T13:50:36.510433Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:50:36.510506Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:36.510596Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-07T13:50:36.510717Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:36.511305Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ZmVjOTI5NC00MWZiN2Q1MS1lNDEyMDI3Yi03OGNjYTk0Nw==, ActorId: [8:802:2649], ActorState: ExecuteState, TraceId: 01jr888vbhdtywypf3r2yxm87q, Create QueryResponse for error on request, msg: 2025-04-07T13:50:36.512199Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr888vbhdtywypf3r2yxm87q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZmVjOTI5NC00MWZiN2Q1MS1lNDEyMDI3Yi03OGNjYTk0Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:50:36.512535Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:953:2649], Recipient [8:925:2743]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 953 RawX2: 34359741017 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-07T13:50:36.512575Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:50:36.512885Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:925:2743], Recipient [8:925:2743]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:36.512919Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T13:50:36.512971Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:50:36.513144Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T13:50:36.513235Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-07T13:50:36.513274Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T13:50:36.513299Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T13:50:36.513338Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:50:36.513364Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:50:36.513397Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-07T13:50:36.513467Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-07T13:50:36.513498Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T13:50:36.513533Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:50:36.513556Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T13:50:36.513579Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T13:50:36.513642Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2025-04-07T13:50:36.513761Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-04-07T13:50:36.513852Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T13:50:36.513914Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 
2025-04-07T13:50:36.513940Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T13:50:36.513961Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:50:36.513984Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:36.514041Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T13:50:36.514147Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-07T13:50:36.514178Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:50:36.514210Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:50:36.514240Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:50:36.514285Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T13:50:36.514314Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:50:36.514341Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-07T13:50:36.514412Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:50:36.514444Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:36.514483Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:36.890461Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:969:2775], Recipient [8:925:2743]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:36.890577Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:36.890651Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:968:2774], serverId# [8:969:2775], sessionId# [0:0:0] 2025-04-07T13:50:36.890776Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [8:558:2485], Recipient [8:925:2743]: NKikimr::TEvDataShard::TEvGetOpenTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] Test command err: 2025-04-07T13:50:05.886456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568157962014441:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:05.886505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019c0/r3tmp/tmpEg4b8q/pdisk_1.dat 2025-04-07T13:50:06.539339Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:06.562531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-07T13:50:06.562606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:06.569088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20422, node 1 2025-04-07T13:50:06.893305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.893328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.893334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.893424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:07.232236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.238124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:07.239595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.245461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:07.245619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:07.245633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:07.249248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:07.249491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:07.249504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:07.253390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.257065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033807299, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:07.257091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:07.257339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:07.259192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.259325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.259362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:07.259439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:07.259472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:07.259531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:07.262011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:07.262055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:07.262069Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:07.262137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:09.524625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:09.525539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:09.532290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:09.532325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:09.536058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2025-04-07T13:50:09.536259Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:09.536426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:09.536479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:09.538932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.538989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.539006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:09.539183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.539199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.539207Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:09.540926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:09.565024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.565978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 
2025-04-07T13:50:09.566657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.566882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.567010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.567139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.567257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.567398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.567510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 28 ... MESHARD INFO: MkDir::TPropose operationId#281474976715659:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:36.587323Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:36.587416Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:36.593454Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.593515Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.593537Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:50:36.593837Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.594847Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.594870Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:50:36.595109Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.595134Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.595146Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:50:36.595289Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.595308Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.595320Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2025-04-07T13:50:36.595427Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.595445Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.595456Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2025-04-07T13:50:36.595587Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T13:50:36.633786Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033836643, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:36.633845Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033836643, at schemeshard: 72057594046644480 2025-04-07T13:50:36.634000Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T13:50:36.634144Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033836643, at schemeshard: 72057594046644480 2025-04-07T13:50:36.634188Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2025-04-07T13:50:36.634253Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033836643, at schemeshard: 72057594046644480 2025-04-07T13:50:36.634296Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2025-04-07T13:50:36.634365Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1744033836643 2025-04-07T13:50:36.634415Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2025-04-07T13:50:36.640162Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:36.640845Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:36.640935Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2025-04-07T13:50:36.641035Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2025-04-07T13:50:36.641269Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2025-04-07T13:50:36.641317Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2025-04-07T13:50:36.641404Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 
2025-04-07T13:50:36.641453Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2025-04-07T13:50:36.641528Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2025-04-07T13:50:36.641566Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2025-04-07T13:50:36.641594Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T13:50:36.641631Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2025-04-07T13:50:36.641649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2025-04-07T13:50:36.641661Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2025-04-07T13:50:36.641681Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2025-04-07T13:50:36.653402Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.653486Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.653509Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:50:36.653801Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.653834Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.653846Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:36.653981Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.654004Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.654015Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:50:36.654134Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.654153Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.654163Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T13:50:36.654861Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:36.654890Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:36.654904Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2025-04-07T13:50:36.654958Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T13:50:36.673919Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490568289318686939:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:50:36.730506Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:36.730685Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:36.733198Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:36.868278Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr888vgwbzcd2tkx5fh69jxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTVjNTIxMTItMzM3MDJhNDAtY2Q1YzQ1YjgtZGY4M2U2Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:36.887457Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033836895, txId: 281474976715661] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] Test command err: 2025-04-07T13:50:13.837592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568192789917753:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:13.837638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001935/r3tmp/tmpEI9fAv/pdisk_1.dat 2025-04-07T13:50:14.613452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:14.619986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:14.620064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:14.639428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20971, node 1 2025-04-07T13:50:15.117222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.117240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.117259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.117349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:15.582010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.601384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.602879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.613635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:15.613842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.613854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:15.617690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.617721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:15.619279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.628863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.633433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815671, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.633469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.633714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:15.655949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.656100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.656143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:15.656210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:15.656243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:15.656290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:15.658722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:15.658758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:15.658772Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:15.658847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:18.443285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568214264755043:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.511422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.742797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.743145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Json, at schemeshard: 72057594046644480 2025-04-07T13:50:18.749535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Json, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:50:18.840253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568192789917753:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:18.840305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:18.842169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568214264755077:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.842228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.853130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.853392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Yson, at schemeshard: 72057594046644480 2025-04-07T13:50:18.858868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Yson, operation: CREATE TABLE, path: /Root/Test test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001935/r3tmp/tmpCEBXAu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29225, node 4 TClient is connected to server localhost:24736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001935/r3tmp/tmps8U1WE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5723, node 7 TClient is connected to server localhost:29221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:50:37.340503Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490568297109714133:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:37.340542Z node 10 :METADATA_PROVIDER E ... 
known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:40.919893Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.920453Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.920535Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2025-04-07T13:50:40.920792Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:40.920899Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:40.920976Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:40.922607Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:50:40.922728Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.922756Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.922774Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:40.922973Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.922988Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.922998Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:40.923117Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.923130Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.923137Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:50:40.923240Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.923259Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.923265Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:50:40.923343Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.923379Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.923389Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:50:40.930458Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033840976, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:40.930507Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033840976, at schemeshard: 72057594046644480 2025-04-07T13:50:40.930601Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:50:40.930683Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033840976, at schemeshard: 72057594046644480 2025-04-07T13:50:40.930716Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T13:50:40.930767Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033840976, at schemeshard: 72057594046644480 2025-04-07T13:50:40.930806Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2025-04-07T13:50:40.930887Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1744033840976 2025-04-07T13:50:40.930932Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2025-04-07T13:50:40.936657Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.937269Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.937348Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2025-04-07T13:50:40.937441Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2025-04-07T13:50:40.937648Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T13:50:40.937693Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2025-04-07T13:50:40.937790Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:40.937842Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2025-04-07T13:50:40.937932Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2025-04-07T13:50:40.937976Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2025-04-07T13:50:40.938009Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:40.938055Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T13:50:40.938077Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 281474976715658:2 2025-04-07T13:50:40.938091Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2025-04-07T13:50:40.938109Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2025-04-07T13:50:40.939166Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.939213Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.939233Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:40.939495Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.939516Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.939530Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:50:40.939666Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.939688Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.939698Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:50:40.939790Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.939807Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.939816Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:40.939911Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:40.939925Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:40.939936Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:50:40.939970Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T13:50:40.943688Z node 10 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolCreatorActor] ActorId: [10:7490568309994616850:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:50:41.019128Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:41.019269Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:41.022018Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: 2025-04-07T13:49:56.655977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:56.670965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:56.671487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:56.671648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:56.672375Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:56.672429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb7/r3tmp/tmpjQdTtb/pdisk_1.dat 2025-04-07T13:49:57.140087Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14043, node 1 TClient is connected to server localhost:18943 2025-04-07T13:49:57.684698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:57.684751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:57.684799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:57.685129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:05.647564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.647917Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:05.648087Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:05.650244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.650517Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:05.650632Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb7/r3tmp/tmpMY1Wsa/pdisk_1.dat 2025-04-07T13:50:06.062500Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63447, node 3 TClient is connected to server localhost:6344 2025-04-07T13:50:06.672184Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.672247Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.672280Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.672460Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:15.777348Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:15.791659Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:15.792111Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:15.792316Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:15.795252Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:15.795385Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb7/r3tmp/tmpzmGhSY/pdisk_1.dat 2025-04-07T13:50:16.242561Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9028, node 5 TClient is connected to server localhost:19693 2025-04-07T13:50:16.794820Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:16.794900Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:16.794946Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:16.795136Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:31.936636Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:31.982048Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:31.982674Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:31.982848Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:31.996824Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:31.997282Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb7/r3tmp/tmphn3Vyp/pdisk_1.dat 2025-04-07T13:50:33.017948Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8108, node 7 TClient is connected to server localhost:13164 2025-04-07T13:50:34.424325Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:34.424417Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:34.424471Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:34.425009Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:40.893996Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:40.894374Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:40.894597Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb7/r3tmp/tmpLuG8BU/pdisk_1.dat 2025-04-07T13:50:41.276286Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32327, node 9 TClient is connected to server localhost:29205 2025-04-07T13:50:41.726889Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:41.726956Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:41.726995Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:41.727520Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbIndexTable::AlterTableAddIndex [GOOD] Test command err: 2025-04-07T13:50:05.551204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568158013044992:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:05.551254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019cd/r3tmp/tmp48vJ7e/pdisk_1.dat 2025-04-07T13:50:06.362510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:06.399007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:06.399121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:06.423190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2627, node 1 2025-04-07T13:50:06.795722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.795748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.795757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.795868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:07.301121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.310951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:07.312333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.326801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:07.327013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:07.327025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:07.329355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:07.329382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:50:07.334024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:07.334296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.345885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033807383, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:07.345924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:07.346167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:07.353794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.354101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.354152Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:07.354277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:07.354313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:07.354354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:07.356609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:07.356658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:07.356688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:07.356778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:09.594386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:09.594854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:09.595437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:09.595456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:09.597789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2025-04-07T13:50:09.597977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:09.598112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:09.598163Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:09.599505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.599537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.599551Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:09.599744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.599771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.599779Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:09.602285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:09.605080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:09.605144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:09.606770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:09.674398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:09.674425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:09.674483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:50:09.680990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:09.686174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033809728, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:09.686225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033809728 2025-04-07T13:50:09.686328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:50:09.690477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:09.690722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:09.690774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:09.692460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.692484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.692509Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:09.692694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:09.696623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:09.696642Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 28147 ... CHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976710759:2 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037892 Status: PREPARED TxId: 281474976710759 MinStep: 1744033837218 MaxStep: 18446744073709551615 PrepareArriveTime: 1744033837295651 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 106 } } 2025-04-07T13:50:37.301388Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:37.301409Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 3 -> 128 2025-04-07T13:50:37.307479Z node 13 :FLAT_TX_SCHEMESHARD INFO: TFinalizeBuildIndex TPropose operationId#281474976710759:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:37.307808Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976710759:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:37.312702Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033837357, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:37.312767Z node 13 :FLAT_TX_SCHEMESHARD INFO: TFinalizeBuildIndex TPropose operationId#281474976710759:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033837357 2025-04-07T13:50:37.312925Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-07T13:50:37.313032Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTableIndex TPropose operationId#281474976710759:1 HandleReply TEvOperationPlan, step: 1744033837357, at schemeshard: 72057594046644480 2025-04-07T13:50:37.313141Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:1 128 -> 240 2025-04-07T13:50:37.313257Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976710759:2 HandleReply TEvOperationPlan, operationId: 281474976710759:2, stepId: 1744033837357, at schemeshard: 72057594046644480 2025-04-07T13:50:37.313441Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 128 -> 129 2025-04-07T13:50:37.322004Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:37.327083Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:37.327198Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:1 ProgressState 
2025-04-07T13:50:37.327306Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:1 progress is 1/3 2025-04-07T13:50:37.327586Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:37.327800Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:37.329412Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:50:37.329473Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T13:50:37.329495Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-07T13:50:37.329730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:50:37.329771Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T13:50:37.329783Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T13:50:37.329893Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:50:37.329918Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T13:50:37.329927Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 4 2025-04-07T13:50:37.342145Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 976 } } 2025-04-07T13:50:37.380013Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037892 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 50 ProposeLatency: 63 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 1031 } } 2025-04-07T13:50:37.380302Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 57 ProposeLatency: 61 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 1052 } } 2025-04-07T13:50:37.380504Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 
72075186224037890 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 23 ProposeLatency: 64 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 2272 } } 2025-04-07T13:50:37.380700Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 45 ProposeLatency: 62 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 7399 } } 2025-04-07T13:50:37.380831Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.380863Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.380890Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T13:50:37.382456Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.382629Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.382734Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.382835Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.383632Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976710759 Step: 1744033837357 OrderId: 281474976710759 ExecLatency: 61 ProposeLatency: 66 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1494 } } 2025-04-07T13:50:37.384223Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:37.384254Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2025-04-07T13:50:37.384278Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2025-04-07T13:50:37.384677Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2025-04-07T13:50:37.384834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2025-04-07T13:50:37.386256Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2025-04-07T13:50:37.386342Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2025-04-07T13:50:37.386391Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T13:50:37.386511Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2025-04-07T13:50:37.386526Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 281474976710759:2 2025-04-07T13:50:37.388013Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T13:50:37.390960Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715659, at schemeshard: 72057594046644480 2025-04-07T13:50:37.392853Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/TheTable 2025-04-07T13:50:37.393067Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:37.394542Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/TheTable 2025-04-07T13:50:37.394631Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-07T13:50:37.394842Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-07T13:50:37.396320Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2025-04-07T13:50:37.397625Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033837441, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:37.397674Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1744033837441 2025-04-07T13:50:37.397700Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T13:50:37.399379Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2025-04-07T13:50:37.399483Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T13:50:37.399532Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T13:50:37.401531Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::StorageLimit50 [GOOD] Test command err: 2025-04-07T13:49:58.082647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:58.097846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:58.098322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:58.098502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:58.099256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:58.099306Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba8/r3tmp/tmpqUSobV/pdisk_1.dat 2025-04-07T13:49:58.604232Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16008, node 1 TClient is connected to server localhost:21682 2025-04-07T13:49:59.047413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:59.047464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:59.047499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:59.047794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-8d1d" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-8d1d" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-4847-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-9a33-ef3e-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9a33-4847-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 
12001 } 2025-04-07T13:50:07.297467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:07.297818Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:07.297978Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:07.299352Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:07.299558Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:07.299653Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba8/r3tmp/tmpOt3Wqt/pdisk_1.dat 2025-04-07T13:50:07.628520Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8093, node 3 TClient is connected to server localhost:14185 2025-04-07T13:50:08.061433Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:08.061496Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:08.061533Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:08.061690Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:16.903858Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:16.917442Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:16.917846Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:16.918026Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:16.920311Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:16.920432Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba8/r3tmp/tmpQhuPpA/pdisk_1.dat 2025-04-07T13:50:17.374426Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13227, node 5 TClient is connected to server localhost:19040 2025-04-07T13:50:17.883472Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:17.883524Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:17.883546Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:17.883949Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:32.037091Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:32.072938Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:32.073318Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:32.073437Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:32.074707Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:32.074825Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba8/r3tmp/tmpP01gP8/pdisk_1.dat 2025-04-07T13:50:32.627541Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16216, node 7 TClient is connected to server localhost:11438 2025-04-07T13:50:33.354513Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:33.354567Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:33.354590Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:33.354956Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-04-07T13:50:14.028914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568196390378117:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:14.029064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001906/r3tmp/tmpr2B6AO/pdisk_1.dat 2025-04-07T13:50:14.594757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:14.594850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:14.610164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29304, node 1 2025-04-07T13:50:14.755540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:14.826733Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:14.826755Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:14.833451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T13:50:14.988319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:15.030104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.030263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:15.050687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:15.068794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.101642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.101679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.101732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:15.101784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.165273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.165306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.165315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.165397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:15.503041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.508464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.509828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.514124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:15.514335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.514348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:50:15.516083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.517775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.517801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:15.519399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.522691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815566, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.522722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.523046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:15.524994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.525128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.525188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:15.525277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:15.525309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:15.525341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:15.529525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:15.529579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:15.529594Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:15.529670Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:18.201042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568213570248183:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.201164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.467854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.468336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:18.469314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.469339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.477374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:50:18.477548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:18.477712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:18.477792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:18.479576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.479621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.479644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:18.479846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.479861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.479870Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:18.481728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:18.506441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:18.506532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:18.512609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:18.620948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:18.620976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 720575940466444 ... 
: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T13:50:38.051479Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:50:38.062565Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033838106, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:38.062612Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033838106, at schemeshard: 72057594046644480 2025-04-07T13:50:38.062774Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T13:50:38.062895Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033838106, at schemeshard: 72057594046644480 2025-04-07T13:50:38.062935Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2025-04-07T13:50:38.062978Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033838106, at schemeshard: 72057594046644480 2025-04-07T13:50:38.063010Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2025-04-07T13:50:38.063084Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1744033838106 2025-04-07T13:50:38.063176Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2025-04-07T13:50:38.069685Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:38.070206Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:38.070277Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2025-04-07T13:50:38.070366Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2025-04-07T13:50:38.070576Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T13:50:38.070624Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2025-04-07T13:50:38.070704Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2025-04-07T13:50:38.070738Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2025-04-07T13:50:38.070823Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2025-04-07T13:50:38.070862Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2025-04-07T13:50:38.070894Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:50:38.070936Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2025-04-07T13:50:38.070951Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:50:38.070961Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710659:3 2025-04-07T13:50:38.070980Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:50:38.076618Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:38.082554Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:38.082589Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:50:38.082880Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:38.082906Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:38.082918Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:50:38.083075Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:38.083093Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:38.083102Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:38.083210Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:38.083228Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:38.083237Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:50:38.083330Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:38.083364Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:38.083374Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:50:38.083414Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T13:50:38.085849Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490568299377141815:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:50:38.137892Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:38.138181Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:38.140822Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:38.801850Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr888wyf8n3116w4cchvyndx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:38.816896Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr888wyf8n3116w4cchvyndx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:38.842648Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490568277902304161:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:38.842752Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:38.844166Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr888wyf8n3116w4cchvyndx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:39.073166Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr888xrjd98vj9rb5zve1ef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:39.076235Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr888xrjd98vj9rb5zve1ef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:39.118704Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490568303672109318:2343], TxId: 281474976710665, task: 1. Ctx: { SessionId : ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=. CustomerSuppliedId : . 
TraceId : 01jr888xrjd98vj9rb5zve1ef7. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:50:39.119100Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490568303672109320:2344], TxId: 281474976710665, task: 2. Ctx: { SessionId : ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=. CustomerSuppliedId : . TraceId : 01jr888xrjd98vj9rb5zve1ef7. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [10:7490568303672109315:2300], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:39.134965Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, ActorId: [10:7490568295082174340:2300], ActorState: ExecuteState, TraceId: 01jr888xrjd98vj9rb5zve1ef7, Create QueryResponse for error on request, msg: 2025-04-07T13:50:39.137237Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr888xrjd98vj9rb5zve1ef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3NmMzZjEtODE5ZDU3MWItY2RlYzgzZmItNjNmMzEwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] Test command err: 2025-04-07T13:49:54.767597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:54.787364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:54.787890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:54.788074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:54.788909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:54.788965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmpd96T1l/pdisk_1.dat 2025-04-07T13:49:55.196066Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25381, node 1 TClient is connected to server localhost:4811 2025-04-07T13:49:55.697089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:55.697169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:55.700818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:55.701420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:03.890751Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:640:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.891194Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.891315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:03.893191Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:638:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.893446Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.893625Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmp7SvHIJ/pdisk_1.dat 2025-04-07T13:50:04.252611Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30666, node 3 TClient is connected to server localhost:14418 2025-04-07T13:50:04.690700Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:04.690760Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:04.690794Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:04.691222Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-d6d1" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-d6d1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-9a33-258e-ab18" type: "STORAGE" level: 2 } issue_log { id: "RED-9a33-258e-ab18" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-9a33-819b-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-99d2-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmp7SvHIJ/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmp7SvHIJ/pdisk_1.dat" } } } } } } 
type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmp7SvHIJ/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-9a33-819b-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-07T13:50:13.905911Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:13.920265Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:13.920772Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.921006Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:13.923190Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.923308Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmpiae5tO/pdisk_1.dat 2025-04-07T13:50:14.386122Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14813, node 5 TClient is connected to server localhost:8858 2025-04-07T13:50:15.065604Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.065671Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.065712Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.066235Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:22.823238Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:22.823682Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:22.823754Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmpQqyC53/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24376, node 7 TClient is connected to server localhost:7923 2025-04-07T13:50:32.185633Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:32.186006Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:32.186206Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bdf/r3tmp/tmpFitf8U/pdisk_1.dat 2025-04-07T13:50:32.902917Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9829, node 8 TClient is connected to server localhost:19146 2025-04-07T13:50:33.954956Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:33.955062Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:33.955129Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:33.955655Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-04-07T13:50:08.995331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568168610909212:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:08.995365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019af/r3tmp/tmpw4FKWj/pdisk_1.dat 2025-04-07T13:50:09.554775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:09.569178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:09.569291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:09.574396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16132, node 1 2025-04-07T13:50:09.675956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:09.675976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:09.675983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:09.676072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9102 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:10.183003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.188958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:10.190411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.200576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:10.200756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:10.200775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:10.203259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:10.203283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:10.205095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.207379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:10.209142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033810253, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:10.209172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:10.209424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:10.211653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:10.211800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:10.211860Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:10.211946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:10.211979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:10.212027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:10.215959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:10.216020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:10.216035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:10.216126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:15.012461Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490568201586467556:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:15.012514Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019af/r3tmp/tmpA02O1X/pdisk_1.dat 2025-04-07T13:50:15.210308Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:15.271541Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:15.271623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:15.274561Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21193, node 4 2025-04-07T13:50:15.487979Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.488001Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.488009Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.488119Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:15.739046Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.739381Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.739406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:15.745894Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:15.746074Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.746089Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:50:15.748010Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.748025Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:15.749298Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.751613Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.752224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815797, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.752242Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.752463Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:15.754112Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.754242Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.754280Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:15.754332Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:15.754358Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:15.754391Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:15.755805Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720 ... 
4-07T13:50:32.337801Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019af/r3tmp/tmpZJ6ZHS/pdisk_1.dat 2025-04-07T13:50:32.692691Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:32.764225Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:32.764335Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:32.782368Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26150, node 13 2025-04-07T13:50:33.034770Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:33.034792Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:33.034801Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:33.034921Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:33.809605Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:33.809982Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:33.810007Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:33.813647Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:33.813883Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:33.813901Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:33.816960Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:33.816996Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:33.819227Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:33.823513Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033833871, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:33.823554Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:33.823850Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:33.824896Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:33.825989Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:33.826157Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:33.826207Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:33.826288Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:33.826325Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:33.826367Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:33.828198Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:33.828232Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:33.828252Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:33.828315Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:33.969969Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:33.970111Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T13:50:33.970450Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:33.970473Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:33.985678Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 
2025-04-07T13:50:33.985873Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:33.986084Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:33.986149Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet# 72057594046644480 2025-04-07T13:50:33.987881Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:33.987918Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:33.987937Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:33.988134Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:33.988151Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:33.988162Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:33.994144Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:33.994224Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T13:50:34.002275Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:50:34.075412Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T13:50:34.080796Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:34.084515Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033834130, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:34.084557Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T13:50:34.084680Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:50:34.086396Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:34.086632Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:34.086700Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:34.086788Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T13:50:34.086830Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:34.086980Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715658, publications: 2, subscribers: 1 2025-04-07T13:50:34.088879Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.088929Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.088948Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:34.089146Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.089180Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.089190Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:34.089224Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-04-07T13:50:09.287472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568174849514279:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:09.287508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019a6/r3tmp/tmpl1GUep/pdisk_1.dat 2025-04-07T13:50:09.998291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:10.020086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:10.020158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:10.030643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25667, node 1 2025-04-07T13:50:10.213260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:10.213313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:10.213321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:10.213420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:10.597889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.607854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:10.609635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:10.611981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:10.612201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:10.612216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:10.615049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:10.615077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:10.615409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:10.616809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.622526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033810666, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:10.622575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:10.622864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:10.624799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:10.624976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:10.625030Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:10.625113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2025-04-07T13:50:10.625147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:10.625195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:10.631042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:10.631108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:10.631133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:10.631241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:13.719951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568192029384479:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:13.720033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:14.065509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:14.065935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:14.066388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:14.066406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:14.069545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:50:14.069786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:14.069992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:14.070093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:14.072368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.072426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.072447Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:14.076106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.076170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.076185Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:14.076558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:14.093436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:14.093519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:14.095432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:14.162727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:14.162754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:14.162817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:50:14.164339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:14.167145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033814215, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:14.167191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033814215 2025-04-07T13:50:14.167298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:50:14.168827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:14.169110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:14.169160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:14.171236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:14.171263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:14.171275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pat ... 025-04-07T13:50:34.146189Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:34.148592Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.148632Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.148654Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:34.148944Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.148968Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.148981Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:34.150191Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:50:34.150289Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:50:34.150298Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:50:34.150336Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:50:34.153780Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 
2025-04-07T13:50:34.185970Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:34.186082Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T13:50:34.196418Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:34.373959Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:34.373985Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:34.374066Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T13:50:34.385464Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:34.393905Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033834438, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:34.393958Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033834438 2025-04-07T13:50:34.394077Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T13:50:34.426539Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:34.426878Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:34.426934Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:34.449179Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490568260503262350:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:34.449526Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:34.449555Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:50:34.449662Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:50:34.449674Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T13:50:34.449715Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T13:50:34.462017Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.462061Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.462082Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:34.462321Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715658 2025-04-07T13:50:34.462340Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.462350Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:34.489082Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744033834438 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 87 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1199 } } 2025-04-07T13:50:34.525007Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:34.525060Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.525086Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T13:50:34.533499Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:34.533623Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T13:50:34.533678Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:34.570388Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr888sj9c4h9fgqtqj8f76qw, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:36100, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T13:50:34.621142Z node 10 :READ_TABLE_API DEBUG: [10:7490568281978100033:2311] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2025-04-07T13:50:34.621203Z node 10 :READ_TABLE_API DEBUG: [10:7490568281978100033:2311] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2025-04-07T13:50:34.627871Z node 10 :READ_TABLE_API DEBUG: [10:7490568281978100033:2311] got stream part, size: 35, RU required: 128 rate limiter absent 2025-04-07T13:50:34.628297Z node 10 :READ_TABLE_API DEBUG: [10:7490568281978100033:2311] Starting inactivity timer for 600.000000s with tag 3 2025-04-07T13:50:34.628347Z node 10 :READ_TABLE_API NOTICE: [10:7490568281978100033:2311] Finish grpc stream, status: 400000 2025-04-07T13:50:34.654596Z node 10 :GRPC_SERVER DEBUG: [0x51a00003e480] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.654854Z node 10 :GRPC_SERVER DEBUG: [0x51a0000cfc80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.655044Z node 10 :GRPC_SERVER DEBUG: [0x51a00010e680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.655269Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b6480] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.655437Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b9480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.655607Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c9080] received request Name# PersQueueRequest ok# false data# peer# 
current inflight# 0 2025-04-07T13:50:34.655765Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d0280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.655924Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b8e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656087Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b8280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656240Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b7c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656415Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a7480] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656584Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a6e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656763Z node 10 :GRPC_SERVER DEBUG: [0x51a00003de80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.656924Z node 10 :GRPC_SERVER DEBUG: [0x51b0002af580] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657097Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b6a80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657290Z node 10 :GRPC_SERVER DEBUG: [0x51a000032a80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657452Z node 10 :GRPC_SERVER DEBUG: [0x51a000033080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657615Z node 10 :GRPC_SERVER DEBUG: [0x51a000033c80] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657790Z node 10 :GRPC_SERVER DEBUG: [0x51a000034280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.657968Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ee280] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.658124Z node 10 :GRPC_SERVER DEBUG: [0x51a00010f280] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.658304Z node 10 :GRPC_SERVER DEBUG: [0x51a00010ec80] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.658471Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b7080] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T13:50:34.658600Z node 10 :GRPC_SERVER DEBUG: [0x51a0000edc80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] Test command err: 2025-04-07T13:50:10.677605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568180940826666:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:10.677754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001996/r3tmp/tmpmzJUh9/pdisk_1.dat 2025-04-07T13:50:11.299907Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T13:50:11.318650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:11.318720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:11.324274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28737, node 1 2025-04-07T13:50:11.444020Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:11.444043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:11.444048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:11.444156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:11.893177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.903500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:11.904968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:11.909466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:11.909648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:11.909660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:11.912311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:11.913453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:11.913482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:11.925498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:11.933926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033811975, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:11.933962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:11.934222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:11.937508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:11.937688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:11.937750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:11.937846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:11.937889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:11.937938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:11.939621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:11.939671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:11.939689Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:11.939761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:12.019181Z node 1 :TX_PROXY ERROR: Access denied for bad@builtin with access CreateTable to path Root 2025-04-07T13:50:16.719805Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490568205574851597:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:16.723696Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001996/r3tmp/tmp5P4nXL/pdisk_1.dat 2025-04-07T13:50:16.882121Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23287, node 4 2025-04-07T13:50:16.991396Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:16.994056Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:17.007206Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:17.081742Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:17.081763Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:17.081770Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:17.081865Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:17.283726Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.284088Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:17.284111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:17.287114Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:17.287330Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:17.287345Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:50:17.291052Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:17.291076Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:17.292414Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.295515Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033817344, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:17.295546Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:17.295765Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:17.297187Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:17.297381Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:17.297429Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:17.297495Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:17.297523Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:17.297557Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-04-07T13:50:17.297938Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 ... 
04-07T13:50:33.794472Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001996/r3tmp/tmp6aqgd8/pdisk_1.dat 2025-04-07T13:50:34.039358Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:34.147854Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:34.147943Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:34.157937Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19424, node 13 2025-04-07T13:50:34.397428Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:34.397449Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:34.397459Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:34.397580Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:34.774238Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.774578Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:34.774598Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.777967Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:34.778161Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:34.778174Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:34.780865Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:34.781188Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:34.781208Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:34.783054Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.786610Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033834830, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:34.786639Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:34.786903Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:34.788578Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:34.788757Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:34.788802Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:34.788869Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:34.788904Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:34.788944Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:34.790676Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:34.790705Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:34.790722Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:34.790779Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:34.857124Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.857276Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T13:50:34.857564Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:34.857584Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:34.859395Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 
2025-04-07T13:50:34.859567Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:34.859776Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:34.859835Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet# 72057594046644480 2025-04-07T13:50:34.861494Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.861532Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.861545Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:34.861767Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.861785Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.861794Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:34.873034Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:50:34.874648Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:34.874727Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T13:50:34.942524Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T13:50:34.944184Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:34.947228Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033834991, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:34.947264Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T13:50:34.947345Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:50:34.948898Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:34.949102Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:34.949174Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:34.949247Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T13:50:34.949283Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:34.949390Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715658, publications: 2, subscribers: 1 2025-04-07T13:50:34.951221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.951250Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.951264Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:34.951431Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:34.951445Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:34.951454Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:34.951481Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> TopicService::DifferentConsumers_TheRangesOverlap >> TGRpcNewClient::InMemoryTables [GOOD] >> KqpNewEngine::PkSelect1 [GOOD] >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> KqpNewEngine::PkSelect2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2025-04-07T13:50:00.495487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:00.512558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.517672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:00.517929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.518959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.519011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmpXj2bJo/pdisk_1.dat 2025-04-07T13:50:01.086614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5271, node 1 TClient is connected to server localhost:30406 2025-04-07T13:50:01.559549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:01.559608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:01.559656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:01.560076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:10.062367Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:10.062737Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:10.062906Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:10.064504Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:10.064765Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:10.064873Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmphUwy4o/pdisk_1.dat 2025-04-07T13:50:10.416384Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8601, node 3 TClient is connected to server localhost:21159 2025-04-07T13:50:10.934772Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:10.934837Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:10.934882Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:10.935077Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmphUwy4o/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: 
"/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmphUwy4o/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmphUwy4o/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-07T13:50:19.765751Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:19.790759Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:19.791247Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:19.791447Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:19.794582Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:19.794712Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmp6UfpJl/pdisk_1.dat 2025-04-07T13:50:20.174583Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21859, node 5 TClient is connected to server localhost:61991 2025-04-07T13:50:20.989920Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:20.989984Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:20.990030Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:20.990513Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmp6UfpJl/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: 
"/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmp6UfpJl/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmp6UfpJl/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-07T13:50:37.166190Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:496:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:37.166610Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:37.166766Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:37.169014Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:494:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:37.169427Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:37.169639Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba0/r3tmp/tmp87vdmb/pdisk_1.dat 2025-04-07T13:50:37.565680Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28127, node 7 TClient is connected to server localhost:21989 2025-04-07T13:50:42.244195Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:42.244277Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:42.244322Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:42.245301Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:42.264480Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:42.264652Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:42.315135Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-07T13:50:42.315840Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:42.538469Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-07T13:50:42.549100Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" 
level: 4 } location { id: 7 host: "::1" port: 12001 } >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-04-07T13:50:08.783228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568171775086065:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:08.783283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019b3/r3tmp/tmpzJtA7o/pdisk_1.dat 2025-04-07T13:50:09.414787Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:09.424645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:09.424938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:09.431169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26337, node 1 2025-04-07T13:50:09.689768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:09.689791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:09.689798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:09.689881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:10.018767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.026488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:10.027671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.033463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:10.033658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:10.033677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:50:10.039190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:10.041563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:10.041588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:10.044157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.051688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033810099, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:10.051730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:10.051998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:10.053856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:10.054013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:10.054050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:10.054142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:10.054193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:10.054227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:10.059490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:10.059537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:10.059550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:10.059642Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:10.131242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.131450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:10.133719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2025-04-07T13:50:10.133876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:10.134047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:10.134101Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:10.136066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:10.136119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:10.136138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:10.136381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:10.136398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:10.136405Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T13:50:10.138442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:10.141660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033810190, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:10.141701Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033810190, at schemeshard: 72057594046644480 2025-04-07T13:50:10.141800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:50:10.143485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:10.143637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:10.143672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:50:10.143719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:50:10.143743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 281474976710658:0 2025-04-07T13:50:10.143772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T13:50:10.145284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:10.145326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:10.145342Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:10.145520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:10.145534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:10.145542Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:10.145568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:50:10.188531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /Root/TheDirectory, pathId: 0, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.188930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:10.188956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:10.197883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: Statu ... 
281474976710659:1 progress is 4/4 2025-04-07T13:50:36.549620Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:50:36.549654Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2025-04-07T13:50:36.549668Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2025-04-07T13:50:36.549677Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2025-04-07T13:50:36.549694Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2025-04-07T13:50:36.554993Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:36.555042Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:36.555059Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T13:50:36.555285Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:36.555325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:36.555340Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:50:36.555485Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:36.555500Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:36.555509Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:36.555661Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:36.555678Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:36.555687Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T13:50:36.555831Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:36.555849Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:36.555859Z node 10 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T13:50:36.555894Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T13:50:36.582120Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490568292865212991:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:50:36.652497Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:36.652631Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:36.662096Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:36.769135Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr888ve51p1k8kryw50hemas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTBjYTA3ZTAtMzU1NmMxNTYtYzczOGUwYy02ZjliODkxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:39.434228Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490568304693358182:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:39.434301Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019b3/r3tmp/tmpkpeHrY/pdisk_1.dat 2025-04-07T13:50:39.654141Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:39.689613Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:39.689703Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:39.693491Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31053, node 13 2025-04-07T13:50:39.862164Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:39.862187Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:39.862195Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:39.862302Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14581 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:40.198297Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.198617Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:40.198641Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.205063Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:40.205302Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:40.205324Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:40.208077Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:40.208102Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:40.210419Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:40.214127Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033840262, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:40.214170Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:40.214451Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:40.214952Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:40.216349Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.216535Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.216592Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:40.216706Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:40.216749Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:40.216795Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:40.217568Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:40.217620Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:40.217641Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:40.217752Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 >> TBackupCollectionTests::CreateAbsolutePath >> TBackupCollectionTests::HiddenByFeatureFlag |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-04-07T13:50:11.465455Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568184372980775:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:11.469032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00198b/r3tmp/tmpPhz5zl/pdisk_1.dat 2025-04-07T13:50:12.081216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:12.081302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:12.085240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-07T13:50:12.210012Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14933, node 1 2025-04-07T13:50:12.302765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:12.302787Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:12.303422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:12.303571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:12.303739Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:12.314497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:12.315120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:12.315141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:12.315189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:12.315260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:50:12.555004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:12.555026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:12.555032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:12.566166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:13.195325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.209922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:13.211461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.213920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:13.214141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:13.214156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:13.215941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:13.215963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2025-04-07T13:50:13.224262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.241161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:13.269386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033813298, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:13.269422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:13.269741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:13.277688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:13.277863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:13.277921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:13.278041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:13.278093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:13.278150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:13.280416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:13.280460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:13.280528Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:13.280636Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:17.701496Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490568209525692419:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:17.711728Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00198b/r3tmp/tmpSQ8S78/pdisk_1.dat 2025-04-07T13:50:17.960427Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:18.009597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:18.009680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:18.015678Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17375, node 4 2025-04-07T13:50:18.223380Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:18.223404Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:18.223412Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:18.223520Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:18.697718Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.698004Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.698027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.700914Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:18.701061Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:18.701072Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:50:18.703989Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:18.708057Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:18.708090Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:18.713505Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 7205759404664448 ... 2025-04-07T13:50:43.987208Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:50:43.988730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.988781Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.988799Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:43.989065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.989095Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.989107Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:43.989275Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.989300Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.989309Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, 
LocalPathId: 3], version: 3 2025-04-07T13:50:43.989418Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.989440Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.989448Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:50:43.989543Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.989557Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.989564Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T13:50:43.992175Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033844035, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:43.992226Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033844035, at schemeshard: 72057594046644480 2025-04-07T13:50:43.992358Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T13:50:43.992511Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033844035, at schemeshard: 72057594046644480 2025-04-07T13:50:43.992560Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T13:50:43.992613Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744033844035, at schemeshard: 72057594046644480 2025-04-07T13:50:43.992650Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2025-04-07T13:50:43.992736Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1744033844035 2025-04-07T13:50:43.992792Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2025-04-07T13:50:43.994400Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:43.994898Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:43.994983Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2025-04-07T13:50:43.995076Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2025-04-07T13:50:43.995249Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T13:50:43.995289Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2025-04-07T13:50:43.995386Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 
2025-04-07T13:50:43.995426Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2025-04-07T13:50:43.995520Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2025-04-07T13:50:43.995554Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2025-04-07T13:50:43.995579Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:43.995611Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T13:50:43.995627Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2025-04-07T13:50:43.995639Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2025-04-07T13:50:43.995658Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2025-04-07T13:50:43.998384Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.998420Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.998440Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:43.998634Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.998651Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.998660Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:50:43.998796Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.998811Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.998820Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T13:50:43.998936Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.998951Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.998960Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T13:50:43.999065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.999081Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.999090Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:50:43.999127Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T13:50:44.004645Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490568319921850881:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:50:44.098312Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:44.098485Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:50:44.101249Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:44.105839Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YmQ5MWQyZjctZTY3NTAzYzktNWM2YjJlOWMtNTVkNWI5ZGM=, ActorId: [13:7490568319921850856:2296], ActorState: ExecuteState, TraceId: 01jr8892r6cswn4bm0y485xbyz, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-07T13:50:44.111319Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YmQ5MWQyZjctZTY3NTAzYzktNWM2YjJlOWMtNTVkNWI5ZGM=, ActorId: [13:7490568319921850856:2296], ActorState: ExecuteState, TraceId: 01jr8892wc92xth3vgx464tdh0, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-07T13:50:44.114076Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YmQ5MWQyZjctZTY3NTAzYzktNWM2YjJlOWMtNTVkNWI5ZGM=, ActorId: [13:7490568319921850856:2296], ActorState: ExecuteState, TraceId: 01jr8892whck90f0wq2b33d6nh, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestObtainLockNewApi >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD] Test command err: 2025-04-07T13:50:15.281189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568199518469823:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:15.281257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d5/r3tmp/tmpegLkPV/pdisk_1.dat 2025-04-07T13:50:15.806759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:15.841124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:15.841198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-07T13:50:15.858440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10468, node 1 2025-04-07T13:50:16.083779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:16.083800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:16.083811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:16.083894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:16.410966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.415625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:16.416544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.418208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:16.418415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:16.418436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:16.419605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:16.419920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:16.419940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:16.421072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:16.423829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033816469, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:16.423852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:16.424073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:16.425337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:16.425478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:16.425521Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:16.425587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:16.425617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:16.425648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:16.428639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:16.428938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:16.428968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:16.429029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:18.846235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/FooTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.846805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:18.847446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.847480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.853895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/FooTable 2025-04-07T13:50:18.854087Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:18.854304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:18.854383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:18.856068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.856126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.856145Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:18.856356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.856370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.856400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:18.857543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:18.866347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:18.866440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:18.885335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:18.954659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:18.954683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:18.954756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:50:18.962481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:18.968090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033819010, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:18.968142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033819010 2025-04-07T13:50:18.968252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:50:19.006716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:19.013089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:19.013203Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:19.026109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:19.026164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:19.026181Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:19.026423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:19.026456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:19.026466Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, tx ... e TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:43.987365Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:43.987400Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:43.989750Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744033844028 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1517 } } 2025-04-07T13:50:43.990121Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:43.990165Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.990211Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T13:50:43.991402Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:43.991501Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T13:50:43.991554Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:44.015670Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Table, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.016546Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:44.016583Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480 2025-04-07T13:50:44.018650Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Table 2025-04-07T13:50:44.018863Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterTable, at tablet# 72057594046644480 2025-04-07T13:50:44.018900Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T13:50:44.019253Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T13:50:44.020391Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:44.024267Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: PREPARED TxId: 281474976715659 MinStep: 1744033844029 MaxStep: 18446744073709551615 PrepareArriveTime: 1744033844021656 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 325 } } 2025-04-07T13:50:44.024286Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:44.024339Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2025-04-07T13:50:44.025663Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:44.028376Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033844070, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:44.028435Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 HandleReply TEvOperationPlan, operationId: 281474976715659:0, stepId: 1744033844070, at schemeshard: 72057594046644480 2025-04-07T13:50:44.028747Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 129 2025-04-07T13:50:44.030194Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:44.030435Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:44.030498Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:44.030999Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:44.031043Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:44.031066Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2025-04-07T13:50:44.032378Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715659 Step: 1744033844070 OrderId: 281474976715659 
ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1914 } } 2025-04-07T13:50:44.032798Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:44.032834Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.032854Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 129 -> 240 2025-04-07T13:50:44.034090Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T13:50:44.034190Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T13:50:44.034240Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T13:50:44.051549Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Table, pathId: , opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.052147Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:44.052268Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.054212Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Table 2025-04-07T13:50:44.054380Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxAlterTable, at tablet# 72057594046644480 2025-04-07T13:50:44.054407Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 2 -> 3 2025-04-07T13:50:44.054720Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T13:50:44.055764Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715660:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:44.060425Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: PREPARED TxId: 281474976715660 MinStep: 1744033844071 MaxStep: 18446744073709551615 PrepareArriveTime: 1744033844057376 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 191 } } 2025-04-07T13:50:44.060447Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:44.060510Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T13:50:44.064404Z node 13 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715660:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:44.068482Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033844112, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:44.068529Z node 13 :FLAT_TX_SCHEMESHARD 
INFO: TAlterTable TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, operationId: 281474976715660:0, stepId: 1744033844112, at schemeshard: 72057594046644480 2025-04-07T13:50:44.068798Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 129 2025-04-07T13:50:44.070808Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:44.071032Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:44.071086Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715660:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:44.072350Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T13:50:44.072397Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T13:50:44.072415Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T13:50:44.074947Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715660 Step: 1744033844112 OrderId: 281474976715660 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1272 } } 2025-04-07T13:50:44.075382Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715660:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:44.075424Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.075445Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 129 -> 240 2025-04-07T13:50:44.076954Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T13:50:44.077135Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T13:50:44.077185Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk |85.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> DataStreams::TestReservedResourcesMetering [GOOD] >> 
DataStreams::TestReservedStorageMetering >> TKeyValueTest::TestGetStatusWorks >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> THealthCheckTest::NoBscResponse [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath >> DemoTx::Scenario_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-04-07T13:49:55.327086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:55.343071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:55.343574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:55.343757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.344575Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:55.344621Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc2/r3tmp/tmpu1fhn3/pdisk_1.dat 2025-04-07T13:49:55.869646Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5340, node 1 TClient is connected to server localhost:5999 2025-04-07T13:49:56.715949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:56.716005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:56.716062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:56.716487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:05.078388Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.078648Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:05.078790Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:05.080251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:05.080473Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:05.080544Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc2/r3tmp/tmpSRnPV3/pdisk_1.dat 2025-04-07T13:50:05.455561Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25064, node 3 TClient is connected to server localhost:4238 2025-04-07T13:50:06.076972Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.077023Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.077050Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.077209Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:14.942623Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:691:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:14.943000Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.943394Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:14.944606Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:689:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:14.944877Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:14.945182Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc2/r3tmp/tmpndkzKW/pdisk_1.dat 2025-04-07T13:50:15.474875Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26263, node 5 TClient is connected to server localhost:12631 2025-04-07T13:50:20.906760Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:20.906832Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:20.906871Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:20.912995Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:20.999826Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:20.999982Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:21.094588Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-04-07T13:50:21.095189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" reason: "YELLOW-9a33-e9e2-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-07T13:50:34.393270Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:749:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:34.393881Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:34.394155Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:34.395107Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:747:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:34.395608Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:34.395685Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc2/r3tmp/tmppxlpe3/pdisk_1.dat 2025-04-07T13:50:34.959689Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3018, node 8 TClient is connected to server localhost:23120 2025-04-07T13:50:39.809030Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:39.809116Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:39.809197Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:39.809998Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:39.810640Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1227:2663]) [8:1482:2667] 2025-04-07T13:50:39.810832Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-04-07T13:50:39.834895Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-04-07T13:50:39.835014Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-07T13:50:39.835273Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2025-04-07T13:50:39.835367Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-07T13:50:39.835596Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-04-07T13:50:39.835679Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-07T13:50:39.835835Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-04-07T13:50:39.837652Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-07T13:50:39.892482Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvT ... 
y 1.000002048 2025-04-07T13:50:39.976421Z node 8 :HIVE DEBUG: HIVE#72057594037968897 [FBN] Finding best node for tablet PersQueue.72075186224037888.Leader.0 2025-04-07T13:50:39.976476Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 family {PersQueue.72075186224037888.Leader.0 Booting} 2025-04-07T13:50:39.976526Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 node 8 is not alive 2025-04-07T13:50:39.976625Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected usage 0.000003877 of node 10 2025-04-07T13:50:39.976691Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2025-04-07T13:50:39.976744Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-04-07T13:50:39.976801Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2025-04-07T13:50:39.976879Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2025-04-07T13:50:39.976948Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-04-07T13:50:39.987482Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-07T13:50:39.987678Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-07T13:50:39.987820Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:50:39.988080Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-04-07T13:50:39.988329Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:40.003781Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-04-07T13:50:40.003875Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:50:40.003983Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1452:2335]} 2025-04-07T13:50:40.004251Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2025-04-07T13:50:40.467880Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-07T13:50:40.468080Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1452:2335] 2025-04-07T13:50:40.468175Z node 8 
:HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 2025-04-07T13:50:40.468238Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-04-07T13:50:40.468391Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-07T13:50:40.468515Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-07T13:50:40.468578Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-04-07T13:50:40.468710Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-07T13:50:40.468816Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-07T13:50:40.468952Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-07T13:50:40.468997Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-07T13:50:40.469295Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-04-07T13:50:40.469364Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-07T13:50:40.469421Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-07T13:50:40.469472Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-07T13:50:40.500381Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1226:2662]} 2025-04-07T13:50:40.500492Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-07T13:50:43.878410Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2025-04-07T13:50:43.878530Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2025-04-07T13:50:43.878586Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-07T13:50:43.878693Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-04-07T13:50:43.879039Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-04-07T13:50:43.879167Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2025-04-07T13:50:43.879229Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-04-07T13:50:43.879348Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-07T13:50:43.879446Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-07T13:50:43.879523Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2025-04-07T13:50:43.879596Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-04-07T13:50:43.879645Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-07T13:50:43.879689Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-07T13:50:43.880161Z node 8 :HIVE DEBUG: HIVE#72057594037968897 
THive::TTxKillNode(10)::Execute 2025-04-07T13:50:43.880278Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:50:43.880326Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-04-07T13:50:43.880384Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2025-04-07T13:50:43.880441Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1522:2671] 2025-04-07T13:50:43.880494Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2025-04-07T13:50:43.882479Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1453:2335]) [8:1522:2671] 2025-04-07T13:50:43.885827Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1880:2691]) [8:1881:2696] 2025-04-07T13:50:43.890757Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:1880:2691]) [8:1881:2696] 2025-04-07T13:50:43.893529Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:1853:2334]) [8:1914:2698] 2025-04-07T13:50:43.906878Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:1852:2334] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-07T13:50:43.907032Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2025-04-07T13:50:43.907172Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:43.907259Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:50:43.907312Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-07T13:50:43.907364Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-07T13:50:43.907414Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-07T13:50:43.907512Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:43.908570Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: 
"YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-10" reason: "YELLOW-9a33-e9e2-11" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-04-07T13:50:48.191664Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T13:50:48.191754Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744033848190 ErrorReason# 2025-04-07T13:50:48.202890Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T13:50:48.203007Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. 
KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744033848202 ErrorReason# 2025-04-07T13:50:48.214894Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T13:50:48.214973Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1744033848214 ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoBscResponse [GOOD] Test command err: 2025-04-07T13:49:54.939880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:54.956289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:54.957122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:54.957346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:54.958194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:54.958235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf4/r3tmp/tmpSGO9wF/pdisk_1.dat 2025-04-07T13:49:55.324523Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3767, node 1 TClient is connected to server localhost:15695 2025-04-07T13:49:55.699900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:55.699955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:55.699994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:55.700366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:03.442978Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.443312Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:03.443479Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:03.445009Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:03.445263Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:03.445383Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf4/r3tmp/tmp5LIYdq/pdisk_1.dat 2025-04-07T13:50:03.808270Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4635, node 3 TClient is connected to server localhost:7936 2025-04-07T13:50:04.314068Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:04.314134Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:04.314172Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:04.314370Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:13.273861Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:13.287867Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:13.288310Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.288524Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:13.290904Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:13.291020Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf4/r3tmp/tmpR5WumT/pdisk_1.dat 2025-04-07T13:50:13.733739Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13888, node 5 TClient is connected to server localhost:25116 2025-04-07T13:50:14.434938Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:14.435014Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:14.435057Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:14.435574Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:27.575435Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:50:27.613088Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:27.613627Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:27.613789Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:27.621797Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:27.622001Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf4/r3tmp/tmp5QTBwv/pdisk_1.dat 2025-04-07T13:50:27.968304Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29933, node 7 TClient is connected to server localhost:29815 2025-04-07T13:50:28.480691Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:28.480765Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:28.480807Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:28.481227Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:36.301310Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:401:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:36.301676Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:36.301756Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf4/r3tmp/tmpKixKbg/pdisk_1.dat 2025-04-07T13:50:36.697946Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10773, node 9 TClient is connected to server localhost:4648 2025-04-07T13:50:37.299864Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:37.299938Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:37.299992Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:37.300231Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:37.400237Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:37.400451Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:37.420051Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TBackupCollectionTests::CreateTwice [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 18578, MsgBus: 1695 2025-04-07T13:49:22.476298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567974446626442:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:22.476343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d0/r3tmp/tmpnRaPws/pdisk_1.dat 2025-04-07T13:49:23.101080Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18578, node 1 2025-04-07T13:49:23.250622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:23.250641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:23.250650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:23.250759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:23.280575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:23.280678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:23.283842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1695 TClient is connected to server localhost:1695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:23.880204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:23.910373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:23.924481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.107584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.252371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:24.332366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:26.070693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567991626497203:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.070806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.259551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.288929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.318611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.392353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.421756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.459212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:26.547742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567991626497706:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.547855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.548119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567991626497711:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:26.550863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:26.562877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567991626497713:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:27.479183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567974446626442:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:27.479247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:27.550928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.388157Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490568000216433085:2486] TxId: 281474976710672. Ctx: { TraceId: 01jr886rwvaph44t0eve1j1zn4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDlkOTEzOGUtYWE2MWVjMTMtNTI4MWU0YmItNjA3YjZkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442498 > 50331648 2025-04-07T13:49:29.395272Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlkOTEzOGUtYWE2MWVjMTMtNTI4MWU0YmItNjA3YjZkZA==, ActorId: [1:7490567995921465630:2486], ActorState: ExecuteState, TraceId: 01jr886rwvaph44t0eve1j1zn4, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442498 > 50331648), code: 200509 Trying to start YDB, gRPC: 28299, MsgBus: 18678 2025-04-07T13:49:30.268779Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568008910924940:2063];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d0/r3tmp/tmpLqr1tN/pdisk_1.dat 2025-04-07T13:49:30.323033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:30.390425Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:30.400999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:30.401081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:30.404201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28299, node 2 2025-04-07T13:49:30.498741Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:30.498761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:30.498768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:30.498858Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18678 TClient is connected to server localhost:18678 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:49:30.954683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:30.960318Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:49:30.966312Z node 2 :FLAT_ ... 95829021:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:33.997967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:33.998174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568021795829026:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:34.002080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:34.014117Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:49:34.014351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568021795829028:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:35.269384Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568008910924940:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:35.269453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:45.380820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:45.380848Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:29.222532Z node 2 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr886zftcg7hr68wb66edm2d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTEwMTVmYzAtZTJhYThjMzctNTMxODQzMGEtNDE5Y2Q5YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2025-04-07T13:50:29.222930Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTEwMTVmYzAtZTJhYThjMzctNTMxODQzMGEtNDE5Y2Q5YzI=, ActorId: [2:7490568030385763931:2460], ActorState: ExecuteState, TraceId: 01jr886zftcg7hr68wb66edm2d, Create QueryResponse for error on request, msg: 2025-04-07T13:50:29.223084Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jr886zftcg7hr68wb66edm2d", SessionId: ydb://session/3?node_id=2&id=ZTEwMTVmYzAtZTJhYThjMzctNTMxODQzMGEtNDE5Y2Q5YzI=, Slow query, duration: 54.122358s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 18865, MsgBus: 7552 2025-04-07T13:50:30.193662Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568265698264117:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:30.193713Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d0/r3tmp/tmpzqNTYE/pdisk_1.dat 2025-04-07T13:50:30.660246Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:30.759637Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:30.759736Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:30.770192Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18865, node 3 2025-04-07T13:50:30.981283Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:30.981308Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:30.981317Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:30.981426Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7552 TClient is connected to server localhost:7552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:32.565699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:32.601744Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:32.612497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:32.781111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:33.269442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:33.482161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:35.199765Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568265698264117:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:35.211328Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:37.098809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568295763036895:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:37.098953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:37.130580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.173741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.263655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.324156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.403062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.517088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:37.641105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568295763037407:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:37.641241Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:37.641656Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568295763037412:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:37.650168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:37.662548Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568295763037414:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:39.174969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.132462Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTBmN2U3Ny0xMGMxMjMzNC1mMjg3OWU3MS0yNWU0YzU1Yw==, ActorId: [3:7490568304352972342:2468], ActorState: ExecuteState, TraceId: 01jr8891jd8fhbf57m2v5kfs96, Create QueryResponse for error on request, msg: >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] |85.3%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop >> DemoTx::Scenario_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-04-07T13:50:47.414453Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:50:47.417565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:50:47.417794Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T13:50:47.417868Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:50:47.417905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T13:50:47.417947Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:50:47.417999Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.418044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T13:50:47.418072Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T13:50:47.418663Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:47.418716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:259:2251], now have 1 active actors on pipe 2025-04-07T13:50:47.418771Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:50:47.435420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.441556Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.441723Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.442586Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.442694Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:47.443057Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:47.443414Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:268:2258] 2025-04-07T13:50:47.445871Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T13:50:47.445929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:268:2258] 2025-04-07T13:50:47.445972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:50:47.446292Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:50:47.446724Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:47.446776Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:271:2260], now have 1 active actors on pipe 2025-04-07T13:50:47.488857Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:50:47.492063Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:50:47.492298Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-07T13:50:47.492336Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:50:47.492367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-07T13:50:47.492411Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:50:47.492449Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.492497Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] empty tx queue 2025-04-07T13:50:47.492526Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-07T13:50:47.493130Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:47.493182Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:408:2363], now have 1 active actors on pipe 2025-04-07T13:50:47.493271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:50:47.493449Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.496051Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.496153Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.496956Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T13:50:47.497044Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:47.497342Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:47.497538Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:417:2370] 2025-04-07T13:50:47.499451Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-07T13:50:47.499500Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:417:2370] 2025-04-07T13:50:47.499555Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:50:47.499815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:50:47.500242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:47.500284Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:420:2372], now have 1 active actors on pipe 2025-04-07T13:50:47.513296Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:50:47.516483Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:50:47.516761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-07T13:50:47.516807Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:50:47.516841Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-07T13:50:47.516877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:50:47.516928Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.516970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2025-04-07T13:50:47.517000Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-07T13:50:47.517601Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:47.517640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:471:2411], now have 1 active actors on pipe 2025-04-07T13:50:47.517687Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:50:47.517845Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T13:50:47.520297Z node 1 :PERSQUEUE DEBUG: 
[PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T13:50:47.520407Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:47.521091Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [1:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T13:50:47.521194Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:47.521480Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:47.521657Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, St ... imeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T13:50:49.091964Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:49.092240Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:49.092396Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:481:2419] 2025-04-07T13:50:49.094448Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-07T13:50:49.094539Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:481:2419] 2025-04-07T13:50:49.094614Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:50:49.095183Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T13:50:49.095819Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.095871Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:484:2421], now have 1 active actors on pipe 2025-04-07T13:50:49.124733Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:50:49.128841Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:50:49.129215Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T13:50:49.129276Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:50:49.129318Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T13:50:49.129380Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:50:49.129437Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:49.129498Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T13:50:49.129540Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T13:50:49.130261Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.130324Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2460], now have 1 active actors on pipe 2025-04-07T13:50:49.130450Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:50:49.130654Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T13:50:49.134250Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T13:50:49.134442Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:49.135072Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T13:50:49.135202Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:49.135595Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:49.135817Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:544:2467] 2025-04-07T13:50:49.137881Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T13:50:49.137964Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:544:2467] 2025-04-07T13:50:49.138028Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:50:49.138394Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T13:50:49.138879Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.138934Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:547:2469], now have 1 active actors on pipe 2025-04-07T13:50:49.145177Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.145251Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:558:2474], now have 1 active actors on pipe 2025-04-07T13:50:49.145738Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.145775Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:560:2475], now have 1 active actors on pipe 2025-04-07T13:50:49.145871Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.145895Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:561:2475], now have 1 active actors on pipe 2025-04-07T13:50:49.145931Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.145952Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:562:2475], now have 1 active actors on pipe 2025-04-07T13:50:49.146613Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:49.146662Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:575:2486], now have 1 active actors on pipe 2025-04-07T13:50:49.176299Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:50:49.180224Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:50:49.180519Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T13:50:49.180558Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:50:49.180706Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:50:49.181184Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:49.181225Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T13:50:49.181317Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:50:49.181573Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:50:49.181748Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:634:2533] 2025-04-07T13:50:49.185664Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T13:50:49.187065Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T13:50:49.187281Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T13:50:49.187539Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T13:50:49.187715Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T13:50:49.187750Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T13:50:49.187785Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T13:50:49.187819Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T13:50:49.187866Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:634:2533] 2025-04-07T13:50:49.187918Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:50:49.188076Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T13:50:49.188302Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T13:50:49.189063Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:49.189135Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:560:2475] destroyed 2025-04-07T13:50:49.189173Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:49.189192Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:558:2474] destroyed 2025-04-07T13:50:49.189246Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:49.189265Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:561:2475] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } } } >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-04-07T13:50:14.217407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568195727850699:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:14.217462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018ed/r3tmp/tmpSnaNHy/pdisk_1.dat 2025-04-07T13:50:14.895975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:14.912984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:14.913054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:14.928429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc 
on GrpcPort 29378, node 1 2025-04-07T13:50:15.008898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:15.008968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:15.008980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:15.009051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:15.375470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.381583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.382711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.386194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:15.386425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.386446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:15.388600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.388634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:50:15.390483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.390713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.394009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815440, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.394048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.394319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:15.395773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.395964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.396015Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:15.396428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:15.396475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:15.396525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:15.398773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:15.398822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:15.398838Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:15.398943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:17.396645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568208612753355:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.396839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.840449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.840965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:17.841502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:17.841524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.846925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestTable 2025-04-07T13:50:17.847110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:17.847288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:17.847349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:17.848584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:17.848624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:17.848641Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:17.848858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:17.848876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:17.848888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:17.849995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:17.857551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:17.857644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:17.861828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:17.949619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:17.949643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:17.949700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:50:17.951490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:17.972970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033818009, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:17.973020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033818009 2025-04-07T13:50:17.973176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:50:17.977571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:17.977889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:17.977945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:17.979905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:17.979943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:17.979956Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 28147497671 ... 07T13:50:46.455592Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.455655Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.455678Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:46.456015Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.456039Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.456052Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:46.456197Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.456237Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.456251Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2025-04-07T13:50:46.456531Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.456552Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.456565Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-07T13:50:46.459916Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:50:46.471493Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:46.471592Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T13:50:46.471905Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:46.471943Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 2 -> 3 2025-04-07T13:50:46.484297Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:46.484831Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:46.623898Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:46.623923Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:46.624002Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 3 -> 128 2025-04-07T13:50:46.627472Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:46.627495Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:46.627572Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T13:50:46.634688Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:2 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:46.650692Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:46.663867Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033846702, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:46.663925Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033846702 2025-04-07T13:50:46.664039Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T13:50:46.664107Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose operationId#281474976715658:1 HandleReply TEvOperationPlan, step: 1744033846702, at schemeshard: 72057594046644480 2025-04-07T13:50:46.664230Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T13:50:46.664314Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose 
operationId#281474976715658:2 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033846702 2025-04-07T13:50:46.664339Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 129 2025-04-07T13:50:46.670081Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:46.670806Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:46.670874Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T13:50:46.670982Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 1/3 2025-04-07T13:50:46.671183Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:46.671312Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 ProgressState at tablet: 72057594046644480 2025-04-07T13:50:46.675730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.675776Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.675794Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:46.676015Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.676031Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.676040Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:46.676159Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.676175Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.676183Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2025-04-07T13:50:46.676293Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:46.676307Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:46.676316Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T13:50:46.679283Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME 
Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1744033846702 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 962 } } 2025-04-07T13:50:46.680547Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744033846702 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1390 } } 2025-04-07T13:50:46.680970Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:46.681022Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T13:50:46.681046Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 129 -> 240 2025-04-07T13:50:46.682605Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:50:46.682632Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:46.682649Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T13:50:46.688025Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2025-04-07T13:50:46.688162Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 2/3 2025-04-07T13:50:46.688398Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T13:50:46.688451Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/3 2025-04-07T13:50:46.688491Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:46.688599Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T13:50:46.688612Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2025-04-07T13:50:47.656287Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490568315315603933:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:47.656698Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> TTableProfileTests::DescribeTableOptions [GOOD] >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> DataShardWrite::UpsertImmediate >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> KqpIndexes::UpdateIndexSubsetPk |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> KqpIndexes::NullInIndexTableNoDataRead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-04-07T13:50:05.725978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568156023271219:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:05.726034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019c1/r3tmp/tmpLnnFui/pdisk_1.dat 2025-04-07T13:50:06.534845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:06.543351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:06.543433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:06.553356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8854, node 1 2025-04-07T13:50:06.613013Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:06.613033Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:06.756098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:06.762696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:06.762740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:06.762793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:06.762844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:50:06.786948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:06.813895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.813916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.813924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.813995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:07.300938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.308119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:07.309471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.316454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:07.316636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:07.316648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:07.318508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:07.318530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:07.320015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.323319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033807369, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:07.323352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:07.323652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:07.325379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.325523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.325564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:07.325646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:07.325679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:07.325720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:50:07.328546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:07.328586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:07.328606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:07.332770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T13:50:07.332961Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:24478 2025-04-07T13:50:07.744253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.744703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:07.744743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.746882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-07T13:50:07.747038Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.747172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.747210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:07.747229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:07.748467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:07.748493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:07.748510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:07.748687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:07.748710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:07.748726Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T13:50:07.749751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:07.749772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:50:07.751453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:07.754124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:07.755160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033807796, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:07.755174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-07T13:50:07.755270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:50:07.756384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.756495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.756519Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:50:07.756560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:50:07.756584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:50:07.756605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T13:50:07.757603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateA ... ication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:44.153958Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:44.154012Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T13:50:44.158673Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.159387Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:44.159422Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.162973Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2025-04-07T13:50:44.163229Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 waiting... 
2025-04-07T13:50:44.169512Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:44.169914Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:44.169960Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T13:50:44.173369Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:44.176961Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T13:50:44.676350Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7490568326940327640:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:44.676406Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:44.679341Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:44.679440Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:44.690216Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-07T13:50:44.693774Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:45.003049Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2025-04-07T13:50:45.145023Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2025-04-07T13:50:45.145071Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2025-04-07T13:50:45.151789Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:45.170406Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033845211, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:45.170462Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715659:0, at tablet# 72057594046644480 2025-04-07T13:50:45.170794Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T13:50:45.174024Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:45.174251Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:45.174328Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T13:50:45.174456Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T13:50:45.174515Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715659:0 2025-04-07T13:50:45.174861Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T13:50:45.177363Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:45.177428Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:45.177454Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2025-04-07T13:50:45.177547Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 TClient is connected to server localhost:18811 2025-04-07T13:50:45.384287Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T13:50:45.518238Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-07T13:50:45.518828Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> 
TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::TableWithSystemColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:50:46.868343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:50:46.868468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:50:46.868507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:50:46.868540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:50:46.868583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:50:46.868623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:50:46.868717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:50:46.869032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:50:46.939013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:50:46.939066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:46.958699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:50:46.958846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:50:46.959035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:50:46.969767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:50:46.969981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:46.970638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:46.970848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:50:46.972724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:46.973915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:50:46.973972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:46.974089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:50:46.974131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:50:46.974164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:50:46.974326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:50:46.983215Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:50:47.104473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:50:47.104740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.104967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:50:47.105224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:50:47.105284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.108049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:47.108187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:50:47.108354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.108429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:50:47.108472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:50:47.108518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:50:47.117750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.117828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:50:47.117863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:50:47.120646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.120721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.120763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.120849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.129655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:50:47.137096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:50:47.137399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:50:47.138517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:47.138674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:50:47.138720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.139006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:50:47.139057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.139214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:50:47.139285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:50:47.142325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:50:47.142368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:50:47.142551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:47.142593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:50:47.142845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.142891Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:50:47.142988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:50:47.143034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.143092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:50:47.143135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.143169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 1:0 2025-04-07T13:50:47.143198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:50:47.143274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:50:47.143310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:50:47.143337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:50:47.145264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:50:47.145391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:50:47.145424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:50:47.145455Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:50:47.145505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:50:47.145606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ompleteBarrier { TxId: 105 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-07T13:50:52.263387Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:1 240 -> 240 2025-04-07T13:50:52.264906Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.264938Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-04-07T13:50:52.265031Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T13:50:52.265059Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T13:50:52.265128Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.265168Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-07T13:50:52.265290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.265331Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-07T13:50:52.265370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-07T13:50:52.265417Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-04-07T13:50:52.265479Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:299:2291] message: TxId: 105 2025-04-07T13:50:52.265549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-07T13:50:52.265612Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:50:52.265654Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:50:52.265738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-07T13:50:52.265775Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-07T13:50:52.265793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-07T13:50:52.265898Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:50:52.265933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T13:50:52.267647Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.267752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:299:2291] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-04-07T13:50:52.267922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:50:52.267966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:515:2479] 2025-04-07T13:50:52.268145Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:517:2481], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.268189Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.268213Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-07T13:50:52.268786Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:582:2544], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-04-07T13:50:52.268847Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:50:52.270942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:50:52.271381Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T13:50:52.271577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-04-07T13:50:52.271642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-04-07T13:50:52.271706Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-07T13:50:52.271803Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-07T13:50:52.271857Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-07T13:50:52.273350Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-04-07T13:50:52.273462Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-04-07T13:50:52.273607Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.275599Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:50:52.275774Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: /MyRoot/.backups/collections/MyCollection1 2025-04-07T13:50:52.275835Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T13:50:52.276139Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T13:50:52.276182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T13:50:52.276608Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:588:2550], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:52.276694Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:52.276747Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T13:50:52.276884Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:299:2291], Recipient [7:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-07T13:50:52.276916Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T13:50:52.276998Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T13:50:52.277129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T13:50:52.277166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:586:2548] 2025-04-07T13:50:52.277356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:588:2550], 
Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.277388Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.277419Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-04-07T13:50:52.277875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:589:2551], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T13:50:52.277936Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T13:50:52.278026Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:50:52.278234Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 194us result status StatusSuccess 2025-04-07T13:50:52.278611Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 BackupCollectionVersion: 0 } } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestIncrementalKeySet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 
2025-04-07T13:50:47.285999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:50:47.286117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:50:47.286160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:50:47.286194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:50:47.286239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:50:47.286267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:50:47.286320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:50:47.286649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:50:47.360062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:50:47.360123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:47.375525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:50:47.375674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:50:47.375843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:50:47.381521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:50:47.381706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:47.382349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:47.382532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:50:47.384608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:47.385895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:50:47.385959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:47.386084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:50:47.386139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:50:47.386180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:50:47.386328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.393196Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:50:47.548127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:50:47.548368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.548586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:50:47.548832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:50:47.548891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.551495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:47.551636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:50:47.551854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.551950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:50:47.551993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:50:47.552050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:50:47.554524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.554587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:50:47.554649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:50:47.562369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.562441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.562492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.562593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.567134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:50:47.569859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:50:47.570084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:50:47.571113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:47.571291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:50:47.571338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.571602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:50:47.571666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:50:47.571852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:50:47.571944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:50:47.577769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:50:47.577829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:50:47.578009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:50:47.578059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:50:47.578338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:50:47.578395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:50:47.578525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:50:47.578566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.578620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:50:47.578682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:50:47.578723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:50:47.578757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:50:47.578848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:50:47.578900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:50:47.578934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-07T13:50:47.580997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:50:47.581133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:50:47.581171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:50:47.581212Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:50:47.581281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:50:47.581407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... te, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T13:50:52.914078Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-07T13:50:52.914141Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 11 2025-04-07T13:50:52.914201Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T13:50:52.914301Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.914884Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:202:2205], Recipient [6:120:2146]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 5 } 2025-04-07T13:50:52.914921Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T13:50:52.914977Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T13:50:52.915040Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T13:50:52.915069Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-07T13:50:52.915101Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 5 2025-04-07T13:50:52.915138Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:50:52.915203Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.915543Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:636:2589], Recipient [6:120:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-04-07T13:50:52.915579Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:52.915608Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T13:50:52.915776Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:578:2538], Recipient [6:120:2146]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 578 RawX2: 25769806314 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T13:50:52.915812Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-07T13:50:52.915906Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 578 RawX2: 25769806314 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T13:50:52.915953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-07T13:50:52.916096Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 578 RawX2: 25769806314 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T13:50:52.916157Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:50:52.916273Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 578 RawX2: 25769806314 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T13:50:52.916355Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.916396Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.916434Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T13:50:52.916484Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-04-07T13:50:52.916656Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.917032Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:202:2205], Recipient [6:120:2146]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-04-07T13:50:52.917081Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T13:50:52.917136Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T13:50:52.917199Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 
72057594046678944, cookie: 106 2025-04-07T13:50:52.917224Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-07T13:50:52.917267Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-04-07T13:50:52.917304Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-04-07T13:50:52.917379Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-04-07T13:50:52.917422Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.922098Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.922262Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.922895Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.922941Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.924884Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T13:50:52.924953Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.925062Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T13:50:52.925100Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.925242Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.925290Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.925329Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-04-07T13:50:52.925445Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:578:2538] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-04-07T13:50:52.925788Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:120:2146], Recipient [6:120:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T13:50:52.925829Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T13:50:52.925882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-07T13:50:52.925928Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-04-07T13:50:52.926062Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:50:52.926100Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-07T13:50:52.926144Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-07T13:50:52.926196Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-04-07T13:50:52.926266Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [6:298:2290] message: TxId: 106 2025-04-07T13:50:52.926322Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-07T13:50:52.926369Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-07T13:50:52.926406Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-07T13:50:52.926503Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-07T13:50:52.926550Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2025-04-07T13:50:52.926571Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2025-04-07T13:50:52.926638Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-07T13:50:52.927021Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T13:50:52.927051Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.928804Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:50:52.928901Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:298:2290] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-04-07T13:50:52.929113Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T13:50:52.929193Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:609:2562] 2025-04-07T13:50:52.929433Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:611:2564], Recipient [6:120:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.929490Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:50:52.929521Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestIncrementalKeySet [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] >> KqpUniqueIndex::InsertFkAlreadyExist >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue >> 
Viewer::UseTransactionWhenExecuteDataActionQuery [FAIL] |85.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::ExplainCollectFullDiagnostics >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogNonExistingRequest >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TopicService::UnknownConsumer >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerLegacy >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable >> DataStreams::ListStreamsValidation [GOOD] >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00196b/r3tmp/tmpjqi1aj/pdisk_1.dat 2025-04-07T13:50:14.113529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568193667079657:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:14.113736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:14.465079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:14.483425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:14.483499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:14.494618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27534, node 1 2025-04-07T13:50:14.696565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:14.696592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T13:50:14.696600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:14.696714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:15.261549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.267470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.269562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.276657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-04-07T13:50:15.276956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.276980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:15.285417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.285456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:15.288940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.289614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.313400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815342, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.313438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.313677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:15.325410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.325587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.325636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:15.325714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:15.325751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:15.325811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:15.330170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:15.330456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:15.330485Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:15.330578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:18.090924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568215141917160:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.091008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.091078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568215141917171:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:18.094038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:18.094228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.094254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:50:18.094311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.094322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:50:18.094374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.094413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:50:18.094624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T13:50:18.094822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:18.094839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:50:18.097462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:50:18.097688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:18.098140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:18.098197Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T13:50:18.098365Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:18.098455Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:18.098532Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:18.100171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.100202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.100218Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:18.100407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.100445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.100455Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:50:18.100576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:18.100591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:18.100598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025 ... 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:49.202744Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:49.203096Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:49.203133Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:49.205290Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:49.205494Z node 16 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:49.205517Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T13:50:49.208216Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:49.208256Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:49.210002Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:49.211088Z node 16 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:49.213943Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033849257, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:49.213989Z node 16 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:49.214258Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:49.216141Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:49.216311Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:49.216371Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:49.216493Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:49.216539Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:49.216593Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:50:49.218494Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:49.218549Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:49.218564Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:49.218640Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:50:49.333012Z node 16 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:58718) has now valid token of root@builtin 2025-04-07T13:50:49.419067Z node 16 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-07T13:50:54.530986Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7490568366830705536:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:54.531296Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00196b/r3tmp/tmpcATZRW/pdisk_1.dat 2025-04-07T13:50:54.714704Z node 19 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T13:50:54.752405Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:54.752524Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:54.756415Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29648, node 19 2025-04-07T13:50:54.829608Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:54.829629Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:54.829641Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:54.829772Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:55.224504Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.224989Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:55.225017Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.228301Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:55.228509Z node 19 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:55.228532Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:55.230601Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:55.230633Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:55.231231Z node 19 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:55.238219Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.250975Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033855291, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:55.251021Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:55.251317Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:55.257737Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:55.257953Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:55.258020Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:55.258141Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:55.258193Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:55.258260Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:55.260076Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:55.260125Z node 19 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:55.260142Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:55.260224Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:55.360650Z node 19 :TICKET_PARSER DEBUG: Ticket D8029B05F1FA50125E9DB2AF77EC253E5386D788 (ipv6:[::1]:45576) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-07T13:50:55.527563Z node 19 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-04-07T13:50:55.629108Z node 19 :TICKET_PARSER DEBUG: Ticket 6AC2A6748DBF676F3B1D35CEF9256A8AF92585F4 (ipv6:[::1]:45616) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-07T13:50:55.629745Z node 19 :TICKET_PARSER ERROR: Ticket 6AC2A6748DBF676F3B1D35CEF9256A8AF92585F4: Cannot create token from certificate. 
Client certificate failed verification >> KqpUniqueIndex::InsertFkAlreadyExist [GOOD] >> KqpUniqueIndex::InsertFkDuplicate >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-04-07T13:50:39.017025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568301901011823:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:39.017061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a49/r3tmp/tmpxNXHzo/pdisk_1.dat 2025-04-07T13:50:39.730563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:39.730640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:39.734027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:39.811431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22448, node 1 2025-04-07T13:50:39.874088Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:39.875766Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:39.969292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:39.969972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:39.970175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:39.978686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:39.978706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:39.978714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:39.980951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:39.998299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:39.999328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:39.999358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:39.999406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:39.999459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25017 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:40.448952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.461191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:40.468925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.471817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:40.472005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:40.472022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:40.475725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:40.475754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:40.477234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.481093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033840528, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:40.481124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:40.481423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:40.483019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.483165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.483212Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:40.483290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:40.483327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:40.483379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:50:40.485272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:40.487853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:40.487903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:40.487939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:40.488015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:40.608022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.608378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:40.608402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.608471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:50:40.608546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:50:40.608579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:50:40.610198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T13:50:40.610340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.610624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.611482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:40.611528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:40.611548Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:40.611628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:25017 2025-04-07T13:50:40.825179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.825438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:40.825460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.832512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:50:40.832678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:40.837175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033840885, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:40.837224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744033840885, at schemeshard: 72057594046644480 2025-04-07T13:50:40.837449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:50:40.837534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:50:40.837569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 waiting... 2025-04-07T13:50:40.840547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:40.840738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:40.842421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle ... 
30397Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: user@builtin, status: StatusAlreadyExists, reason: Check failed: path: '/Root/stream_TestCreateExistingStream', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestCreateExistingStream 2025-04-07T13:50:56.348750Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490568375524350696:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:56.377547Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a49/r3tmp/tmpLDZ9Y6/pdisk_1.dat 2025-04-07T13:50:56.605193Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:56.671735Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:56.671825Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:56.689889Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19407, node 10 2025-04-07T13:50:56.811873Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:56.811906Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:56.811914Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:56.812033Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:57.053825Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.054160Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:57.054193Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.060949Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:57.061177Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:57.061202Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:50:57.066150Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:57.066184Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:57.067971Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.071605Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033857118, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:57.071645Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:57.071895Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:57.073582Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:57.073758Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:57.073809Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:57.073890Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:57.073926Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:57.073969Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-04-07T13:50:57.074638Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:50:57.074677Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:50:57.074692Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:57.074761Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 
2025-04-07T13:50:57.075303Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:57.140400Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.140652Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:57.140690Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.140762Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T13:50:57.140840Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T13:50:57.140851Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2025-04-07T13:50:57.143472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T13:50:57.143621Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:57.143875Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:57.144900Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:50:57.144957Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:50:57.144976Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:57.145081Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:11384 2025-04-07T13:50:57.347673Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.347805Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:57.347821Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.352939Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:50:57.353126Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:57.355141Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T13:50:57.359998Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033857405, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:57.360037Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744033857405, at schemeshard: 72057594046644480 2025-04-07T13:50:57.360221Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T13:50:57.360311Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T13:50:57.360345Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T13:50:57.361866Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:57.362037Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:57.362435Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T13:50:57.362471Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T13:50:57.362485Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:50:57.362540Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> KqpMultishardIndex::DataColumnUpsertMixedSemantic+StreamLookup >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup [GOOD] >> KqpMultishardIndex::DuplicateUpsert+StreamLookup >> KqpIndexes::WriteWithParamsFieldOrder >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TKeyValueTest::TestWriteLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] >> KqpNewEngine::PkRangeSelect3 [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: 
[3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:142:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:144:2166] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:148:2057] recipient: [5:144:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:147:2167] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:217:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:149:2171] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:153:2057] recipient: [7:149:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:152:2172] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:222:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:150:2171] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:154:2057] recipient: [8:150:2171] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:153:2172] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:223:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] >> KqpNewEngine::PkRangeSelect4 >> DemoTx::Scenario_3 [GOOD] >> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::UseTransactionWhenExecuteDataActionQuery [FAIL] Test command err: BASE_PERF = 4.853803389 2025-04-07T13:50:00.734314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1542:2386], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.735246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.735473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:00.737531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1098:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.737757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1102:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.738064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1540:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.738281Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1100:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:00.738746Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:00.738820Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.738873Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.739072Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.739121Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:00.739154Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:00.739383Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:00.739462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:50:01.234388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:01.404392Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:50:01.439304Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:50:02.036125Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14511, node 1 TClient is connected to server localhost:7503 2025-04-07T13:50:02.317936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:02.317996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:02.318036Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:02.318435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:45.417009Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490568331127350193:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:45.537073Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:50:45.892890Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:45.902113Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:45.902274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:45.913478Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17764, node 6 2025-04-07T13:50:46.037295Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:46.037316Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:46.037325Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:46.037465Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:46.970681Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:47.070212Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-07T13:50:50.416847Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490568331127350193:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:50.416936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:51.032422Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568356897154491:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:51.032589Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:51.032924Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568356897154503:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:51.038052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:50:51.050193Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490568356897154505:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> DemoTx::Scenario_4 >> DataStreams::TestReservedConsumersMetering [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> KqpIndexes::DuplicateUpsertInterleaveParams [GOOD] >> KqpIndexes::NullInIndexTable [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-04-07T13:50:38.406538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568301420361556:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:38.406584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a40/r3tmp/tmpCXYWXk/pdisk_1.dat 2025-04-07T13:50:38.864723Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29799, node 1 2025-04-07T13:50:38.879681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:50:38.880399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:38.880526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:38.881819Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:38.882123Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:38.883144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:38.884184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:38.884294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:38.884488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:38.887562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.926264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:38.926376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:38.929280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:38.945214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:38.945242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T13:50:38.945250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:38.945352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:39.296128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.301565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:39.307247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.311673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:39.311865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:39.311877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:39.314330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:39.314356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:39.315839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:39.321192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:39.322938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033839366, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:39.322964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:39.323250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:39.325017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:39.325172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:39.325236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:39.325310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:39.325350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:39.325384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:39.329335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:39.329377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:39.329399Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:39.329473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:39.462775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.463089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:39.463121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.463187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:50:39.463276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:50:39.463289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:50:39.465504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T13:50:39.465654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:39.465909Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:39.466438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:39.466493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:39.466524Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:50:39.466612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:14201 2025-04-07T13:50:39.633007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.633216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:39.633233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:39.637226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:50:39.637368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:39.638931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:50:39.644383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033839688, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:39.644417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744033839688, at schemeshard: 72057594046644480 2025-04-07T13:50:39.644938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:50:39.645000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:50:39.645025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T13:50:39.648337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:39.648473Z node 1 : ... 
older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033859481-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1744033859,"finish":1744033859},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033859}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033859481-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1744033859,"finish":1744033859},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033859}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033859481-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744033859,"finish":1744033859},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033859}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744033859516-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744033859,"finish":1744033860},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033860}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033859516-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744033859,"finish":1744033860},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033860}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033859516-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744033859,"finish":1744033860},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033860}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033859516-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744033859,"finish":1744033860},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033860}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744033860576-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744033860,"finish":1744033861},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033861}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033860576-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744033860,"finish":1744033861},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033861}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033860576-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744033860,"finish":1744033861},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033861}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033860576-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744033860,"finish":1744033861},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033861}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744033861598-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744033861,"finish":1744033862},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033862}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033861598-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744033861,"finish":1744033862},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033862}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033861598-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744033861,"finish":1744033862},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033862}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033861598-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744033861,"finish":1744033862},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033862}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744033862682-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744033862,"finish":1744033863},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033863}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033862682-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744033862,"finish":1744033863},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033863}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033862682-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744033862,"finish":1744033863},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033863}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033862682-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744033862,"finish":1744033863},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033863}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744033863759-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744033863,"finish":1744033864},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033864}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033863759-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744033863,"finish":1744033864},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033864}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744033863759-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744033863,"finish":1744033864},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744033864}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744033863759-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744033863,"finish":1744033864},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744033864}' >> KqpLimits::QueryExecTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams [GOOD] Test command err: Trying to start YDB, gRPC: 22209, MsgBus: 15980 2025-04-07T13:50:55.615122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568373638016770:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:55.615172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b4f/r3tmp/tmpNQ166n/pdisk_1.dat 2025-04-07T13:50:56.113560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:56.120809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:56.121048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:56.129239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22209, node 1 2025-04-07T13:50:56.233183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:56.233206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:56.233217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:56.233283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15980 TClient is connected to server localhost:15980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:56.777101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.791574Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:56.798906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.898104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.039192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.100884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:58.598528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568386522920364:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.598683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.810667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.845140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.874644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.940621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.972348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.048324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.150016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390817888164:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.150092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.150269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390817888169:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.153700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:59.163431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568390817888171:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:00.036612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:00.615591Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568373638016770:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:00.615695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2152, MsgBus: 16050 2025-04-07T13:51:01.316778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568396560128725:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:01.323174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b4f/r3tmp/tmpwCqeIg/pdisk_1.dat 2025-04-07T13:51:01.437871Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2152, node 2 2025-04-07T13:51:01.460932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:01.461049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:01.462376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:01.481268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:01.481296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:01.481305Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:01.481394Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16050 TClient is connected to server localhost:16050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:01.994133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:02.005175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:02.087398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:02.227097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:02.297459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.215106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568409445032309:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.215189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.261730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.288986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.324590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.355468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.393994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.423489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.517038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568409445032810:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.517169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.517525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568409445032815:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.521236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:04.527916Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:04.528305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568409445032817:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:05.683911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.311931Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568396560128725:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:06.319146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpUniqueIndex::InsertFkDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:149:2057] recipient: [4:147:2169] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:151:2057] recipient: [4:147:2169] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:150:2170] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:220:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:150:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:150:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:151:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:151:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:144:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:144:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:148:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:149:2057] recipient: [12:147:2169] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:151:2057] recipient: [12:147:2169] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:150:2170] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:220:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] >> TopicService::UnknownConsumer [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::NullInIndexTable [GOOD] Test command err: Trying to start YDB, gRPC: 1140, MsgBus: 6196 2025-04-07T13:50:52.575057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568359508550382:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:52.575215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b6e/r3tmp/tmpdRPnzV/pdisk_1.dat 2025-04-07T13:50:52.937650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:52.943431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:52.943521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:52.945973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1140, node 1 2025-04-07T13:50:53.052591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:53.052613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:53.052621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:53.052736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6196 TClient is connected to server localhost:6196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:53.714731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:53.739315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:53.744141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:53.931188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:54.111033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:54.190196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.253200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568376688421139:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.261721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.325267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.368269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.445376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.485872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.526028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.584594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.635944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568376688421639:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.636053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.636257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568376688421644:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.640008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:56.659584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568376688421646:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:57.570557Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568359508550382:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:57.570624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:57.766347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.801778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.881784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6898, MsgBus: 22108 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b6e/r3tmp/tmp9DQ66Y/pdisk_1.dat 2025-04-07T13:51:00.520401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:00.521977Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:00.540933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:00.541014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:00.542860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6898, node 2 2025-04-07T13:51:00.635572Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:00.635591Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:00.635602Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:00.635689Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22108 TClient is connected to server localhost:22108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:51:01.174564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:01.192236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:01.266738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:01.471802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:01.544203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:03.535543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568406425768499:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:03.535645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:03.582937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.615048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.643298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.673798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.715687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.795411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:03.863543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568406425768998:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:03.863718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:03.867246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568406425769003:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:03.876976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:03.891815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568406425769005:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:05.166446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:05.251425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:05.285558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 13619, MsgBus: 14489 2025-04-07T13:50:54.728765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568366099006662:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:54.729623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b51/r3tmp/tmp2Dt5ww/pdisk_1.dat 2025-04-07T13:50:55.095981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13619, node 1 2025-04-07T13:50:55.148828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:55.148894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:55.152271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:55.178992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:55.179013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:55.179019Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:55.179104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14489 TClient is connected to server localhost:14489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:55.718408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:55.733621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:55.742523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:55.854452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.069460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.142053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.801530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568378983910222:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:57.822307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:57.845255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.875066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.904005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.933608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:57.966573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.006782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.047262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568383278878014:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.047339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.047523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568383278878019:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.049828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:58.059011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568383278878021:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:59.051898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:59.730610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568366099006662:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:59.730666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:00.559889Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568391868814037:2559], TxId: 281474976710677, task: 1. Ctx: { TraceId : 01jr889jm6c3pskjreysaak1he. SessionId : ydb://session/3?node_id=1&id=ZTUzOGE0NmUtM2NiZjc3N2YtMjRkNTNkZDItNTIwNjJkYTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:00.560218Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568391868814039:2560], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTUzOGE0NmUtM2NiZjc3N2YtMjRkNTNkZDItNTIwNjJkYTQ=. TraceId : 01jr889jm6c3pskjreysaak1he. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568391868814034:2518], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:00.561768Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUzOGE0NmUtM2NiZjc3N2YtMjRkNTNkZDItNTIwNjJkYTQ=, ActorId: [1:7490568387573846370:2518], ActorState: ExecuteState, TraceId: 01jr889jm6c3pskjreysaak1he, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8964, MsgBus: 64500 2025-04-07T13:51:01.518336Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568400101002122:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:01.520163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b51/r3tmp/tmp5Qjc8A/pdisk_1.dat 2025-04-07T13:51:01.669763Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:01.680927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:01.681022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:01.682445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8964, node 2 2025-04-07T13:51:01.778463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:01.778489Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:01.778502Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:01.778623Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64500 TClient is connected to server localhost:64500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:51:02.202134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:02.221716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:51:02.302064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:02.471251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:02.569146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.650912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568412985905616:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.651001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:04.696770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.755096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.808164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.843974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.885648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.971378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:05.016047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568417280873406:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:05.016117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:05.016283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568417280873411:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:05.018750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:05.025590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568417280873413:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:05.982053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:06.520517Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568400101002122:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:06.539989Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:07.363099Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568425870809461:2559], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZTc0ZGU4ZDAtZjBkM2FlZmUtNzE2ODYxOGEtOTJmMzdkOWQ=. CustomerSuppliedId : . TraceId : 01jr889s6x6d1xr2m9mvtey6j4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T13:51:07.364053Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568425870809462:2560], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZTc0ZGU4ZDAtZjBkM2FlZmUtNzE2ODYxOGEtOTJmMzdkOWQ=. TraceId : 01jr889s6x6d1xr2m9mvtey6j4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490568425870809458:2518], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:07.365112Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTc0ZGU4ZDAtZjBkM2FlZmUtNzE2ODYxOGEtOTJmMzdkOWQ=, ActorId: [2:7490568421575841775:2518], ActorState: ExecuteState, TraceId: 01jr889s6x6d1xr2m9mvtey6j4, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 5128, MsgBus: 15578 2025-04-07T13:49:33.029345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568021678320277:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.034213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00138a/r3tmp/tmpg5ZNxQ/pdisk_1.dat 2025-04-07T13:49:33.609942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:33.610058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:33.612537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:33.632404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5128, node 1 2025-04-07T13:49:33.733346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.733373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.733384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.733499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15578 TClient is connected to server localhost:15578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:34.396108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.424227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:34.554685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.689262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.772266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.787937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568034563223860:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.788028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.053607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.110223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.141505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.210459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.255336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.290977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.334300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038858191655:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.334375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.334592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568038858191660:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:37.337666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:37.347457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:49:37.348081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568038858191662:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:38.031878Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568021678320277:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:38.031944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:38.294819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:39.755419Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490568047448127100:2497] TxId: 281474976710672. Ctx: { TraceId: 01jr8873rr5nrkytx5xrrjyf0z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. } 2025-04-07T13:49:39.764634Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568047448127108:2507], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr8873rr5nrkytx5xrrjyf0z. SessionId : ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490568047448127100:2497], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:39.767719Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568047448127109:2508], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=. TraceId : 01jr8873rr5nrkytx5xrrjyf0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490568047448127100:2497], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:39.767936Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568047448127111:2510], TxId: 281474976710672, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=. CustomerSuppliedId : . TraceId : 01jr8873rr5nrkytx5xrrjyf0z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568047448127100:2497], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:39.770563Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568047448127112:2511], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=. TraceId : 01jr8873rr5nrkytx5xrrjyf0z. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490568047448127100:2497], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:49:39.772201Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjgxMmU5ZDAtOWVmOTZkNzEtNTk2M2Y4NTUtMTY5OWM1NDQ=, ActorId: [1:7490568047448127079:2497], ActorState: ExecuteState, TraceId: 01jr8873rr5nrkytx5xrrjyf0z, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 17686, MsgBus: 6913 2025-04-07T13:49:40.462440Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568049652222997:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00138a/r3tmp/tmp8ced4b/pdisk_1.dat 2025-04-07T13:49:40.493414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:40.550602Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17686, node 2 2025-04-07T13:49:40.577018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:40.577099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:40.616767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connecte ... ] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:44.465232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568066832094215:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:44.468568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:44.481798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568066832094217:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:49:45.486364Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568049652222997:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:45.486446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:45.611319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:55.535837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:55.535870Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:39.030523Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Nzg0NDE1MDItNDg0MmQyNTktOGQ2OWZmMjctNTI0MDJhMTI=, ActorId: [2:7490568298760330159:2782], ActorState: ExecuteState, TraceId: 01jr888xtbd600ae2b46yd14w7, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 93ms exceeded, terminating after 94ms 2025-04-07T13:50:39.275064Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490568303055297545:2782] TxId: 281474976715674. Ctx: { TraceId: 01jr888xyxa29ps1xmn2ahxfsh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzg0NDE1MDItNDg0MmQyNTktOGQ2OWZmMjctNTI0MDJhMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 204ms during execution } ] 2025-04-07T13:50:39.290425Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568303055297560:2815], TxId: 281474976715674, task: 9. Ctx: { SessionId : ydb://session/3?node_id=2&id=Nzg0NDE1MDItNDg0MmQyNTktOGQ2OWZmMjctNTI0MDJhMTI=. TraceId : 01jr888xyxa29ps1xmn2ahxfsh. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490568303055297545:2782], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:39.437720Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568303055297558:2813], TxId: 281474976715674, task: 7. Ctx: { TraceId : 01jr888xyxa29ps1xmn2ahxfsh. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Nzg0NDE1MDItNDg0MmQyNTktOGQ2OWZmMjctNTI0MDJhMTI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490568303055297545:2782], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:39.447544Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Nzg0NDE1MDItNDg0MmQyNTktOGQ2OWZmMjctNTI0MDJhMTI=, ActorId: [2:7490568298760330159:2782], ActorState: ExecuteState, TraceId: 01jr888xyxa29ps1xmn2ahxfsh, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 204ms during execution Trying to start YDB, gRPC: 15237, MsgBus: 20511 2025-04-07T13:50:40.635184Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568307446134407:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:40.635245Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00138a/r3tmp/tmpg0mz2N/pdisk_1.dat 2025-04-07T13:50:40.729420Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15237, node 3 2025-04-07T13:50:40.757545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:40.757649Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:40.760710Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:40.785144Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:40.785165Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:40.785173Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:40.785285Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20511 TClient is connected to server localhost:20511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:41.335702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:41.351134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:50:41.429739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:41.576803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:41.658877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:44.126916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568324626005287:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:44.127048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:44.166570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.195656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.220223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.250938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.280809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.311083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:44.349422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568324626005781:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:44.349505Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568324626005786:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:44.349526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:44.352292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:44.360102Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568324626005788:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:50:45.637627Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568307446134407:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:45.637684Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:55.728295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:50:55.728326Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=Y2ZlYmIyMWUtODJlODg1ZDYtMzYzMGYxOTQtYmU1YmY2MzA= >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleave [GOOD] Test command err: Trying to start YDB, gRPC: 62390, MsgBus: 16248 2025-04-07T13:50:56.283661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568377606960411:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:56.284042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b4b/r3tmp/tmpMfrUx8/pdisk_1.dat 2025-04-07T13:50:56.838766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:56.854827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:56.854947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:56.857014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62390, node 1 2025-04-07T13:50:56.922754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:56.922777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:56.922784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:56.922878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16248 TClient is connected to server localhost:16248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:57.400719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:57.417703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:57.428380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.554840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.742895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.808085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:59.699448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390491863862:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.711022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.746771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.784946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.852235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.879838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.949237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.998170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:00.060426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568394786831656:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:00.060511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:00.060817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568394786831661:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:00.064699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:00.078383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568394786831663:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:01.145665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:01.258773Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568377606960411:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:01.258900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 792 cpu_time_us: 792 } query_phases { duration_us: 8479 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5779 affected_shards: 1 } query_phases { duration_us: 1696 cpu_time_us: 1696 } query_phases { duration_us: 8235 cpu_time_us: 4465 } query_phases { duration_us: 6643 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3054 affected_shards: 2 } compilation { duration_us: 575870 cpu_time_us: 563394 } process_cpu_time_us: 5763 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744033861861,\"TaskId\":1,\"Host\":\"ghrun-lqtfcktism\",\"ComputeTimeUs\":95}],\"CpuTimeUs\":593}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1744033861861,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":593,\"Max\":593,\"Min\":593}},\"CTE Name\":\"precompute_1_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744033861861,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744033861861,\"TaskId\":2,\"Host\":\"ghrun-lqtfcktism\",\"ComputeTimeUs\":95}],\"CpuTimeUs\":538}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1744033861861,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":538,\"Max\":538,\"Min\":538}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE 
precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1744033861852,\"Host\":\"ghrun-lqtfcktism\",\"OutputRows\":1,\"StartTimeMs\":1744033861849,\"ComputeTimeUs\":72,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":1,\"DstStageId\":2,\"Bytes\":29}],\"TaskId\":1,\"OutputBytes\":29}],\"PeakMemoryUsageBytes\":131072,\"DurationUs\":3000,\"CpuTimeUs\":1223}],\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":1,\"Sum\":3000,\"Max\":3000,\"Min\":3000},\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":29,\"Max\":29,\"Min\":29}},\"Name\":\"19\",\"P ... a1fad4-c79b55ba\"))))\n(let $42 (DqCnValue (TDqOutput $41 \'0)))\n(let $43 (KqpTxResultBinding $39 \'1 \'1))\n(let $44 (KqpPhysicalTx \'($41) \'($42) \'($33 \'($37 $43)) $11))\n(let $45 \'\"%kqp%tx_result_binding_2_0\")\n(let $46 (DictType $6 (TupleType $38 $40)))\n(let $47 %kqp%tx_result_binding_2_0)\n(let $48 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $14 (lambda \'($100) (AsStruct \'(\'\"Key\" (Member $100 \'\"Key\")) \'(\'\"fk1\" (Member $100 \'\"fk1\")) \'(\'\"fk3\" (Member $100 \'\"fk3\"))))) (lambda \'($101) (block \'(\n (let $102 \'(\'\"Key\" (Member $101 \'\"Key\")))\n (let $103 \'(\'\"fk1\" (Member $101 \'\"fk1\")))\n (let $104 \'(\'\"fk3\" (Member $101 \'\"fk3\")))\n (return (IfPresent (Lookup $47 (AsStruct $102)) (lambda \'($105) (If (Nth $105 \'1) (Just (AsStruct $102 $103 \'(\'\"fk2\" (Member (Nth $105 \'0) \'\"fk2\")) $104)) (Nothing (OptionalType $23)))) (Just (AsStruct $102 $103 \'(\'\"fk2\" (Nothing $21)) $104))))\n)))))) \'(\'(\'\"_logical_id\" \'3419) \'(\'\"_id\" \'\"82794968-4e298218-e893a2f6-dc07163a\"))))\n(let $49 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $47) (lambda \'($106) (Nth (Nth $106 \'1) \'1))) (lambda \'($107) (block \'(\n (let $108 (Nth (Nth $107 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $107 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $108 \'\"fk1\")) \'(\'\"fk2\" (Member $108 \'\"fk2\")) \'(\'\"fk3\" (Member $108 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'3433) \'(\'\"_id\" \'\"880d4d4d-fe8cb45b-3169b6ba-a147353f\"))))\n(let $50 (DqCnUnionAll (TDqOutput $48 \'0)))\n(let $51 (lambda \'($109) $109))\n(let $52 (DqPhyStage \'($50) $51 \'(\'(\'\"_logical_id\" \'3529) \'(\'\"_id\" \'\"2acd4f0c-e88e2040-5037943d-ca40c8c8\"))))\n(let $53 (DqCnUnionAll (TDqOutput $49 \'0)))\n(let $54 (DqPhyStage \'($53) $51 \'(\'(\'\"_logical_id\" \'3544) \'(\'\"_id\" \'\"2c271a4b-fb6a26cc-f6e1d878-621bfd87\"))))\n(let $55 \'($48 $49 $52 $54))\n(let $56 (DqCnResult (TDqOutput $52 \'0) \'()))\n(let $57 (DqCnResult (TDqOutput $54 \'0) \'()))\n(let $58 (KqpTxResultBinding $46 \'2 \'0))\n(let $59 (KqpPhysicalTx $55 \'($56 $57) \'($33 \'($45 $58)) $11))\n(let $60 \'\"%kqp%tx_result_binding_1_0\")\n(let $61 (DqPhyStage \'() (lambda \'() (block \'(\n (let $110 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $18 (Iterator %kqp%tx_result_binding_1_0) $110 
\'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'4059) \'(\'\"_id\" \'\"f66ab706-66c71e5a-5950bb67-bb6635fc\"))))\n(let $62 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $63 \'\"%kqp%tx_result_binding_3_1\")\n(let $64 (ListType $23))\n(let $65 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $62 (Iterator %kqp%tx_result_binding_3_1)))) \'(\'(\'\"_logical_id\" \'4073) \'(\'\"_id\" \'\"b847148d-a28fb29d-8f446518-e5f0fd61\"))))\n(let $66 \'\"%kqp%tx_result_binding_3_0\")\n(let $67 (DqPhyStage \'() (lambda \'() (block \'(\n (let $111 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $62 (Iterator %kqp%tx_result_binding_3_0) $111 \'())))\n))) \'(\'(\'\"_logical_id\" \'4087) \'(\'\"_id\" \'\"a1b5fe8-e3087517-3e08045-8ed40ed9\"))))\n(let $68 \'($61 $65 $67))\n(let $69 (KqpTxResultBinding $5 \'1 \'0))\n(let $70 (KqpTxResultBinding $64 \'\"3\" \'0))\n(let $71 (KqpTxResultBinding $64 \'\"3\" \'1))\n(let $72 \'(\'($60 $69) \'($66 $70) \'($63 $71)))\n(let $73 (KqpPhysicalTx $68 \'() $72 \'($35 \'(\'\"with_effects\"))))\n(let $74 \'($12 $36 $44 $59 $73))\n(return (KqpPhysicalQuery $74 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 625083 total_cpu_time_us: 584943 2025-04-07T13:51:01.959338Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:02.546233Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 9879, MsgBus: 7646 2025-04-07T13:51:03.258052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568407892753483:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:03.258120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b4b/r3tmp/tmplLgjHI/pdisk_1.dat 2025-04-07T13:51:03.351717Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:03.364723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:03.364801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:03.366666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9879, node 2 2025-04-07T13:51:03.417139Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:03.417164Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:03.417172Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:03.417279Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7646 TClient is connected to server localhost:7646 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:03.830432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:03.837506Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:03.852503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:03.914031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.082883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.190677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:06.369885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568420777657078:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.370004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.409780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.438434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.475099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.506833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.536631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.571083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.625165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568420777657573:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.625253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.627462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568420777657578:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.631043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:06.640467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568420777657580:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:07.653023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:08.186277Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:08.258955Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568407892753483:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:08.259033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TopicService::UnknownTopic >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnUpsertMixedSemantic+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnSelect-StreamLookup >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup >> KqpIndexMetadata::HandleNotReadyIndex >> KqpUniqueIndex::ReplaceFkPartialColumnSet >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> KqpUniqueIndex::InsertNullInPk >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7736, MsgBus: 7759 2025-04-07T13:50:56.427416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568378043930899:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:56.428646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b46/r3tmp/tmpexz5Yq/pdisk_1.dat 2025-04-07T13:50:56.816385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:56.827966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:56.828062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:56.829803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7736, node 1 2025-04-07T13:50:56.895937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:56.895959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:56.895988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:56.896083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7759 TClient is connected to server localhost:7759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:57.436225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.473796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.686467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:57.931817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:58.016044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:59.508289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390928834353:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.508415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.705766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.737468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.765727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.799868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.874642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.921375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:59.983216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390928834851:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.983288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.983508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568390928834856:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:59.986990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:59.996478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568390928834858:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:01.067004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:01.409066Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568378043930899:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:01.409188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12245, MsgBus: 6177 2025-04-07T13:51:03.796273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568407979277964:2142];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b46/r3tmp/tmpOcxD7o/pdisk_1.dat 2025-04-07T13:51:03.850132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:03.930306Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:03.944237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:03.944325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:03.945859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12245, node 2 2025-04-07T13:51:03.996070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:03.996091Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:03.996101Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:03.996205Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6177 TClient is connected to server localhost:6177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:04.599844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:04.628399Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:04.642917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.737309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:04.872872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.964130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:07.134587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568425159148738:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.134696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.150564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.180616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.217825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.300159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.335622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.371066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.453105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568425159149239:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.453194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.453472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568425159149244:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.456810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:07.466789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568425159149246:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:08.460184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.796969Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568407979277964:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:08.797044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> KqpIndexes::MultipleSecondaryIndex >> KqpUniqueIndex::ReplaceFkAlreadyExist >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:144:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:144:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:143:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:147:2057] recipient: [12:145:2167] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:149:2057] recipient: [12:145:2167] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:148:2168] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:218:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:148:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:152:2057] recipient: [13:150:2172] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:154:2057] recipient: [13:150:2172] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:153:2173] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:223:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:148:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:152:2057] recipient: [14:150:2172] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:154:2057] recipient: [14:150:2172] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:153:2173] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:223:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:154:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:155:2057] recipient: [15:153:2174] Leader for TabletID 72057594037927937 is [15:156:2175] sender: [15:157:2057] recipient: [15:153:2174] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] 2025-04-07T13:51:11.962673Z node 17 :BS_PROXY_GET ERROR: [023d050af957c82c] Response# TEvGetResult {Status# BLOCKED ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] BLOCKED Size# 0 RequestedSize# 5} ErrorReason# "status# BLOCKED from# [0:1:0:0:0]"} Marker# BPG29 2025-04-07T13:51:11.962877Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 24 SentAt# 1970-01-01T00:00:00.024000Z GotAt# 23 ErrorReason# status# BLOCKED from# [0:1:0:0:0] 2025-04-07T13:51:11.968326Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-04-07T13:51:11.968404Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3, Details: Status# ALREADY From# [0:1:0:0:0] NodeId# 17 QuorumTracker# {Erroneous# 1 Successful# 0} Marker# TSYS31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6215, MsgBus: 14448 2025-04-07T13:50:54.758988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568369735330332:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:54.759088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b58/r3tmp/tmpMhnX3q/pdisk_1.dat 2025-04-07T13:50:55.237482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:55.267468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:55.267549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:55.270597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6215, node 1 2025-04-07T13:50:55.430832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:55.430852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:55.430862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:55.430970Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14448 TClient is connected to server localhost:14448 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:56.067525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.092954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:56.124778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.304241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.467547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:56.557551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:58.345644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568386915201224:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.345752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.643149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.680381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.725617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.750279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.817335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.887221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:58.958869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568386915201726:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.958960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.959473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568386915201731:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:58.963357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:58.971094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568386915201733:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:59.760951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568369735330332:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:59.761032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:00.071346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 10844, MsgBus: 26587 2025-04-07T13:51:03.701629Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568405464415259:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:03.701748Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b58/r3tmp/tmpHMHqdS/pdisk_1.dat 2025-04-07T13:51:03.790311Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10844, node 2 2025-04-07T13:51:03.827823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:03.827897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:03.832143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:03.877683Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:03.877705Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:03.877713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:03.877793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26587 TClient is connected to server localhost:26587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:04.310276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:04.325418Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:04.338776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.453535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.630282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.715421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:06.684834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568418349318835:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.684957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.727836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.754329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.786660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.816949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.842305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.919286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.983666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568418349319338:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.983734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.983838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568418349319343:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.986889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:06.994970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568418349319345:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:08.035517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.709660Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568405464415259:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:08.709734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:09.791445Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr889va11nqsrfpnz31gn53c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmJkYTAyODEtNTQzYjM4ODAtNzE3M2IwZDMtYzI0YTU1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:09.801036Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmJkYTAyODEtNTQzYjM4ODAtNzE3M2IwZDMtYzI0YTU1OGU=, ActorId: [2:7490568426939254977:2518], ActorState: ExecuteState, TraceId: 01jr889va11nqsrfpnz31gn53c, Create QueryResponse for error on request, msg: >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex >> KqpUniqueIndex::UpsertExplicitNullInComplexFk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] Test command err: 2025-04-07T13:49:36.948996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:36.949256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:36.949532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001081/r3tmp/tmp2patSv/pdisk_1.dat 2025-04-07T13:49:37.291963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.330821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:37.385255Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:49:37.385983Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:49:37.386126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:37.386237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:37.397795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:37.515315Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:49:37.515382Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:49:37.515504Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:49:37.651242Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:49:37.651874Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:49:37.651961Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:49:37.652268Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:49:37.652456Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:49:37.652549Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T13:49:37.652916Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:49:37.654262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:37.655158Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:49:37.655232Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:49:37.688103Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:49:37.689870Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:49:37.690342Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:49:37.690646Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:49:37.733175Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:49:37.733925Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:49:37.734035Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:49:37.735459Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:49:37.735537Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:49:37.735598Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:49:37.735887Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:49:37.758576Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:49:37.758745Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:49:37.758856Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:49:37.758894Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:49:37.758924Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:49:37.758958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:49:37.759309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:37.759369Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:49:37.759544Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:49:37.759627Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:49:37.759972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:37.760003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:49:37.760045Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:49:37.760092Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:49:37.760134Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:49:37.760182Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:49:37.760213Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:49:37.760241Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:49:37.760267Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:49:37.760307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:49:37.760490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:49:37.760530Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:49:37.760611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:49:37.760943Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:49:37.761001Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:49:37.761082Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:49:37.761136Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:49:37.761178Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:49:37.761208Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:49:37.761238Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:37.761475Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:49:37.761518Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:49:37.761549Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:49:37.761578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:37.761641Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:49:37.761667Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:49:37.761697Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:49:37.761722Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:49:37.761756Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:49:37.762909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:49:37.762991Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:49:37.776565Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:49:37.776641Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:49:37.776693Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:49:37.776762Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T13:51:11.608417Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [7:640:2544]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T13:51:11.608460Z node 7 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T13:51:11.608531Z node 7 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T13:51:11.608610Z node 7 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 8 2025-04-07T13:51:11.608732Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [7:644:2546]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T13:51:11.608783Z node 7 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037889 2025-04-07T13:51:11.608835Z node 7 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037889, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T13:51:11.608899Z node 7 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 8 2025-04-07T13:51:11.892369Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [7:787:2644]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T13:51:11.892449Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T13:51:11.892527Z node 7 :TX_DATASHARD TRACE: No cleanup at 72075186224037890 outdated step 62000 last cleanup 0 2025-04-07T13:51:11.892584Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:51:11.892621Z node 7 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-07T13:51:11.892700Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T13:51:11.892735Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T13:51:11.892876Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient 
[7:792:2646]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T13:51:11.892911Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T13:51:11.892957Z node 7 :TX_DATASHARD TRACE: No cleanup at 72075186224037891 outdated step 62000 last cleanup 0 2025-04-07T13:51:11.893010Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:51:11.893036Z node 7 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2025-04-07T13:51:11.893063Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-04-07T13:51:11.893088Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2025-04-07T13:51:11.893883Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [7:787:2644]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T13:51:11.893950Z node 7 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037890 2025-04-07T13:51:11.894029Z node 7 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037890, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T13:51:11.894123Z node 7 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037890, FollowerId 0, tableId 3 2025-04-07T13:51:11.894212Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [7:792:2646]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T13:51:11.894248Z node 7 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037891 2025-04-07T13:51:11.894295Z node 7 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037891, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T13:51:11.894353Z node 7 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037891, FollowerId 0, tableId 3 2025-04-07T13:51:12.953364Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvProposeTransaction 2025-04-07T13:51:12.953474Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715669 ProcessProposeTransaction 2025-04-07T13:51:12.953616Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [7:1499:3278] DataReq marker# P0 2025-04-07T13:51:12.953821Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] Cookie# 0 txid# 281474976715669 HANDLE TDataReq marker# P1 2025-04-07T13:51:12.954136Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] txid# 281474976715669 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-07T13:51:12.954382Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] txid# 281474976715669 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-07T13:51:12.954525Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 2 followers disallowed marker# P4b 2025-04-07T13:51:12.954639Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037889 with read table request affected shards 2 followers disallowed marker# P4b 2025-04-07T13:51:12.954958Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:1499:3278], Recipient [7:640:2544]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 
30064774350 } TxBody: " \0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\316\014\000\000\007\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2025-04-07T13:51:12.955032Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:51:12.955173Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:51:12.955405Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2025-04-07T13:51:12.955511Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2025-04-07T13:51:12.955644Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2025-04-07T13:51:12.955697Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T13:51:12.955744Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:51:12.955786Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2025-04-07T13:51:12.955821Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2025-04-07T13:51:12.955859Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:51:12.955904Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2025-04-07T13:51:12.955988Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:51:12.956025Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2025-04-07T13:51:12.956070Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2025-04-07T13:51:12.956130Z node 7 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037888 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037888 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2025-04-07T13:51:12.956217Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:51:12.956420Z node 7 :TX_PROXY DEBUG: Actor# [7:1499:3278] txid# 281474976715669 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# BAD_REQUEST shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-07T13:51:12.956518Z node 7 :TX_PROXY ERROR: Actor# [7:1499:3278] txid# 281474976715669 RESPONSE Status# WrongRequest marker# P13c 2025-04-07T13:51:12.956886Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:1499:3278], Recipient [7:644:2546]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 30064774350 } TxBody: " 
\0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\316\014\000\000\007\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2025-04-07T13:51:12.956919Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:51:12.957003Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:51:12.957103Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2025-04-07T13:51:12.957155Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit CheckDataTx 2025-04-07T13:51:12.957201Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is Executed 2025-04-07T13:51:12.957223Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit CheckDataTx 2025-04-07T13:51:12.957245Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037889 to execution unit FinishPropose 2025-04-07T13:51:12.957265Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit FinishPropose 2025-04-07T13:51:12.957285Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is DelayComplete 2025-04-07T13:51:12.957308Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit FinishPropose 2025-04-07T13:51:12.957334Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037889 has finished 2025-04-07T13:51:12.957372Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:51:12.957395Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037889 on unit FinishPropose 2025-04-07T13:51:12.957424Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037889 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2025-04-07T13:51:12.957451Z node 7 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037889 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037889 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2025-04-07T13:51:12.957493Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-04-07T13:48:19.155750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:19.167932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:19.168112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 27917, node 1 TClient is connected to server localhost:15146 2025-04-07T13:48:20.248617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:592:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:20.248735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:603:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:20.248872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:20.305169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:20.306700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:20.306748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:20.306786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:20.306961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:20.363285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:20.363433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:20.373203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:48:20.413805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:20.547948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:607:2513], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:48:20.890306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jr884pcpa7nejynkzscq3af8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIyMmY3OTktODFhOWIwYzAtOGU2ZDI0ODQtYTZjYzVjYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:48:51.216098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:285:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:51.216381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:51.216462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 16125, node 2 TClient is connected to server localhost:32204 2025-04-07T13:48:52.813601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:584:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:52.813724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:601:2508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:52.813816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:48:52.860234Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:52.870234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:52.870312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:52.870352Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:52.870551Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:48:52.926595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:52.926738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:52.936484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:48:52.985931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:53.128524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:605:2511], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:48:53.351577Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jr885p620azh54zebtqpxazy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzgwMzI0ZjItZDI1NTQxMWMtMzY4NjBjNWUtZjM1YjQ0ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:49:03.652983Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:297:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:03.653399Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:03.653483Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29212, node 3 TClient is connected to server localhost:27026 2025-04-07T13:49:04.457132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:590:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.457293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:600:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.457413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:04.520357Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:04.520444Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:04.520498Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:04.520822Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:04.522349Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:04.584606Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:04.584800Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:04.591497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:49:04.620482Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:04.741772Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:604:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:49:04.855632Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jr8861j627st6g26y12aqy73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTVhZjMzMTgtMWQzOGJhMTgtNmE5YzdkZGMtY2M0ODlmYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:49:14.857657Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:14.857956Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:14.858140Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 27714, node 4 TClient is connected to server localhost:8437 2025-04-07T13:49:15.673417Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:588:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.673584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:601:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:15.673708Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not fou ... 025-04-07T13:49:53.160065Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22834, node 7 TClient is connected to server localhost:13034 2025-04-07T13:49:54.234572Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:589:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:54.234719Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:600:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:54.234851Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:54.279422Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:54.285208Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:54.285307Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:54.285386Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:54.285857Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:54.334855Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:54.335049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:54.346906Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:49:54.371278Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:54.513515Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:604:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:49:54.681217Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jr887j5p10qc45rjxmj1wap4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTE4YjAwMjYtY2JmZjUxYjgtMjExNjJiMzEtNzBmYTYzZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:09.908215Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:92:2138], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:09.916954Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:09.917456Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12950, node 8 TClient is connected to server localhost:3093 2025-04-07T13:50:11.207696Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:590:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:11.207867Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:599:2508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:11.208637Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:11.262322Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:11.265234Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:11.265351Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:11.265437Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:11.265759Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:11.319062Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:11.319310Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:11.328234Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:50:11.353575Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:11.484776Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:605:2511], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:50:11.737338Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jr8882r36qwnfsjhvphvcvhn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZDJlMTgxMDEtZGUzODI0ZDQtODhjMjIwNTktNDJjNjZm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:50:27.977787Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:399:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:27.978064Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:27.978193Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:50:28.470038Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:28.767968Z node 9 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:50:28.860793Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:50:29.806449Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 5999, node 9 TClient is connected to server localhost:11266 2025-04-07T13:50:31.147374Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:31.147477Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:31.147568Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:31.148311Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:48.397759Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:255:2187], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:48.398249Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:50:48.398499Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:50:48.940914Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:49.115183Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:50:49.150592Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:50:50.075863Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19462, node 11 TClient is connected to server localhost:23310 2025-04-07T13:50:50.918548Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:50.918675Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:50.918784Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:50.919162Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:07.845112Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:512:2384], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.845672Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.845914Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:51:08.457516Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:08.646780Z node 14 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:51:08.685911Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:51:09.602931Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 7093, node 14 TClient is connected to server localhost:21620 2025-04-07T13:51:10.425176Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:10.425273Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:10.425352Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:10.425662Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnWrite+StreamLookup >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::AlterLogStore >> KqpIndexes::UpsertWithoutExtraNullDelete [GOOD] >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup >> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD] >> KqpUniqueIndex::UpdateFkAlreadyExist >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> KqpIndexes::MultipleSecondaryIndex [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete [GOOD] Test command err: Trying to start YDB, gRPC: 9277, MsgBus: 6490 2025-04-07T13:51:04.080531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568411413720475:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:04.080566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b44/r3tmp/tmpKoH5qp/pdisk_1.dat 2025-04-07T13:51:04.449629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:04.496376Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:04.496453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:04.499820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9277, node 1 2025-04-07T13:51:04.644855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:04.644879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:04.644889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:04.645000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6490 TClient is connected to server localhost:6490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:05.298919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:05.319328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:05.478630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:05.660550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:05.744647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.270122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568424298623987:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.270210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.515805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.543793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.573059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.602492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.639439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.676742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.784016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568424298624486:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.784083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.784300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568424298624491:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.788215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:07.803234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568424298624493:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:08.856440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.080656Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568411413720475:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:09.080742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:09.484275Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:09.511833Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 20109, MsgBus: 13572 2025-04-07T13:51:10.336798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568436762924625:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:10.340345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b44/r3tmp/tmpP1JUYn/pdisk_1.dat 2025-04-07T13:51:10.507913Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:10.520690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:10.520768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:10.522060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20109, node 2 2025-04-07T13:51:10.568147Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:10.568176Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:10.568187Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:10.568292Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13572 TClient is connected to server localhost:13572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:11.073905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.084247Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.090722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:11.166466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.361819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.434630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.678427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568449647828221:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:13.678610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:13.726679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.772818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.815959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.865871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.897997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.942370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.009765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568453942796013:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.009848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.010229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568453942796018:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.013799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:14.024352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568453942796020:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:15.102100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.339818Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568436762924625:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.339884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:17.058092Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:17.553959Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:17.854430Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SortByPk+StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> KqpMultishardIndex::DataColumnSelect-StreamLookup [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-04-07T13:50:54.525647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:54.525972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:54.526204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001099/r3tmp/tmpI15QSm/pdisk_1.dat 2025-04-07T13:50:54.867371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:50:54.902731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:54.962814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:54.962968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:54.974226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:55.102127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.150013Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:50:55.151036Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:50:55.151450Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:50:55.151684Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:50:55.189084Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:50:55.189907Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:50:55.190034Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:50:55.191704Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:50:55.191785Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:50:55.191838Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:50:55.192185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:50:55.223672Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:50:55.223936Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:50:55.224207Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:50:55.224244Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:50:55.224281Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T13:50:55.224329Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:55.224692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.224783Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.224984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:50:55.225130Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:50:55.225642Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.225682Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.225725Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:50:55.225791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:50:55.225828Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:50:55.225903Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:50:55.225956Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:50:55.225991Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:50:55.226032Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:50:55.226080Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:55.226223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:50:55.226264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:50:55.226367Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:50:55.226577Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:50:55.226640Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:50:55.226735Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:50:55.226801Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:50:55.226859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:50:55.226899Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:50:55.226931Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:50:55.227182Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:50:55.227227Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:50:55.227262Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:50:55.227293Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:55.227347Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:50:55.227386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:50:55.227429Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:50:55.227461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:50:55.227488Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:50:55.228524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:50:55.228559Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:50:55.240193Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:50:55.240288Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:50:55.240325Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:55.240373Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:50:55.240441Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:50:55.430587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.430650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.430688Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:50:55.431091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T13:50:55.431139Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T13:50:55.431264Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:50:55.431306Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T13:50:55.431346Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T13:50:55.431381Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T13:50:55.443004Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:50:55.443111Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:55.444082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.444134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.444184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:50:55.444225Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:50:55.444282Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T13:50:55.444323Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:18.940712Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:893:2738], 1001} after executionsCount# 1 2025-04-07T13:51:18.940755Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:893:2738], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:18.940811Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:893:2738], 1001} finished in read 2025-04-07T13:51:18.940854Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:51:18.940877Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T13:51:18.940900Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T13:51:18.940926Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-07T13:51:18.940994Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:51:18.941018Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T13:51:18.941041Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-07T13:51:18.941067Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T13:51:18.941143Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T13:51:18.941699Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:898:2743], Recipient [6:683:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.941742Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.941781Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:897:2742], serverId# [6:898:2743], sessionId# [0:0:0] 2025-04-07T13:51:18.941896Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, 
Sender [6:896:2741], Recipient [6:683:2564]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T13:51:18.942728Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:901:2746], Recipient [6:683:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.942766Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.942802Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:900:2745], serverId# [6:901:2746], sessionId# [0:0:0] 2025-04-07T13:51:18.942998Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:899:2744], Recipient [6:683:2564]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T13:51:18.943077Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-07T13:51:18.943113Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:51:18.943141Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-07T13:51:18.943182Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-04-07T13:51:18.943237Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-07T13:51:18.943262Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-04-07T13:51:18.943301Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-07T13:51:18.943326Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-07T13:51:18.943366Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-04-07T13:51:18.943394Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-07T13:51:18.943417Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-07T13:51:18.943453Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-04-07T13:51:18.943494Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-04-07T13:51:18.943569Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:18.943741Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:899:2744], 1002} after executionsCount# 1 2025-04-07T13:51:18.943797Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:899:2744], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:18.943856Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:899:2744], 1002} finished in read 2025-04-07T13:51:18.943903Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-07T13:51:18.943927Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-04-07T13:51:18.943951Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T13:51:18.943974Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-04-07T13:51:18.944020Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-07T13:51:18.944049Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T13:51:18.944082Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-04-07T13:51:18.944117Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-07T13:51:18.944199Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-07T13:51:18.944782Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:904:2749], Recipient [6:681:2562]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.944833Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.944886Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:903:2748], serverId# [6:904:2749], sessionId# [0:0:0] 2025-04-07T13:51:18.945027Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:902:2747], Recipient [6:681:2562]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T13:51:18.945841Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:907:2752], Recipient [6:681:2562]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.945879Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:18.945914Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:906:2751], serverId# [6:907:2752], sessionId# [0:0:0] 2025-04-07T13:51:18.946065Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:905:2750], Recipient [6:681:2562]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T13:51:18.946130Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-07T13:51:18.946190Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:51:18.946223Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-07T13:51:18.946266Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-04-07T13:51:18.946338Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-07T13:51:18.946368Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-04-07T13:51:18.946395Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-07T13:51:18.946421Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-07T13:51:18.946461Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-04-07T13:51:18.946494Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-07T13:51:18.946520Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-07T13:51:18.946540Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-04-07T13:51:18.946619Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-04-07T13:51:18.946704Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:18.946890Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:905:2750], 1003} after executionsCount# 1 2025-04-07T13:51:18.946937Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:905:2750], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:18.946992Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:905:2750], 1003} finished in read 2025-04-07T13:51:18.947036Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-07T13:51:18.947060Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-04-07T13:51:18.947083Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-04-07T13:51:18.947108Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-04-07T13:51:18.947156Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-07T13:51:18.947178Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-04-07T13:51:18.947217Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-04-07T13:51:18.947249Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-07T13:51:18.947325Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> TopicService::UnknownTopic [GOOD] >> KqpNewEngine::PrecomputeKey [GOOD] >> DemoTx::Scenario_4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11725, MsgBus: 2868 2025-04-07T13:51:03.397080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568407867890082:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:03.397142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b45/r3tmp/tmpZPGYbS/pdisk_1.dat 2025-04-07T13:51:03.705987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11725, node 1 2025-04-07T13:51:03.789370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:03.789390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:03.789406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:03.789474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:03.800698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:03.800804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:03.804180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2868 TClient is connected to server localhost:2868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:04.557033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.568575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:04.575898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:04.695217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:04.848882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:04.937072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:06.525817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568420752793666:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.525931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:06.798832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.834098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.859150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.898545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:06.966312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.004227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:07.061365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568425047761458:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.061460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.061657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568425047761464:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:07.065318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:07.075898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568425047761466:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } waiting... 2025-04-07T13:51:08.077542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:08.406959Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568407867890082:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:08.407107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17583, MsgBus: 5411 2025-04-07T13:51:10.554970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568435927104752:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:10.556855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b45/r3tmp/tmp092cg9/pdisk_1.dat 2025-04-07T13:51:10.664325Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:10.685737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:10.685822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:10.690568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17583, node 2 2025-04-07T13:51:10.737198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:10.737218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:10.737231Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:10.737320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5411 TClient is connected to server localhost:5411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:11.129615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:11.139734Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:11.150298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.244282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.425090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.516582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.957843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568448812008252:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:13.957932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.034738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.115448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.148122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.183488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.220731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.260094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.334788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568453106976051:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.334854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.334996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568453106976056:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.338416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:14.352110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568453106976058:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:15.556895Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568435927104752:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.556972Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:15.574607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.631836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.697623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.667346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> KqpIndexes::ForbidViewModification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrecomputeKey [GOOD] Test command err: Trying to start YDB, gRPC: 26583, MsgBus: 2355 2025-04-07T13:50:39.751567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568304134680575:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:39.752267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c5/r3tmp/tmpf0feUh/pdisk_1.dat 2025-04-07T13:50:40.150364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:40.152933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:40.153059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:40.157443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26583, node 1 2025-04-07T13:50:40.247103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:40.247141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:40.247155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:40.247262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2355 TClient is connected to server localhost:2355 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:40.910917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:40.960358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:41.117212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:41.290626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:41.352653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:42.883977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568317019584080:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:42.884102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:43.234728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.267920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.304969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.354275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.399143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.426609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:43.478842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568321314551871:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:43.478916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568321314551876:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:43.478926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:43.482295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:43.491362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568321314551878:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:44.754877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568304134680575:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:44.754973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61527, MsgBus: 26503 2025-04-07T13:50:45.754180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568328516032709:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c5/r3tmp/tmpJzxrie/pdisk_1.dat 2025-04-07T13:50:45.938276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:46.127307Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:46.157493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:46.168892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:46.174697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61527, node 2 2025-04-07T13:50:46.365278Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:46.365303Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:46.365309Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:46.365404Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26503 TClient is connected to server localhost:26503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:47.012090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:47.023799Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:47.037208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:47.140862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:47.315722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:47.399702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:49.710662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568345695903458:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... : 72057594046644480 waiting... 2025-04-07T13:51:06.469371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:06.554636Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:09.461446Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568434532030740:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:09.461558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:09.523030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.568088Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.623845Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.661430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.717383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.815222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:09.865936Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568434532031236:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:09.866036Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:09.866074Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568434532031241:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:09.869731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:09.880032Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490568434532031243:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:10.456263Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490568417352159947:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:10.457976Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31217, MsgBus: 14523 2025-04-07T13:51:12.803406Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490568446377058126:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:12.803461Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c5/r3tmp/tmp4Th0US/pdisk_1.dat 2025-04-07T13:51:13.010160Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:13.047967Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:13.048052Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:13.052317Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31217, node 6 2025-04-07T13:51:13.119191Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:13.119218Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:13.119226Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:13.119354Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14523 TClient is connected to server localhost:14523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:13.870948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.888427Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:13.901155Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.011327Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:14.244919Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:14.368602Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.379042Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568467851896317:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.379170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.419461Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.521411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.606099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.656099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.739360Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.803529Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490568446377058126:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:17.803598Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:17.811568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.903297Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568467851896831:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.903389Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568467851896836:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.903398Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.907614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:17.924476Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490568467851896838:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TopicService::UseDoubleSlashInTopicPath >> KqpIndexes::UpsertMultipleUniqIndexes >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> DemoTx::Scenario_5 >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 32422, MsgBus: 7141 2025-04-07T13:51:15.207887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568456388980947:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.213466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b22/r3tmp/tmpNlo5pD/pdisk_1.dat 2025-04-07T13:51:15.671529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:15.671621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:15.673514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:15.674768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32422, node 1 2025-04-07T13:51:15.852291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:15.852311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:15.852321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:15.852388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7141 TClient is connected to server localhost:7141 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:16.408603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.454376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.570271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.727472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.813879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:18.374541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568469273884526:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.374626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.648514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.685127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.714170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.741259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.767612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.877884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.951194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568469273885031:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.951257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.951549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568469273885036:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.954914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:18.962835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568469273885038:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:19.885068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.228214Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568456388980947:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:20.228327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> KqpIndexes::SelectConcurentTX >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-04-07T13:50:55.074979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:50:55.075244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:55.075488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108d/r3tmp/tmpXGpnfR/pdisk_1.dat 2025-04-07T13:50:55.415698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.453421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:55.502634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:55.502763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:55.517435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:55.632004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:55.679752Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:50:55.680840Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:50:55.681296Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:50:55.681525Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:50:55.725817Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:50:55.726583Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:50:55.726687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:50:55.728181Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:50:55.728272Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:50:55.728324Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:50:55.728645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:50:55.770719Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:50:55.770942Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:50:55.771048Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:50:55.771083Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:50:55.771130Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T13:50:55.771176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:55.771541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.771614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:55.771821Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:50:55.771904Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:50:55.772304Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.772347Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.772402Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T13:50:55.772484Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:50:55.772522Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:50:55.772555Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:50:55.772587Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:50:55.772615Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:50:55.772651Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:50:55.772716Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:50:55.772868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T13:50:55.772904Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:50:55.773014Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:50:55.773246Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:50:55.773298Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:50:55.773390Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:50:55.773455Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:50:55.773504Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:50:55.773550Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:50:55.773583Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:50:55.773841Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:50:55.773876Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:50:55.773905Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:50:55.773936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:55.773989Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:50:55.774024Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:50:55.774059Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:50:55.774087Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:50:55.774119Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:50:55.775455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:50:55.775503Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:50:55.789295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:50:55.789386Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:50:55.789422Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:50:55.789465Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T13:50:55.789526Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:50:55.987470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.987538Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:50:55.987582Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:50:55.988002Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T13:50:55.988044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T13:50:55.988181Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:50:55.988224Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T13:50:55.988265Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T13:50:55.988301Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T13:50:55.999676Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:50:56.000195Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:50:56.001364Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:56.001422Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:50:56.001482Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:50:56.001545Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:50:56.001601Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T13:50:56.001646Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:22.995941Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:970:2793], 1001} after executionsCount# 1 2025-04-07T13:51:22.995972Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:970:2793], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:22.996013Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:970:2793], 1001} finished in read 2025-04-07T13:51:22.996044Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-07T13:51:22.996059Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T13:51:22.996083Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T13:51:22.996107Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-07T13:51:22.996134Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-07T13:51:22.996151Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T13:51:22.996166Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-07T13:51:22.996183Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T13:51:22.996234Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T13:51:22.996579Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:975:2798], Recipient [6:683:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.996604Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.996629Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:974:2797], serverId# [6:975:2798], sessionId# [0:0:0] 2025-04-07T13:51:22.997209Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:973:2796], Recipient [6:683:2564]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T13:51:22.997705Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:978:2801], Recipient [6:683:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.997732Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.997764Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:977:2800], serverId# [6:978:2801], sessionId# [0:0:0] 2025-04-07T13:51:22.997860Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:976:2799], Recipient [6:683:2564]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T13:51:22.997914Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-07T13:51:22.997940Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:51:22.997962Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-07T13:51:22.997991Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-04-07T13:51:22.998033Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-07T13:51:22.998049Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-04-07T13:51:22.998066Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-07T13:51:22.998082Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-07T13:51:22.998106Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-04-07T13:51:22.998126Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-07T13:51:22.998156Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-07T13:51:22.998174Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-04-07T13:51:22.998191Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-04-07T13:51:22.998233Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:22.998343Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:976:2799], 1002} after executionsCount# 1 2025-04-07T13:51:22.998379Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:976:2799], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:22.998418Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:976:2799], 1002} finished in read 2025-04-07T13:51:22.998446Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-07T13:51:22.998461Z node 6 :TX_DATASHARD TRACE: 
Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-04-07T13:51:22.998474Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T13:51:22.998488Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-04-07T13:51:22.998521Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-07T13:51:22.998539Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T13:51:22.998559Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-04-07T13:51:22.998584Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-07T13:51:22.998636Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-07T13:51:22.998988Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:981:2804], Recipient [6:681:2562]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.999011Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.999037Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:980:2803], serverId# [6:981:2804], sessionId# [0:0:0] 2025-04-07T13:51:22.999082Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:979:2802], Recipient [6:681:2562]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T13:51:22.999476Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:984:2807], Recipient [6:681:2562]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.999502Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:51:22.999524Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:983:2806], serverId# [6:984:2807], sessionId# [0:0:0] 2025-04-07T13:51:22.999614Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:982:2805], Recipient [6:681:2562]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T13:51:22.999660Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-07T13:51:22.999689Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:51:22.999713Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-07T13:51:22.999739Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-04-07T13:51:22.999778Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-07T13:51:22.999793Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-04-07T13:51:22.999822Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-07T13:51:22.999840Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-07T13:51:22.999864Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-04-07T13:51:22.999889Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-07T13:51:22.999907Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-07T13:51:22.999922Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-04-07T13:51:22.999935Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-04-07T13:51:22.999977Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T13:51:23.000037Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:982:2805], 1003} after executionsCount# 1 2025-04-07T13:51:23.000068Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:982:2805], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:51:23.000103Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:982:2805], 1003} finished in read 2025-04-07T13:51:23.000130Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-07T13:51:23.000146Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-04-07T13:51:23.000160Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-04-07T13:51:23.000172Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-04-07T13:51:23.000192Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-07T13:51:23.000206Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-04-07T13:51:23.000220Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-04-07T13:51:23.000237Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-07T13:51:23.000282Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8466, MsgBus: 12195 2025-04-07T13:51:09.141071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568431602058999:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:09.142130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b3a/r3tmp/tmpyhB5uH/pdisk_1.dat 2025-04-07T13:51:09.483740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:09.507192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-07T13:51:09.507281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:09.508927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8466, node 1 2025-04-07T13:51:09.591423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:09.591461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:09.591479Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:09.591582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12195 TClient is connected to server localhost:12195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:10.071802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:10.085085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:10.092302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:10.263005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:10.444808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:10.508509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.267406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568444486962568:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:12.267517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:12.584059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.622153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.665216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.695872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.742384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.831689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:12.944263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568444486963068:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:12.944357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:12.944683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568444486963073:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:12.951368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:12.974472Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:12.976371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568444486963075:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:14.109247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.140650Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568431602058999:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:14.140729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:14.173026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22144, MsgBus: 14600 2025-04-07T13:51:16.609089Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568463642473688:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:16.609141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b3a/r3tmp/tmpuM5rSK/pdisk_1.dat 2025-04-07T13:51:16.771757Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:16.796365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:16.796441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:16.798041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22144, node 2 2025-04-07T13:51:16.883740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:16.883768Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:16.883777Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:16.883876Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14600 TClient is connected to server localhost:14600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:51:17.311554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.337678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.432572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.622235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.713698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.881297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568476527377286:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.881387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.932245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.959944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.995688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.024346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.079026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.144242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.247486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568480822345086:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.247621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.247646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568480822345091:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.251985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:20.275555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568480822345093:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:21.527230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.610144Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568463642473688:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:21.610198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:21.645302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> KqpIndexes::CheckUpsertNonEquatableType+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 14791, MsgBus: 27486 2025-04-07T13:51:07.266013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568424838822550:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:07.266101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b40/r3tmp/tmp9BSEsS/pdisk_1.dat 2025-04-07T13:51:07.580468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14791, node 1 2025-04-07T13:51:07.653653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:07.653839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:07.655352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:07.655631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:07.655642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:07.655652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:07.655725Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27486 TClient is connected to server localhost:27486 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:08.150813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.202738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.338792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.532621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:08.614498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:10.138785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568437723726125:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:10.138924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:10.440748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.480920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.547078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.618640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.645489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.682756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:10.741614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568437723726624:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:10.741678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:10.742015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568437723726629:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:10.745523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:10.759022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568437723726631:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:11.975581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.265731Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568424838822550:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:12.265823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:14.090080Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr889z8fdxkb21kvdqvhc8t4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU4MGE1YS01OWM4OGE4ZS0zNzAzNjYyOS0xMzRmNTgzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:14.097710Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTU4MGE1YS01OWM4OGE4ZS0zNzAzNjYyOS0xMzRmNTgzNg==, ActorId: [1:7490568446313662277:2519], ActorState: ExecuteState, TraceId: 01jr889z8fdxkb21kvdqvhc8t4, Create QueryResponse for error on request, msg: 2025-04-07T13:51:14.827623Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88a05xdmtsb4fc4bpqmz8v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU4MGE1YS01OWM4OGE4ZS0zNzAzNjYyOS0xMzRmNTgzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:14.827869Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTU4MGE1YS01OWM4OGE4ZS0zNzAzNjYyOS0xMzRmNTgzNg==, ActorId: [1:7490568446313662277:2519], ActorState: ExecuteState, TraceId: 01jr88a05xdmtsb4fc4bpqmz8v, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5033, MsgBus: 24581 2025-04-07T13:51:15.593016Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568460269448602:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.593077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b40/r3tmp/tmpiZdovB/pdisk_1.dat 2025-04-07T13:51:15.824752Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5033, node 2 2025-04-07T13:51:15.870591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:15.870700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:15.875740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:15.940942Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:15.940979Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:15.940988Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:15.941092Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:24581 TClient is connected to server localhost:24581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:16.348051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.372147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.461773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.658008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.738276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.189980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568477449319483:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.190085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.230371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.300619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.331232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.397846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.442327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.529553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.620312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568477449319992:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.620371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.620508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568477449319997:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.624368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:19.639770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568477449319999:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:20.582273Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568460269448602:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:20.582348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:20.945781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.336078Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88a8971ats3w0n2a9ms5fk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ2ZmM4NjYtNGRjZGZkNmMtZWZiNWJmY2QtOTVhY2U5MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:23.336302Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ2ZmM4NjYtNGRjZGZkNmMtZWZiNWJmY2QtOTVhY2U5MWQ=, ActorId: [2:7490568486039255673:2520], ActorState: ExecuteState, TraceId: 01jr88a8971ats3w0n2a9ms5fk, Create QueryResponse for error on request, msg: 2025-04-07T13:51:24.619062Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88a9am130cmh8bte47yvc1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ2ZmM4NjYtNGRjZGZkNmMtZWZiNWJmY2QtOTVhY2U5MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:24.619332Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ2ZmM4NjYtNGRjZGZkNmMtZWZiNWJmY2QtOTVhY2U5MWQ=, ActorId: [2:7490568486039255673:2520], ActorState: ExecuteState, TraceId: 01jr88a9am130cmh8bte47yvc1, Create QueryResponse for error on request, msg: >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter >> KqpMultishardIndex::SortedRangeReadDesc >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnWrite-StreamLookup >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] >> KqpIndexes::MultipleModifications ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is 
[2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:158:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:159:2057] recipient: [7:157:2178] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:161:2057] recipient: [7:157:2178] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:160:2179] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:230:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:144:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:144:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:149:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:150:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:150:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:149:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:150:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:150:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:149:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:153:2057] recipient: [15:151:2172] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:155:2057] recipient: [15:151:2172] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:154:2173] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:224:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:154:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:156:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:158:2057] recipient: [16:157:2177] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:160:2057] recipient: [16:157:2177] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:159:2178] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:229:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:154:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:157:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:158:2057] recipient: [17:156:2177] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:160:2057] recipient: [17:156:2177] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:159:2178] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:229:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:156:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:159:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:160:2057] recipient: [18:158:2178] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:162:2057] recipient: [18:158:2178] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:161:2179] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:231:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:161:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:164:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:165:2057] recipient: [19:163:2183] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:167:2057] recipient: [19:163:2183] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:166:2184] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:236:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:161:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:164:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:165:2057] recipient: [20:163:2183] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:167:2057] recipient: [20:163:2183] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:166:2184] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:236:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:164:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:167:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:168:2057] recipient: [21:166:2185] Leader for TabletID 72057594037927937 is [21:169:2186] sender: [21:170:2057] recipient: [21:166:2185] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:169:2186] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 15285, MsgBus: 8022 2025-04-07T13:51:14.011728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568455133925365:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:14.011873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b26/r3tmp/tmppXemaJ/pdisk_1.dat 2025-04-07T13:51:14.379703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:14.423478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:14.423952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:14.425296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15285, node 1 2025-04-07T13:51:14.491722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:14.491751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:14.491762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:14.491865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8022 TClient is connected to server localhost:8022 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:15.042541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.071329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:15.095344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.259524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.459483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.548894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.572937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568468018828961:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.580701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:17.606767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.647432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.735035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.778330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.859056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.923028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:18.038922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568472313796763:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.039015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.039288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568472313796768:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.044033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:18.056265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568472313796770:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:19.012781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568455133925365:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:19.012837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:19.113612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.139782Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 28789, MsgBus: 22786 2025-04-07T13:51:20.969119Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568478717977673:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b26/r3tmp/tmp1E7idV/pdisk_1.dat 2025-04-07T13:51:21.047643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:21.087079Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:21.108889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:21.108984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:21.109960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28789, node 2 2025-04-07T13:51:21.164682Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:21.164708Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:21.164715Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:21.164777Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22786 TClient is connected to server localhost:22786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:21.501086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:21.507376Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:21.519921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:21.588323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:21.736942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.813291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.765611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568491602881100:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.765696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.803417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.840723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.897327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.976218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:24.051158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:24.089113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:24.146973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568495897848897:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:24.147067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:24.147361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568495897848902:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:24.150167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:24.167141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568495897848904:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:25.390822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.966145Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568478717977673:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:25.966203Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:26.634268Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:26.669046Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::InsertFkPartialColumnSet >> KqpExplain::AggGroupLimit [GOOD] >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 18862, MsgBus: 3763 2025-04-07T13:51:11.798511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568441507962500:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:11.798586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b32/r3tmp/tmpJ8CcCD/pdisk_1.dat 2025-04-07T13:51:12.350235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:12.358733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:12.358833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18862, node 1 2025-04-07T13:51:12.363801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:12.476873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:12.476903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:12.476913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:12.477007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3763 TClient is connected to server localhost:3763 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:13.083840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.107718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.285358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:13.481869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.551616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.217868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568458687833380:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.218022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.580350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.613413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.645679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.675180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.722156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.797040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.897152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568458687833885:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.897271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.897470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568458687833890:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.900509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:15.907651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568458687833892:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:16.798769Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568441507962500:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:16.798847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:17.038931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.084097Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568475867704633:2584], TxId: 281474976710681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODYxZTYzZDMtMzBmMDhjYzgtYjAwNTg1ZjgtZWQ3NjVjMGI=. CustomerSuppliedId : . TraceId : 01jr88a4kp6xzdzr47nv1awcyt. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:19.084924Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568475867704634:2585], TxId: 281474976710681, task: 2. Ctx: { TraceId : 01jr88a4kp6xzdzr47nv1awcyt. SessionId : ydb://session/3?node_id=1&id=ODYxZTYzZDMtMzBmMDhjYzgtYjAwNTg1ZjgtZWQ3NjVjMGI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568475867704630:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:19.086832Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODYxZTYzZDMtMzBmMDhjYzgtYjAwNTg1ZjgtZWQ3NjVjMGI=, ActorId: [1:7490568467277769552:2520], ActorState: ExecuteState, TraceId: 01jr88a4kp6xzdzr47nv1awcyt, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20598, MsgBus: 4432 2025-04-07T13:51:19.894511Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568474562758156:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:19.894567Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b32/r3tmp/tmpm7EmOm/pdisk_1.dat 2025-04-07T13:51:20.076495Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:20.080449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:20.080533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:20.081980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20598, node 2 2025-04-07T13:51:20.135243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:20.135267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:20.135277Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:20.135384Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4432 TClient is connected to server localhost:4432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:20.561664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.567343Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:20.581287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.668812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.832159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.920841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.092784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568491742629063:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.092875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.131997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.171891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.209174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.284602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.338889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.386959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.488713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568491742629567:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.488788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.489085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568491742629572:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.492383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:23.504387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568491742629574:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:24.631310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.900857Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568474562758156:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:24.901177Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7395, MsgBus: 14704 2025-04-07T13:51:12.416260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568444587102097:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:12.416369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b2a/r3tmp/tmpqe8UFI/pdisk_1.dat 2025-04-07T13:51:12.817050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:12.862502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:12.862574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:12.863979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7395, node 1 2025-04-07T13:51:12.941941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:12.941957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:12.941968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:12.942036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14704 TClient is connected to server localhost:14704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:13.434098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.456022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.562312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.732012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.803060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.538308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568457472005678:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.544578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.766197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.788436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.813606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.843925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.870294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.903578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.954302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568457472006171:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.954370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.954688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568457472006176:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.957717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:15.969181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568457472006178:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:16.935746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.420785Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568444587102097:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:17.420846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24875, MsgBus: 10317 2025-04-07T13:51:20.349856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568479794043123:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:20.349936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b2a/r3tmp/tmpJh2K7d/pdisk_1.dat 2025-04-07T13:51:20.432780Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24875, node 2 2025-04-07T13:51:20.488250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:20.488343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:20.489672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:20.521119Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:20.521138Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:20.521145Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:20.521235Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10317 TClient is connected to server localhost:10317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:20.860603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:51:20.879408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.932711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:21.061511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:21.117070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.185387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568492678946731:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.185476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.248712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.337624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.377849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.444178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.514256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.582383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.672466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568492678947240:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.672559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.672819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568492678947245:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.681070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:23.693213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568492678947247:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:25.007202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:25.350105Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568479794043123:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:25.350176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:144:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:144:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:149:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:150:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:150:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:149:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:150:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:150:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:148:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:150:2171] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:154:2057] recipient: [15:150:2171] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:153:2172] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:223:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:153:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:156:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:157:2057] recipient: [16:155:2176] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:159:2057] recipient: [16:155:2176] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:158:2177] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:229:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:153:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:156:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:157:2057] recipient: [17:155:2176] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:159:2057] recipient: [17:155:2176] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:158:2177] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:228:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:154:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:157:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:158:2057] recipient: [18:156:2176] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:160:2057] recipient: [18:156:2176] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:159:2177] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:229:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::WriteNonExistingTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::AggGroupLimit [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f9/r3tmp/tmpAwe5S8/pdisk_1.dat 2025-04-07T13:49:07.367044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:07.749816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:07.839406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:07.839486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:07.851731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9551, node 1 2025-04-07T13:49:08.229293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:08.229314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:08.229321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:08.229409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:49:08.310823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13439 2025-04-07T13:49:08.568507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:49:08.639029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567913239870175:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:08.893255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:49:08.897022Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:49:08.896024Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:08.896116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:08.933300Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:49:08.949640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:09.124842Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:49:09.124919Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:49:09.228598Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:49:09.228635Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:49:09.229579Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:49:09.229607Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:49:09.229624Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:49:09.229639Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:49:09.229668Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:49:09.229690Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:49:09.231113Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:09.246357Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:49:09.246405Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7490567917534837847:2277] 2025-04-07T13:49:09.246454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:49:09.263135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:49:09.263159Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:49:09.263203Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:49:09.272914Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:49:09.347381Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-04-07T13:49:09.361770Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:49:09.361816Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7490567917534837990:2304], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:49:09.368962Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7490567917534837998:2357] 2025-04-07T13:49:09.369200Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7490567917534837998:2357], schemeshard id = 72075186224037889 2025-04-07T13:49:09.380561Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:09.380631Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:09.383851Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:09.400854Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:49:09.418598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037889 2025-04-07T13:49:09.426240Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:49:09.426295Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-04-07T13:49:09.432015Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:49:09.820390Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-04-07T13:49:09.886693Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:49:12.513905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567928035227260:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.514023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.961589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037889 2025-04-07T13:49:13.277094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:49:13.280831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:49:13.281133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:49:13.281245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:49:13.281341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:49:13.281435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:49:13.281548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:49:13.281640Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:49:13.281733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:49:13.281834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:49:13.281928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:49:13.282016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7490567934714707591:2339];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T13:49:13.343054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7490567934714707578:2327];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:49:13.343119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7490567934714707578:2327];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:49:13.343359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7490567934714707578:2327];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:49:13.3434 ... 92], tablet id = 72075186224037897, status = OK 2025-04-07T13:51:20.176404Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-07T13:51:20.176435Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7490568479781546325:2311], StatRequests.size() = 1 2025-04-07T13:51:20.175910Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:7490568479941400598:2592] 2025-04-07T13:51:20.176240Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [4:7490568479941400598:2592], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T13:51:20.176267Z node 4 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-04-07T13:51:20.202284Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2025-04-07T13:51:20.203186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:51:20.204330Z node 3 :STATISTICS DEBUG: EvClientDestroyed, node id = 3, client id = [3:7490568479781546327:2312], server id = [4:7490568479941400598:2592], tablet id = 72075186224037897 2025-04-07T13:51:20.204366Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7490568479781546330:2313] 2025-04-07T13:51:20.204389Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7490568479781546330:2313] 2025-04-07T13:51:20.274311Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7490568479781546327:2312], schemeshard count = 1 2025-04-07T13:51:21.006015Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568462601675837:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:21.006089Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11916, MsgBus: 3906 2025-04-07T13:51:22.356519Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490568488058460021:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f9/r3tmp/tmpe2tyt1/pdisk_1.dat 2025-04-07T13:51:22.423072Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:22.519073Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:22.551268Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-07T13:51:22.551365Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:22.553839Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11916, node 5 2025-04-07T13:51:22.628960Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:22.628985Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:22.628995Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:22.629106Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3906 TClient is connected to server localhost:3906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:23.202345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.223031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.285310Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.445700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.528761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:25.891670Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568500943363464:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.891784Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.933252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.979906Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.030060Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.069700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.115307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.213877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.310402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568505238331262:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.310484Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.310816Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568505238331268:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.314640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:26.329367Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490568505238331270:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:27.334709Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490568488058460021:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:27.377010Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] Test command err: Trying to start YDB, gRPC: 11199, MsgBus: 24336 2025-04-07T13:51:15.231218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568460145976756:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.231510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001b21/r3tmp/tmpWymWX0/pdisk_1.dat 2025-04-07T13:51:15.784398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:15.811386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:15.811491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:15.813441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11199, node 1 2025-04-07T13:51:15.919025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:15.919049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:15.919061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:15.919148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24336 TClient is connected to server localhost:24336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:16.588231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.621787Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:16.631731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.793168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:16.994652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:17.137222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:18.962558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568473030880184:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:18.962667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.269144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.343394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.377200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.434471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.467687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.520722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:19.612807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568477325847979:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.612884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.613102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568477325847984:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:19.617059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:19.627903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568477325847986:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:20.248804Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568460145976756:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:20.248851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:20.724581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.506511Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:22.479886Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 22883, MsgBus: 29762 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b21/r3tmp/tmpReMGoC/pdisk_1.dat 2025-04-07T13:51:23.281289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:23.332443Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:23.354915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:23.354999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:23.356771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22883, node 2 2025-04-07T13:51:23.444701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:23.444717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:23.444728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:23.444806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29762 TClient is connected to server localhost:29762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:23.999623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:24.027201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.104496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.297963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.369527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.668802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568507015138391:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.668898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.717947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.803991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.875519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.962003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.992757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.042605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.095864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568511310106187:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.095937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.096106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568511310106192:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.099343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:27.117525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568511310106194:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:28.311380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexOrderBy |85.4%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> KqpUniqueIndex::InsertNullInComplexFk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 4769, MsgBus: 7420 2025-04-07T13:51:10.711402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568436353298136:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:10.712679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b38/r3tmp/tmpJOLv3W/pdisk_1.dat 2025-04-07T13:51:11.120103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:11.124065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:11.124245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:11.127172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4769, node 1 2025-04-07T13:51:11.237151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:11.237176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:11.237183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:11.237268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7420 TClient is connected to server localhost:7420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:11.787148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:11.800280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:11.815582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:11.971771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.127896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:12.217880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:13.787395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568449238201590:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:13.787517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.009610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.040300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.067485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.103369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.182061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.254694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.323681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568453533169385:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.323752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.323822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568453533169390:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.327133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:14.340167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568453533169392:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:15.236386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.703800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568436353298136:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.703868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13652, MsgBus: 30129 2025-04-07T13:51:19.531392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568475829681105:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:19.531850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b38/r3tmp/tmpeUTmky/pdisk_1.dat 2025-04-07T13:51:19.633250Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:19.653466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:19.653551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:19.655078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13652, node 2 2025-04-07T13:51:19.765565Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:19.765589Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:19.765598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:19.765702Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30129 TClient is connected to server localhost:30129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:20.470049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:20.502936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.602325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.759595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.834686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.304788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568493009551988:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.304881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.339359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.380628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.419460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.465562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.511233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.593553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.687636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568493009552493:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.687715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.687916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568493009552498:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.691809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:23.704511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568493009552500:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:24.534089Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568475829681105:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:24.534139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:24.995539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 18524, MsgBus: 25847 2025-04-07T13:51:11.063069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568439360731267:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:11.063789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b34/r3tmp/tmpsILTyl/pdisk_1.dat 2025-04-07T13:51:11.481705Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18524, node 1 2025-04-07T13:51:11.516751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:11.516842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:11.518394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:11.571637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:11.571674Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:11.571686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:11.571776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25847 TClient is connected to server localhost:25847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:12.155366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.167286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:12.183952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.338513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.535557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.608900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:14.415979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568452245634714:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.416107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.703137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.742828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.788904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.820425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.848035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.943820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.056389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568456540602516:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.056462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.056639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568456540602521:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.065944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:15.085706Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:15.086512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568456540602523:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:16.080980Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568439360731267:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:16.081240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:16.206054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 21154, MsgBus: 13268 2025-04-07T13:51:19.284912Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568477508000932:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:19.284965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b34/r3tmp/tmpsLH3Xp/pdisk_1.dat 2025-04-07T13:51:19.421061Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:19.447615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:19.447699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:19.448676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21154, node 2 2025-04-07T13:51:19.507474Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:19.507493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:19.507500Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:19.507585Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13268 TClient is connected to server localhost:13268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:19.938580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:19.943387Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:19.951140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.014336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.199958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.268958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.247550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568490392904513:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.247632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.286066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.312797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.351727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.387348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.420524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.457915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.514616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568490392905010:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.514695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.515028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568490392905015:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.518926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:22.535493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568490392905017:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:23.520586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.292898Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568477508000932:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:24.292966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:25.496235Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88aabregeq2qcvtybjp885, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:25.510290Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, ActorId: [2:7490568494687873397:2518], ActorState: ExecuteState, TraceId: 01jr88aabregeq2qcvtybjp885, Create QueryResponse for error on request, msg: 2025-04-07T13:51:26.492312Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88abaw4755ta8aptcqc675, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:26.492556Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, ActorId: [2:7490568494687873397:2518], ActorState: ExecuteState, TraceId: 01jr88abaw4755ta8aptcqc675, Create QueryResponse for error on request, msg: 2025-04-07T13:51:26.528077Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88ac9d2dwx0mcff1q7qvzg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:26.528328Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, ActorId: [2:7490568494687873397:2518], ActorState: ExecuteState, TraceId: 01jr88ac9d2dwx0mcff1q7qvzg, Create QueryResponse for error on request, msg: 2025-04-07T13:51:27.700686Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88acah4y9prggv9768a86y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-07T13:51:27.700926Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQwNjk0ZmQtMWQ1NjgyM2UtNjBmM2JhYjQtNWIxZGJhZmI=, ActorId: [2:7490568494687873397:2518], ActorState: ExecuteState, TraceId: 01jr88acah4y9prggv9768a86y, Create QueryResponse for error on request, msg: >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> YdbLogStore::AlterLogTable [FAIL] >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns >> KqpIndexes::MultipleSecondaryIndexWithSameComulns [GOOD] >> KqpIndexes::SelectConcurentTX2 >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 27052, MsgBus: 24341 2025-04-07T13:51:12.485898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568446306725401:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:12.486389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b2f/r3tmp/tmpIi5Vlw/pdisk_1.dat 2025-04-07T13:51:12.878852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:12.884965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:12.885116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:12.887093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27052, node 1 2025-04-07T13:51:12.948827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:12.948849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:12.948870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:12.948945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24341 TClient is connected to server localhost:24341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:13.455419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.480264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:13.486354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.635410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.802669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.878959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.655529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568459191628923:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.655688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.901067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.927889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.959103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.003319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.027712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.099772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.171440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568463486596715:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.171513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.171695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568463486596720:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.174962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:16.189986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568463486596722:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:17.477604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568446306725401:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:17.477980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:17.495377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.377569Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88a4kxct7b68jg810thrd1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RmZjYwM2QtYjgxYzk5Mi05ZDAzMDdiOC1kNGNmYmE5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:19.395294Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RmZjYwM2QtYjgxYzk5Mi05ZDAzMDdiOC1kNGNmYmE5Nw==, ActorId: [1:7490568467781565081:2520], ActorState: ExecuteState, TraceId: 01jr88a4kxct7b68jg810thrd1, Create QueryResponse for error on request, msg: 2025-04-07T13:51:20.253050Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88a5bjac3t1w9dxrcym38y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RmZjYwM2QtYjgxYzk5Mi05ZDAzMDdiOC1kNGNmYmE5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:20.253307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RmZjYwM2QtYjgxYzk5Mi05ZDAzMDdiOC1kNGNmYmE5Nw==, ActorId: [1:7490568467781565081:2520], ActorState: ExecuteState, TraceId: 01jr88a5bjac3t1w9dxrcym38y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16191, MsgBus: 7679 2025-04-07T13:51:21.403071Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568482195110549:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b2f/r3tmp/tmpO8ACIH/pdisk_1.dat 2025-04-07T13:51:21.470505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:21.523141Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:21.571838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:21.571943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:21.573297Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16191, node 2 2025-04-07T13:51:21.705332Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:21.705361Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:21.705371Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:21.705494Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:7679 TClient is connected to server localhost:7679 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:51:22.221966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.240535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.324278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.536507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.629362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.962864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568495080014010:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:24.962928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.022165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.072200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.118217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.158205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.194187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.274241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.358092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568499374981806:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.358180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.358409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568499374981812:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:25.362455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:25.383321Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:25.384789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568499374981814:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:26.409263Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568482195110549:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:26.412227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:26.479084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.457871Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88addt0mb6sbzrsswrrsrq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDVlOTQ2Ni04MzZlYjZlYS02NTA2OWNmYS05YmViNjdhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:28.458147Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDVlOTQ2Ni04MzZlYjZlYS02NTA2OWNmYS05YmViNjdhOQ==, ActorId: [2:7490568503669950184:2519], ActorState: ExecuteState, TraceId: 01jr88addt0mb6sbzrsswrrsrq, Create QueryResponse for error on request, msg: 2025-04-07T13:51:29.345293Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88ae6xfb4awm3pymc2rc1c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDVlOTQ2Ni04MzZlYjZlYS02NTA2OWNmYS05YmViNjdhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:29.345564Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDVlOTQ2Ni04MzZlYjZlYS02NTA2OWNmYS05YmViNjdhOQ==, ActorId: [2:7490568503669950184:2519], ActorState: ExecuteState, TraceId: 01jr88ae6xfb4awm3pymc2rc1c, Create QueryResponse for error on request, msg: >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] Test command err: Trying to start YDB, gRPC: 13504, MsgBus: 19579 2025-04-07T13:51:17.184978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568465615778012:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:17.185026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b1c/r3tmp/tmpZFYJ6F/pdisk_1.dat 2025-04-07T13:51:17.688383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:17.691520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:17.691610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:17.694077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13504, node 1 2025-04-07T13:51:17.797716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:17.797736Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2025-04-07T13:51:17.797742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:17.797818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19579 TClient is connected to server localhost:19579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:18.355170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:18.373339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:18.383190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:18.529559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:18.698908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:18.786080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.293416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568478500681614:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.293521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.500598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.522923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.546032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.573979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.644824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.675056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.740860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568478500682112:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.740927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.741103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568478500682117:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:20.744465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:20.751984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568478500682119:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:21.753421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.185499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568465615778012:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:22.185552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:22.743150Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:22.805851Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568487090617410:2512], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:51:22.807143Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI5NTlmYTAtNzI1ZTA2NWQtYTc5MjE5OC01ZDAxNTZlMA==, ActorId: [1:7490568482795649721:2460], ActorState: ExecuteState, TraceId: 01jr88a8n43txf8rpx00mv0amn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:51:22.826895Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568487090617416:2515], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-04-07T13:51:22.828247Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI5NTlmYTAtNzI1ZTA2NWQtYTc5MjE5OC01ZDAxNTZlMA==, ActorId: [1:7490568482795649721:2460], ActorState: ExecuteState, TraceId: 01jr88a8nz3t6chbts3mfa2yp2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:51:23.439795Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:23.630617Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:24.084789Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:24.433759Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 20016, MsgBus: 15710 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b1c/r3tmp/tmpJG7cnf/pdisk_1.dat 2025-04-07T13:51:25.399343Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:25.420949Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:25.443056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 20016, node 2 2025-04-07T13:51:25.443129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:25.444472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:25.541179Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:25.541205Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:25.541217Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:25.541301Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15710 TClient is connected to server localhost:15710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:25.987073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:26.033410Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:26.048359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:26.106188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.311610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.394091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.503373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568516339412778:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.503456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.551761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.625000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.655423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.684570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.761298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.812857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.923237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568516339413283:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.923296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.923439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568516339413288:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.926231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:28.937710Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:28.937925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568516339413290:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:30.133991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.134311Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:31.510587Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin >> TopicService::UseDoubleSlashInTopicPath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns [GOOD] Test command err: Trying to start YDB, gRPC: 16069, MsgBus: 61680 2025-04-07T13:51:12.343500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568443452000745:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:12.343542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b31/r3tmp/tmpvyIzzZ/pdisk_1.dat 2025-04-07T13:51:12.765300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:12.781303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:12.781404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:12.782996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16069, node 1 2025-04-07T13:51:12.913143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:12.913162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:12.913168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:12.913251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61680 TClient is connected to server localhost:61680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:13.489803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.509210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:13.530795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.704216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.856877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:13.939219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:15.540622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568456336904141:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.540794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:15.876153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.936178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:15.976933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.085914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.127113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.178076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:16.270019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568460631871938:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.270108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.270218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568460631871943:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:16.273373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:16.284850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568460631871945:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:17.334352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:17.346511Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568443452000745:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:17.350793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:18.880919Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:18.907633Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19179, MsgBus: 61504 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b31/r3tmp/tmpVYgSuf/pdisk_1.dat 2025-04-07T13:51:19.856393Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:19.856782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:19.906367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:19.906451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:19.910542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19179, node 2 2025-04-07T13:51:19.950977Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:19.950998Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:19.951005Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:19.951115Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61504 TClient is connected to server localhost:61504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:51:20.398535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:20.420091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.500778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.643126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.719939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.589495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568487714454277:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.589592Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.635556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.717102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.751090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.787778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.820303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.903963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.964796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568487714454778:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.964905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.965138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568487714454783:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.970179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:22.986714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568487714454785:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:24.008464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:25.018989Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:25.047816Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:25.113639Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:26.506125Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:27.300827Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:27.324512Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:28.255677Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:28.271565Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:29.157249Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:29.752416Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:29.782286Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.198639Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.223899Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.304553Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.827287Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.881726Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpMultishardIndex::SecondaryIndexSelect >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk-StreamLookup >> KqpIndexes::IndexOr |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |85.4%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpIndexes::UpsertWithNullKeysSimple >> TopicService::RelativePath >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpIndexes::MultipleModifications [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] Test command err: Trying to start YDB, gRPC: 11771, MsgBus: 11048 2025-04-07T13:51:22.496520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568489706542374:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:22.496590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b16/r3tmp/tmp9A76r6/pdisk_1.dat 2025-04-07T13:51:22.979582Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11771, node 1 2025-04-07T13:51:23.012855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:23.017139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:23.028582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:23.049439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:23.049461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:23.049468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:23.049568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11048 TClient is connected to server localhost:11048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:23.683538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:23.696934Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:23.709213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.863591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:24.061058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:24.138438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.066828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568506886413257:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.066959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.415891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.457762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.514781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.548078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.582990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.652440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.724138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568506886413761:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.724213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.724292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568506886413766:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.727754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:26.739152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568506886413768:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:27.502942Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568489706542374:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:27.502999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:27.818094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.443317Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568515476348953:2492], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-07T13:51:28.444571Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThjMDJkOS00ZDUzYjVmMy0yZDRkZDk0ZS04ZDVlZWNmNw==, ActorId: [1:7490568511181381375:2461], ActorState: ExecuteState, TraceId: 01jr88ae5p2wzqq0fhwgwbce1a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:51:28.486781Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568515476348957:2494], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-07T13:51:28.487477Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThjMDJkOS00ZDUzYjVmMy0yZDRkZDk0ZS04ZDVlZWNmNw==, ActorId: [1:7490568511181381375:2461], ActorState: ExecuteState, TraceId: 01jr88ae6q1dd3chx0n744wmc8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:51:28.514409Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568515476348962:2497], status: GENERIC_ERROR, issues:
:3:41: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-07T13:51:28.515713Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThjMDJkOS00ZDUzYjVmMy0yZDRkZDk0ZS04ZDVlZWNmNw==, ActorId: [1:7490568511181381375:2461], ActorState: ExecuteState, TraceId: 01jr88ae7m5fnps21my077rm3e, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T13:51:28.530456Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568515476348967:2499], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-07T13:51:28.541940Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThjMDJkOS00ZDUzYjVmMy0yZDRkZDk0ZS04ZDVlZWNmNw==, ActorId: [1:7490568511181381375:2461], ActorState: ExecuteState, TraceId: 01jr88ae8e0vdexknjch1br21s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 30672, MsgBus: 28448 2025-04-07T13:51:29.278670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568516449246170:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:29.278701Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b16/r3tmp/tmpBGRfXr/pdisk_1.dat 2025-04-07T13:51:29.384824Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:29.387518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:29.387576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:29.390615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30672, node 2 2025-04-07T13:51:29.441156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:29.441181Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:29.441188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:29.441274Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28448 TClient is connected to server localhost:28448 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:29.877844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.888993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:29.966988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.112420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.180166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.162097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568529334149746:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.162215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.216653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.282396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.324871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.358389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.405217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.492248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.595564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568529334150245:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.595657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.596026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568529334150250:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.600527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:32.627440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568529334150252:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:34.022619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.281192Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568516449246170:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:34.283496Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> KqpUniqueIndex::InsertFkPkOverlap |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |85.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |85.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> DemoTx::Scenario_5 [GOOD] >> KqpLimits::CancelAfterRwTx [GOOD] >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |85.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.5%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |85.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UpdateDeletePlan |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18382, MsgBus: 5248 2025-04-07T13:51:10.687004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568438337334588:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b39/r3tmp/tmpYHq2Bi/pdisk_1.dat 2025-04-07T13:51:10.873593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:11.143123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:11.147610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:11.147699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:11.150710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18382, node 1 2025-04-07T13:51:11.353636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:11.353661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:11.353669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:11.353785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5248 TClient is connected to server localhost:5248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:11.987958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:12.005421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:12.012336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.202318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.353878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:12.435219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:14.147011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568455517205335:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.147138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.383116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.409880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.452315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.483151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.518079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.550894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:14.643184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568455517205832:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.643268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.643451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568455517205837:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:14.646908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:14.661666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568455517205839:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:15.683851Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568438337334588:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:15.683910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:15.945234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 62615, MsgBus: 11653 2025-04-07T13:51:18.649418Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568470419159936:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b39/r3tmp/tmphVgJBt/pdisk_1.dat 2025-04-07T13:51:18.672417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:18.818518Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:18.823221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:18.823305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:18.836512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62615, node 2 2025-04-07T13:51:18.959419Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:18.959445Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:18.959461Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:18.959565Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11653 TClient is connected to server localhost:11653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:19.401860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:19.409511Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:19.422087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.517588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:19.660002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:19.734574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.655244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568483304063347:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:21.655317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:21.700279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.791232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.820771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.857998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.886082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:21.950260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:22.039452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568487599031149:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.039550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.039940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568487599031154:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.044837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:22.060057Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:22.061828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568487599031156:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:23.168287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:23.594320Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568470419159936:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:23.614805Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:32.377720Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568530548708384:2814], TxId: 281474976710731, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88ahjsdfcn2rc6j4af0xsv. SessionId : ydb://session/3?node_id=2&id=OWUzY2EwMTAtNTZhOTU1OTQtN2JmODRjMDYtYTFkYzEwN2Q=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:32.379224Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568530548708386:2815], TxId: 281474976710731, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWUzY2EwMTAtNTZhOTU1OTQtN2JmODRjMDYtYTFkYzEwN2Q=. CustomerSuppliedId : . TraceId : 01jr88ahjsdfcn2rc6j4af0xsv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490568530548708381:2460], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:32.380936Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWUzY2EwMTAtNTZhOTU1OTQtN2JmODRjMDYtYTFkYzEwN2Q=, ActorId: [2:7490568491893998763:2460], ActorState: ExecuteState, TraceId: 01jr88ahjsdfcn2rc6j4af0xsv, Create QueryResponse for error on request, msg: 2025-04-07T13:51:33.622423Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-07T13:51:33.622688Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-07T13:51:33.622720Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-07T13:51:33.633119Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-07T13:51:33.633161Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-07T13:51:33.642906Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-07T13:51:33.654726Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-07T13:51:33.663362Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-07T13:51:33.663400Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-07T13:51:33.810464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:51:33.810487Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx [GOOD] Test command err: Trying to start YDB, gRPC: 32678, MsgBus: 25431 2025-04-07T13:49:12.308104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567928604656173:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:12.308372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013da/r3tmp/tmpKvyloW/pdisk_1.dat 2025-04-07T13:49:12.902889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:12.902957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:12.909005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:12.922326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32678, node 1 2025-04-07T13:49:13.133144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:13.133178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:13.133184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:13.133264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:25431 TClient is connected to server localhost:25431 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:13.958653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:13.970122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:13.985437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:14.126009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:49:14.343133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:14.448158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.353387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567945784526938:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.353499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:16.606211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.660317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.697247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.776851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.844611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:16.937646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:17.035576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567950079494736:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.035670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.036028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567950079494741:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:17.040107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:17.051108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567950079494743:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:17.334052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567928604656173:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:17.334474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:18.257312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:18.615683Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710672;task_id=2;memory=8388608; 2025-04-07T13:49:18.615718Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710672, task: 2. [Mem] memory 8388608 NOT granted 2025-04-07T13:49:25.569125Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490567954374462746:2499], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmM5ZGJiYjgtNDI3ZGZkZDktNmU5NjE5ZTMtOTRmOGU4NTU=. TraceId : 01jr886f6pap0ez7hzf0jvrfay. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-lqtfcktism, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710672, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 8MiB, tx largest successful memory allocation: 8MiB, tx largest failed memory allocation: 8MiB, tx total execution units: 2, started at: 2025-04-07T13:49:18.606220Z } TxMaxAllocationBacktrace: 0. /-S/ydb/core/kqp/rm_service/kqp_rm_service.h:251: Allocated @ 0x3C69F7E1 1. /-S/ydb/core/kqp/rm_service/kqp_rm_service.cpp:335: AllocateResources @ 0x3C69944A 2. /-S/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp:36: AllocateExtraQuota @ 0x403F456E 3. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor.h:296: ?? @ 0x403F3F3E 4. /-S/ydb/library/yql/dq/actors/compute/dq_compute_memory_quota.h:144: RequestExtraMemory @ 0x3DCD0F15 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1D2F2E2F 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1D2F2E2F 7. /-S/yql/essentials/minikql/aligned_page_pool.cpp:592: TryIncreaseLimit @ 0x1D2F2E2F 8. /-S/yql/essentials/minikql/aligned_page_pool.cpp:372: GetPage @ 0x1D2F2E2F 9. /-S/yql/essentials/minikql/mkql_alloc.cpp:193: MKQLAllocSlow @ 0x1D3198A7 10. /-S/yql/essentials/minikql/mkql_alloc.h:343: MKQLAllocFastWithSize @ 0x3A813053 11. /-S/yql/essentials/minikql/mkql_alloc.h:462: AllocateOn, NKikimr::NMiniKQL::TMemoryUsageInfo *, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x3A813053 12. /-S/yql/essentials/minikql/computation/mkql_computation_node_holders.h:841: Create, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x3A813053 13. /-S/ydb/library/yql/dq/runtime/dq_input_producer.cpp:749: CreateInputUnionValue @ 0x3A813053 14. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:150: DqBuildInputValue @ 0x3A7E6D5E 15. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:590: Prepare @ 0x3A7EDDF9 16. /-S/ydb/library/yql/dq/actors/compute/dq_sync_compute_actor_base.h:226: PrepareTaskRunner @ 0x40404805 17. /-S/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp:80: DoBootstrap @ 0x40400F10 18. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor_impl.h:162: Bootstrap @ 0x4042EF85 19. /-S/ydb/library/actors/core/actor_bootstrapped.h:26: StateBootstrap @ 0x4042DE9E 20. /-S/ydb/library/actors/core/actor.h:533: Receive @ 0x1AE85106 21. /-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x1AE7EED8 22. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() ... zQtOGMxNTA2MWEtMjdkZjkwMTg=. CustomerSuppliedId : . TraceId : 01jr889831bd7avy4qkga6fyk4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568348771062695:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:49.829286Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568348771062705:4885], TxId: 281474976715951, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CustomerSuppliedId : . TraceId : 01jr889831bd7avy4qkga6fyk4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568348771062695:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:49.830652Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr889831bd7avy4qkga6fyk4, Create QueryResponse for error on request, msg: 2025-04-07T13:50:50.225705Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568353066030053:2460] TxId: 281474976715953. Ctx: { TraceId: 01jr8898fff2kq6mgkw2ad9sty, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 385ms } {
: Error: Cancelling after 385ms during execution } ] 2025-04-07T13:50:50.226214Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr8898fff2kq6mgkw2ad9sty, Create QueryResponse for error on request, msg: 2025-04-07T13:50:50.620224Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr8898vw9j2fdy9cr40mxsj5, Create QueryResponse for error on request, msg: 2025-04-07T13:50:52.248890Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr889aeebh4yf4ydskdv43b4, Create QueryResponse for error on request, msg: 2025-04-07T13:50:53.142270Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr889baa3tsbg3re1ged6eh8, Create QueryResponse for error on request, msg: 2025-04-07T13:50:56.240595Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568378835834910:2460] TxId: 281474976715981. Ctx: { TraceId: 01jr889eaw3k8wvdx74skddy7r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 402ms } {
: Error: Cancelling after 401ms during execution } ] 2025-04-07T13:50:56.241340Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834917:5101], TxId: 281474976715981, task: 1. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.241365Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834925:5109], TxId: 281474976715981, task: 9. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.241997Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834918:5102], TxId: 281474976715981, task: 2. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.242390Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834919:5103], TxId: 281474976715981, task: 3. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.242463Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834920:5104], TxId: 281474976715981, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. TraceId : 01jr889eaw3k8wvdx74skddy7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.242968Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834921:5105], TxId: 281474976715981, task: 5. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.243000Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834922:5106], TxId: 281474976715981, task: 6. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. TraceId : 01jr889eaw3k8wvdx74skddy7r. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.243420Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834923:5107], TxId: 281474976715981, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. TraceId : 01jr889eaw3k8wvdx74skddy7r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.243465Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568378835834924:5108], TxId: 281474976715981, task: 8. Ctx: { TraceId : 01jr889eaw3k8wvdx74skddy7r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568378835834910:2460], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:50:56.246807Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr889eaw3k8wvdx74skddy7r, Create QueryResponse for error on request, msg: 2025-04-07T13:51:01.180347Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568400310672382:2460] TxId: 281474976716008. Ctx: { TraceId: 01jr889k4t45fygy7d9tq1w275, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 415ms } {
: Error: Cancelling after 417ms during execution } ] 2025-04-07T13:51:01.180705Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr889k4t45fygy7d9tq1w275, Create QueryResponse for error on request, msg: 2025-04-07T13:51:15.940879Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568460440217361:2460] TxId: 281474976716084. Ctx: { TraceId: 01jr88a1gya1ch2gmb9y1nwx6b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 453ms } {
: Error: Cancelling after 453ms during execution } ] 2025-04-07T13:51:15.941262Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88a1gya1ch2gmb9y1nwx6b, Create QueryResponse for error on request, msg: 2025-04-07T13:51:20.141310Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568481915054604:2460] TxId: 281474976716105. Ctx: { TraceId: 01jr88a5krb1bfcknbndmaz7y2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 463ms } {
: Error: Cancelling after 468ms during execution } ] 2025-04-07T13:51:20.141696Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88a5krb1bfcknbndmaz7y2, Create QueryResponse for error on request, msg: 2025-04-07T13:51:24.499546Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88a9vpbbp0d5m5gb1d9hav, Create QueryResponse for error on request, msg: 2025-04-07T13:51:26.485975Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568507684859451:2460] TxId: 281474976716133. Ctx: { TraceId: 01jr88absp3w63r9tedn2h2sq0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 478ms } {
: Error: Cancelling after 478ms during execution } ] 2025-04-07T13:51:26.486366Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88absp3w63r9tedn2h2sq0, Create QueryResponse for error on request, msg: 2025-04-07T13:51:34.276123Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88akce883a3rfrcc357474, Create QueryResponse for error on request, msg: 2025-04-07T13:51:36.556729Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTA2YjU4ZjctZjZlYjQzMzQtOGMxNTA2MWEtMjdkZjkwMTg=, ActorId: [3:7490568035238438149:2460], ActorState: ExecuteState, TraceId: 01jr88ankpcanb0e1mecfqyh72, Create QueryResponse for error on request, msg: >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TFstClassSrcIdPQTest::TestTableCreated >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:152:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:152:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:158:2178] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:162:2057] recipient: [11:158:2178] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:161:2179] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:231:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:142:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:145:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:146:2057] recipient: [18:144:2166] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:148:2057] recipient: [18:144:2166] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:147:2167] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:217:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:147:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:151:2057] recipient: [19:149:2171] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:153:2057] recipient: [19:149:2171] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:152:2172] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:222:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:147:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:151:2057] recipient: [20:149:2171] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:153:2057] recipient: [20:149:2171] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:152:2172] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:222:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:148:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:152:2057] recipient: [21:150:2171] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:154:2057] recipient: [21:150:2171] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! 
new actor is[21:153:2172] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:201:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:150:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:153:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:154:2057] recipient: [22:152:2173] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:156:2057] recipient: [22:152:2173] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:155:2174] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:225:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:152:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:154:2057] recipient: [23:153:2173] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:156:2057] recipient: [23:153:2173] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:155:2174] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:225:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:153:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:155:2057] recipient: [24:154:2173] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:157:2057] recipient: [24:154:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! 
new actor is[24:156:2174] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:226:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:156:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:158:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:160:2057] recipient: [25:159:2178] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:162:2057] recipient: [25:159:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:161:2179] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:231:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:156:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:158:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:160:2057] recipient: [26:159:2178] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:162:2057] recipient: [26:159:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! 
new actor is[26:161:2179] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:231:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] >> KqpIndexes::SecondaryIndexOrderBy2 >> KqpTypes::DyNumberCompare [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> KqpIndexes::UpsertNoIndexColumns [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |85.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 17202, MsgBus: 63576 2025-04-07T13:51:30.697186Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568521993017538:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:30.697699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af8/r3tmp/tmpVMScWI/pdisk_1.dat 2025-04-07T13:51:30.990064Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17202, node 1 2025-04-07T13:51:31.049365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:31.049452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:31.053614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:31.062924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:31.062943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-07T13:51:31.062953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:31.063045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63576 TClient is connected to server localhost:63576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:31.630979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:31.670809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:31.811161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.024281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.187204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:33.909366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568534877920983:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:33.909475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:34.150372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.183893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.211156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.250662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.324035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.414490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.520913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568539172888783:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:34.520960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:34.524987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568539172888788:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:34.528575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:34.541067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568539172888790:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:35.706535Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568521993017538:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:35.708020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:35.756226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:42.213968Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88atswfa7891fghdab6je3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA3N2E3ODktZTFhNDNkM2QtMTQxZDUyYzYtYjQ5ZWEwZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:42.257743Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjA3N2E3ODktZTFhNDNkM2QtMTQxZDUyYzYtYjQ5ZWEwZGM=, ActorId: [1:7490568547762824443:2520], ActorState: ExecuteState, TraceId: 01jr88atswfa7891fghdab6je3, Create QueryResponse for error on request, msg: >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:217:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:146:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:146:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:156:2057] recipient: [7:154:2175] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:158:2057] recipient: [7:154:2175] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:157:2176] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:227:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:160:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:159:2179] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:163:2057] recipient: [10:159:2179] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:162:2180] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:232:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 4037927937 is [13:167:2184] sender: [13:168:2057] recipient: [13:164:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:167:2184] Leader for TabletID 72057594037927937 is [13:167:2184] sender: [13:237:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:142:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:145:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:146:2057] recipient: [18:144:2166] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:148:2057] recipient: [18:144:2166] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:147:2167] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:217:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:147:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:151:2057] recipient: [19:149:2171] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:153:2057] recipient: [19:149:2171] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:152:2172] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:222:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:147:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:151:2057] recipient: [20:149:2171] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:153:2057] recipient: [20:149:2171] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:152:2172] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:222:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:148:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:152:2057] recipient: [21:150:2171] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:154:2057] recipient: [21:150:2171] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:153:2172] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:223:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:153:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:156:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:157:2057] recipient: [22:155:2176] Leader for TabletID 72057594037927937 is [22:158:2177] sender: [22:159:2057] recipient: [22:155:2176] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:158:2177] Leader for TabletID 72057594037927937 is [22:158:2177] sender: [22:228:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:153:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:156:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:157:2057] recipient: [23:155:2176] Leader for TabletID 72057594037927937 is [23:158:2177] sender: [23:159:2057] recipient: [23:155:2176] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:158:2177] Leader for TabletID 72057594037927937 is [23:158:2177] sender: [23:228:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:157:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:158:2057] recipient: [24:156:2176] Leader for TabletID 72057594037927937 is [24:159:2177] sender: [24:160:2057] recipient: [24:156:2176] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:159:2177] Leader for TabletID 72057594037927937 is [24:159:2177] sender: [24:229:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexInsert1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18773, MsgBus: 6928 2025-04-07T13:51:27.047569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568510533287792:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:27.048658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b09/r3tmp/tmpVe47RV/pdisk_1.dat 2025-04-07T13:51:27.655997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:27.656095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:27.657342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:27.698927Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18773, 
node 1 2025-04-07T13:51:27.935868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:27.935892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:27.935899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:27.935986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6928 TClient is connected to server localhost:6928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:28.752828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.782059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:28.802500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.976387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.171949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.280849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.871257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568523418191315:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:30.871335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.143458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.177901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.208439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.243032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.299677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.377962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.453000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568527713159112:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.453076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.453422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568527713159117:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.457070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:31.466516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568527713159119:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:32.044974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568510533287792:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.045046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:32.614561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:33.197584Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29718, MsgBus: 25407 2025-04-07T13:51:34.419985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568540081453367:2173];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b09/r3tmp/tmpzVSSxY/pdisk_1.dat 2025-04-07T13:51:34.537304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:34.623360Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:34.666087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:34.666170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:34.677837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29718, node 2 2025-04-07T13:51:34.924997Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:34.925020Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:34.925028Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:34.925122Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25407 TClient is connected to server localhost:25407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:35.683978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:35.705570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:35.829333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.066386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:36.235757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661: ... t not found or you don't have access permissions } 2025-04-07T13:51:39.590299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.590563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568561556291908:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.594506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:39.608946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568561556291910:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:40.780017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ( (declare $am_1 (DataType 'Decimal '22 '9)) (declare $b_1 (DataType 'Utf8)) (declare $b_2 (DataType 'Utf8)) (declare $constant_param_1 (DataType 'Timestamp)) (declare $constant_param_2 (DataType 'Utf8)) (declare $pa_id_1 (DataType 'Utf8)) (declare $status_1 (DataType 'Utf8)) (declare $type_1 (ListType (DataType 'Utf8))) (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Timestamp))) (OptionalType (OptionalType (DataType 'Utf8))) (DataType 'Int32)) (TupleType (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Timestamp))) (OptionalType (OptionalType (DataType 'Utf8))) (DataType 'Int32)))))) (let $1 (DataType 'Utf8)) (let $2 (DataType 'Timestamp)) (let $3 (OptionalType $2)) (let $4 '"$constant_param_1") (let $5 (OptionalType $1)) (let $6 (OptionalType $5)) (let $7 '"$constant_param_2") (let $8 '('('"_logical_id" '4767) '('"_id" '"f4508ff8-db08c718-38854de0-261e6482") '('"_partition_mode" '"single"))) (let $9 (DqPhyStage '() (lambda '() (block '( (let $50 (Uint64 '10000)) (let $51 (Int32 '1)) (let $52 '((Just (Just $b_2)) $51)) (let $53 '((Just (Just $pa_id_1)) $51)) (let $54 (Int32 '0)) (let $55 (Just (Just $constant_param_1))) (let $56 '((Nothing $6) $54)) (let $57 (RangeMultiply $50 (RangeCreate (AsList '('((Just (Nothing $3)) $54) '($55 $54)))) (RangeCreate (AsList '($56 $56))))) (let $58 '($55 $51)) (let $59 (RangeCreate (AsList '($58 $58)))) (let $60 (Just (Just $constant_param_2))) (let $61 (RangeMultiply $50 $59 (RangeCreate (AsList '('((Just (Nothing $5)) $54) '($60 $54)))))) (let $62 '($60 $51)) (let $63 (RangeMultiply $50 $59 (RangeCreate (AsList '($62 $62))))) (let $64 (RangeUnion $57 $61 $63)) (let $65 (RangeMultiply $50 (RangeCreate (AsList '($53 $53))) $64)) (return (ToStream (Just '((RangeFinalize (RangeMultiply $50 (RangeUnion (RangeMultiply $50 (RangeCreate (AsList '($52 $52))) $65)))))))) ))) $8)) (let $10 (DqCnValue (TDqOutput $9 '0))) (let $11 '('('"$b_2") '($4) '($7) '('"$pa_id_1"))) (let $12 (KqpPhysicalTx '($9) '($10) $11 '('('type '"compute")))) (let $13 (KqpTable '"/Root/tg/tg_index/indexImplTable" '"72057594046644480:18" '"" '1)) (let $14 '('"am" '"b" '"id" '"pa_id" '"status" '"system_date" 'type)) (let $15 '"%kqp%tx_result_binding_0_0") (let $16 (TupleType $6 $6 (OptionalType $3) $6 (DataType 'Int32))) (let $17 (TupleType (ListType (TupleType $16 $16)))) (let $18 '('"b" '"pa_id" '"system_date" '"id")) (let $19 '('('"UsedKeyColumns" $18) '('"ExpectedMaxRanges" '3) '('"PointPrefixLen" '2))) (let $20 (KqpRowsSourceSettings $13 $14 '('('"Reverse") '('"Sorted")) %kqp%tx_result_binding_0_0 $19)) (let $21 (Bool 'false)) (let $22 (DataType 'Decimal '22 '9)) (let $23 (Uint64 '11)) (let $24 '('"b" $5)) (let $25 '('"id" $5)) (let $26 '('"pa_id" $5)) (let $27 '('"system_date" $3)) (let $28 (StructType $24 $25 $26 $27)) (let $29 '('('"_logical_id" '4926) '('"_id" '"519a2389-b44c19dd-b96c88eb-11dc53a3") '('"_wide_channels" $28))) (let $30 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $20)) (lambda '($66) (block '( (let $67 (lambda '($70) 
(block '( (let $71 (Member $70 'type)) (let $72 (IfPresent $71 (lambda '($74) (block '( (let $75 '('Auto 'One 'Compact)) (let $76 (ToDict $type_1 (lambda '($77) $77) (lambda '($78) (Void)) $75)) (return (Contains $76 $74)) ))) $21)) (let $73 (And (Not (If (Exists $71) $72 (Bool 'true))) (Coalesce (!= (Member $70 '"status") $status_1) $21) (Coalesce (!= (Member $70 '"am") $am_1) $21))) (return (Member $70 '"b") (Member $70 '"id") (Member $70 '"pa_id") (Member $70 '"system_date") $73) )))) (let $68 (WideFilter (ExpandMap (ToFlow $66) $67) (lambda '($79 $80 $81 $82 $83) $83) $23)) (let $69 (lambda '($84 $85 $86 $87 $88) $84 $85 $86 $87)) (return (FromFlow (WideMap $68 $69))) ))) $29)) (let $31 (DqCnUnionAll (TDqOutput $30 '0))) (let $32 '('('"_logical_id" '5040) '('"_id" '"c0a7e5d5-bd1d7eb-8733ba2d-bbb2bfe9") '('"_wide_channels" (StructType $25)))) (let $33 (DqPhyStage '($31) (lambda '($89) (block '( (let $90 '('('0 $21) '('2 $21) '('3 $21) '('1 $21))) (let $91 (WideTop (ToFlow $89) $23 $90)) (return (FromFlow (WideMap $91 (lambda '($92 $93 $94 $95) $93)))) ))) $32)) (let $34 (DqCnHashShuffle (TDqOutput $33 '0) '('0))) (let $35 (DqPhyStage '($34) (lambda '($96) (FromFlow (PartitionByKey (NarrowMap (ToFlow $96) (lambda '($97) (AsStruct '('"id" $97)))) (lambda '($98) (Member $98 '"id")) (Void) (Void) (lambda '($99) (Map (Filter (FlatMap $99 (lambda '($100) (Take (Nth $100 '1) (Uint64 '1)))) (lambda '($101) (Exists (Member $101 '"id")))) (lambda '($102) (AsStruct '('"b" $b_1) '('"id" (Member $102 '"id"))))))))) '('('"_logical_id" '5133) '('"_id" '"62365a76-f0525f4-a8d17dd9-c4eea0ff")))) (let $36 (KqpTable '"/Root/tg" '"72057594046644480:16" '"" '1)) (let $37 '('"am" '"b" '"cur" '"id" '"pa_id" '"product" '"status" '"system_date" 'type)) (let $38 (KqpCnStreamLookup (TDqOutput $35 '0) $36 $37 (ListType (StructType '('"b" $1) $25)) '"LookupRows")) (let $39 (Uint64 '"1001")) (let $40 (StructType '('"am" (OptionalType $22)) $24 '('"cur" $5) $25 $26 '('"product" $5) '('"status" $5) $27 '('type $5))) (let $41 '('('"_logical_id" '5235) '('"_id" '"74cdc110-841a0284-69f91cac-8a1d7233") '('"_wide_channels" $40))) (let $42 (DqPhyStage '($38) (lambda '($103) (block '( (let $104 (lambda '($107) (Member $107 '"am") (Member $107 '"b") (Member $107 '"cur") (Member $107 '"id") (Member $107 '"pa_id") (Member $107 '"product") (Member $107 '"status") (Member $107 '"system_date") (Member $107 'type))) (return (FromFlow (ExpandMap (ToFlow (TopSort (OrderedFilter $103 (lambda '($105) (Exists (Member $105 '"id")))) $39 '($21 $21) (lambda '($106) '((Member $106 '"system_date") (Member $106 '"id"))))) $104))) ))) $41)) (let $43 (DqCnMerge (TDqOutput $42 '0) '('('"7" '"Desc") '('3 '"Desc")))) (let $44 (DqPhyStage '($43) (lambda '($108) (FromFlow (NarrowMap (Take (ToFlow $108) $39) (lambda '($109 $110 $111 $112 $113 $114 $115 $116 $117) (AsStruct '('"am" $109) '('"b" $110) '('"cur" $111) '('"id" $112) '('"pa_id" $113) '('"product" $114) '('"status" $115) '('"system_date" $116) '('type $117)))))) '('('"_logical_id" '5248) '('"_id" '"12f36513-c40736b-fd7296d9-f98ceaaa")))) (let $45 '($30 $33 $35 $42 $44)) (let $46 (DqCnResult (TDqOutput $44 '0) '())) (let $47 (KqpTxResultBinding $17 '0 '0)) (let $48 '('('"$am_1") '('"$b_1") '('"$status_1") '('"$type_1") '($15 $47))) (let $49 (KqpPhysicalTx $45 '($46) $48 '('('type '"data")))) (return (KqpPhysicalQuery '($12 $49) '((KqpTxResultBinding (ListType $40) '1 '0)) '('('type '"data_query")))) ) 
{"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Operators":[{"Inputs":[{"ExternalPlanNodeId":12}],"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Limit":"1001","Name":"TopSort","TopSortBy":""},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":10}],"Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["b","id"],"Node Type":"TableLookup","PlanNodeId":10,"Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":9,"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Input":"NarrowMap","Name":"PartitionByKey"}],"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":""}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Node Type":"Aggregate"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":""}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No 
estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 23667, MsgBus: 61723 2025-04-07T13:51:23.048273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568494747086625:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:23.052771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b12/r3tmp/tmp9TUzP7/pdisk_1.dat 2025-04-07T13:51:23.505673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:23.511188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:23.511247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:23.513402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23667, node 1 2025-04-07T13:51:23.672041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:23.672058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:23.672071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:23.672155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61723 TClient is connected to server localhost:61723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:24.417464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:24.432624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:24.457053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.583629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:24.763374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:24.818548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.599363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568507631990063:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.599494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:26.907463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.951168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:26.985258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.011472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.081262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.129577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.216239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568511926957865:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.216328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.216336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568511926957870:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.220411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:27.231291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568511926957872:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:28.134543Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568494747086625:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:28.134783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:28.507468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:30.045475Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:31.125670Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88afswas19fe2nj0s2wzqn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODViOWEwMjMtMmRjYjYzZWItODZkZWQzZGMtNjM5NGU1MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:31.165766Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODViOWEwMjMtMmRjYjYzZWItODZkZWQzZGMtNjM5NGU1MTU=, ActorId: [1:7490568516221925483:2461], ActorState: ExecuteState, TraceId: 01jr88afswas19fe2nj0s2wzqn, Create QueryResponse for error on request, msg: 2025-04-07T13:51:31.214721Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:31.224621Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:31.250507Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:32.566027Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21595, MsgBus: 28202 2025-04-07T13:51:33.460843Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568537183274842:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:33.460897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b12/r3tmp/tmpCp6Eih/pdisk_1.dat 2025-04-07T13:51:33.610022Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21595, node 2 2025-04-07T13:51:33.630917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:33.631010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:33.645139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:33.703534Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:33.703562Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:33.703570Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:33.703668Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28202 TClient is connected to server localhost:28202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:34.247947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.257150Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:34.276594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:34.381096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.563775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.657579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.118286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568554363145750:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.118379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.167853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.221702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.279793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.320149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.363150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.437979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.496768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568554363146247:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.499896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.500279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568554363146252:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.504340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:37.522648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568554363146254:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:38.461310Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568537183274842:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:38.470603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.355174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.412916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.480951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpMultishardIndex::DataColumnSelect+StreamLookup >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> KqpMultishardIndex::SortByPk-StreamLookup [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 2410, MsgBus: 24295 2025-04-07T13:51:29.291221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568518954685557:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:29.291934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af9/r3tmp/tmp9yrq7X/pdisk_1.dat 2025-04-07T13:51:29.578791Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2410, node 1 2025-04-07T13:51:29.645828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:29.645855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:29.645875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:29.645996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:29.684531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:29.684641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:29.686266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24295 TClient is connected to server localhost:24295 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:30.130998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.151916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.288152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.408202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.472319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.116489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568531839589024:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.116607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.449704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.489825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.516086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.543997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.578125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.621742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.683650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568531839589520:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.683721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.684097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568531839589525:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.687085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:32.696335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568531839589527:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:33.813794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.295599Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568518954685557:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:34.295670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2731, MsgBus: 18350 2025-04-07T13:51:37.409540Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568552029588086:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:37.409594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af9/r3tmp/tmpyTjaE5/pdisk_1.dat 2025-04-07T13:51:37.570332Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:37.604555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:37.604636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:37.609950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2731, node 2 2025-04-07T13:51:37.781163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:37.781183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:37.781191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:37.781281Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18350 TClient is connected to server localhost:18350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:38.349833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:38.380058Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:38.396611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.480916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:38.647356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:38.733705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.926069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568564914491645:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.926184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.977197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.008892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.048363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.119244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.153428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.196040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.297667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568569209459445:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.297748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.297998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568569209459450:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.301618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:41.315203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568569209459452:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:42.449906Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568552029588086:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:42.449983Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:42.799178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> TKeyValueTest::TestRenameToLongKey [GOOD] >> KqpUniqueIndex::UpdateFkSameValue >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 24947, MsgBus: 22017 2025-04-07T13:51:31.441571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568529151335222:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:31.441944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af0/r3tmp/tmpcjfBjp/pdisk_1.dat 2025-04-07T13:51:31.793039Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24947, node 1 2025-04-07T13:51:31.854452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:31.854580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:31.858278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:31.904255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:31.904272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:31.904277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:31.904352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22017 TClient is connected to server localhost:22017 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:32.395738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.454687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:32.582468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.744198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:32.814487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.141538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568542036238790:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.148968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.207818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.235293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.281628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.329510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.360904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.418840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.541563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568546331206590:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.541648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.541819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568546331206595:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.545629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:35.566819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568546331206597:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:36.446525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568529151335222:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:36.446615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:37.215323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.788600Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:37.845875Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19801, MsgBus: 20279 2025-04-07T13:51:39.157210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568560256160727:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af0/r3tmp/tmptV9W4D/pdisk_1.dat 2025-04-07T13:51:39.233198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:39.270191Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:39.277689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:39.277770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:39.286434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19801, node 2 2025-04-07T13:51:39.407345Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:39.407365Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:39.407373Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:39.407463Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20279 TClient is connected to server localhost:20279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:39.877910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.894462Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:39.910717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.967429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.119802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.193623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:42.354261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568573141064143:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.354412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.396817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.487029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.555143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.653041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.734339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.833698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.930203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568573141064650:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.930291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.930673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568573141064655:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.935443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:42.953374Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:42.953826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568573141064657:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:44.133934Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568560256160727:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:44.134007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:44.712245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.193592Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] >> KqpMultishardIndex::DuplicateUpsert-StreamLookup >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 20391, MsgBus: 15428 2025-04-07T13:51:27.527487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568512048089573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:27.527550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b01/r3tmp/tmpUg0GBb/pdisk_1.dat 2025-04-07T13:51:28.121660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:28.128444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:28.128558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:28.133060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20391, node 1 2025-04-07T13:51:28.365305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:28.365327Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:28.365338Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:28.365446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15428 TClient is connected to server localhost:15428 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:29.044030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.070108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.199610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.360369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.465974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:31.122468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568529227960260:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.122612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.336872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.370803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.408648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.437894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.476746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.551779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.610583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568529227960763:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.610648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.610777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568529227960768:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.613548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:31.625388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568529227960770:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:32.529003Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568512048089573:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.529098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:32.898438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 25980, MsgBus: 4257 2025-04-07T13:51:35.538688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568544630625949:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:35.538789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b01/r3tmp/tmpiWwZl7/pdisk_1.dat 2025-04-07T13:51:35.758854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:35.773424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:35.773501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:35.778689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25980, node 2 2025-04-07T13:51:35.893206Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:35.893229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:35.893237Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:35.893335Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4257 TClient is connected to server localhost:4257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:36.609878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:36.622064Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.637374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.779145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.027758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.176028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.764650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568561810496697:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.764758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.794907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.870541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.948162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.029569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.078014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.162083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.293526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568566105464497:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.293632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.294060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568566105464502:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.298138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:40.312973Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:40.315305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568566105464504:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:40.544870Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568544630625949:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:40.544946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:41.891495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:149:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:151:2173] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:155:2057] recipient: [7:151:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:154:2174] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:149:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:151:2173] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:155:2057] recipient: [8:151:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:154:2174] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:151:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:153:2175] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:157:2057] recipient: [9:153:2175] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:156:2176] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:226:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2175] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:157:2057] recipient: [10:153:2175] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2176] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2177] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:159:2057] recipient: [11:155:2177] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2178] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipie ... ] sender: [18:169:2057] recipient: [18:165:2185] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:168:2186] Leader for TabletID 72057594037927937 is [18:168:2186] sender: [18:238:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:141:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:144:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:145:2057] recipient: [21:143:2166] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:147:2057] recipient: [21:143:2166] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:146:2167] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:216:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:141:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:144:2057] recipient: [22:143:2166] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:145:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:147:2057] recipient: [22:143:2166] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! 
new actor is[22:146:2167] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:216:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:142:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:145:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:146:2057] recipient: [23:144:2166] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:148:2057] recipient: [23:144:2166] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:147:2167] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:217:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:147:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:149:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:150:2171] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:153:2057] recipient: [24:150:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:152:2172] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:222:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:147:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:151:2057] recipient: [25:149:2171] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:153:2057] recipient: [25:149:2171] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! 
new actor is[25:152:2172] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:222:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:148:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:152:2057] recipient: [26:151:2171] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:154:2057] recipient: [26:151:2171] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:153:2172] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:223:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:153:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:156:2176] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:159:2057] recipient: [27:156:2176] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:158:2177] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:228:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:155:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:156:2176] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:159:2057] recipient: [28:156:2176] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! 
new actor is[28:158:2177] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:154:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:158:2057] recipient: [29:157:2176] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:160:2057] recipient: [29:157:2176] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:159:2177] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:229:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns [GOOD] Test command err: Trying to start YDB, gRPC: 28372, MsgBus: 61489 2025-04-07T13:51:24.877198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568498726753108:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:24.877493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b0a/r3tmp/tmphqd6Iv/pdisk_1.dat 2025-04-07T13:51:25.431032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:25.431115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:25.431353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:25.437711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28372, node 1 2025-04-07T13:51:25.603381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:25.603420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:25.603430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:25.603550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61489 TClient is connected to server localhost:61489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:26.195080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.231226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.415020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.584223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.671633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.454221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568515906623865:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.454337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.751666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.799559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.824820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.853922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.890226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.976229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:29.045171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568520201591662:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:29.045242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:29.045424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568520201591667:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:29.047990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:29.073931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568520201591669:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:29.863162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568498726753108:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:29.863225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:30.236009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15028, MsgBus: 1789 2025-04-07T13:51:32.642990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568532002543080:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.643046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b0a/r3tmp/tmp9GMNZi/pdisk_1.dat 2025-04-07T13:51:32.760401Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:32.784524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:32.784603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:32.787673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15028, node 2 2025-04-07T13:51:32.849194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:32.849218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:32.849226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:32.849313Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1789 TClient is connected to server localhost:1789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:33.433975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:33.441765Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:33.446128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:33.553182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:33.754195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:33.815886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.421688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568549182413960:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.421753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.458834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.514626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.562661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.629302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.711059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.799610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.887290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568549182414461:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.887364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.887562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568549182414466:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.891400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:36.906286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568549182414468:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:37.644416Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568532002543080:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:37.644475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:38.125620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:38.990441Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:39.036732Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:40.663950Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:40.747158Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:41.557788Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:41.618880Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:42.510777Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:42.532627Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:42.997721Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:43.074863Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:44.279591Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:44.993373Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:45.054180Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 24709, MsgBus: 20899 2025-04-07T13:51:28.456564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568512565002109:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:28.456808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b00/r3tmp/tmpLGIrbu/pdisk_1.dat 2025-04-07T13:51:29.011517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:29.011601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:29.016060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:29.019530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24709, node 1 2025-04-07T13:51:29.101236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:29.101279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:29.101287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-04-07T13:51:29.101386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20899 TClient is connected to server localhost:20899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:29.653130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:29.672921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:29.833472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:30.063076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.139735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.077562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568525449905563:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.084547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.119860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.165921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.233275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.279240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.323195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.416899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.516964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568529744873364:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.517040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.517373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568529744873369:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.521751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:32.537075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568529744873371:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:33.525138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568512565002109:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:33.525223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:34.067690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26685, MsgBus: 1233 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b00/r3tmp/tmpQ76d5t/pdisk_1.dat 2025-04-07T13:51:36.440879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:36.475091Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:36.502166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:36.502251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:36.504584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26685, node 2 2025-04-07T13:51:36.617725Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:36.617743Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:36.617749Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:36.617861Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1233 TClient is connected to server localhost:1233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:37.297550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:37.310192Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:37.324601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.397687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.573961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.647343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.293808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568565557858081:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.293895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.347564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.392443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.422144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.481649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.550598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.621777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.733110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568565557858585:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.733206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.733498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568565557858590:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.737334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:40.750539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568565557858592:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:42.168509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.260157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.364855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexes::UpdateDeletePlan [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TPersQueueTest::WriteAfterAlter |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> TopicService::RelativePath [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:51:48.220355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:51:48.220490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:51:48.220529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:51:48.220558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:51:48.220605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:51:48.220629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:51:48.220692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:51:48.221022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:51:48.283146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:51:48.283203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:48.292729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:51:48.292891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:51:48.293047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T13:51:48.301256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:51:48.301458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:51:48.302040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.302247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:51:48.304219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.305453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:48.305513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.305630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:51:48.305675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:48.305717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:51:48.305862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.312494Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:51:48.412829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:51:48.413076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.413322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:51:48.413544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:51:48.413595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.415922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.416072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:51:48.416233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.416299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:51:48.416343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T13:51:48.416509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:51:48.418605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.418677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:51:48.418724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:51:48.420319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.420366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.420413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.420485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.423794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:51:48.433501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:51:48.433728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:51:48.434703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.434851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:51:48.434897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.435158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:51:48.435231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.435395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:48.435477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:51:48.437773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:48.437815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:48.438001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.438035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:51:48.438273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.438319Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:51:48.438411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:51:48.438462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.438505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:51:48.438545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.438590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:51:48.438619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:51:48.438687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:51:48.438719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:51:48.438746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:51:48.455301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:51:48.455456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:51:48.455491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:51:48.455541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:51:48.455583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:48.455694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
blet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T13:51:48.504479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.504596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:51:48.504642Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-07T13:51:48.504824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T13:51:48.505081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:48.505152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:51:48.508708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:48.508761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:48.508960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:51:48.509077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.509127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:51:48.509167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:51:48.509518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.509564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:51:48.509661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:51:48.509692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:51:48.509735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:51:48.509772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:51:48.509833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:51:48.509865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:51:48.509938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:51:48.509987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T13:51:48.510018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, 
[OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:51:48.510048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:51:48.510856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:48.510941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:48.510975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:51:48.511016Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:51:48.511050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:51:48.512001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:48.512077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:48.512099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:51:48.512122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:51:48.512148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:51:48.512206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:51:48.515660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:51:48.516903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:51:48.517165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:51:48.517206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T13:51:48.517599Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:51:48.517702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:51:48.517745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:294:2286] TestWaitNotification: OK eventTxId 101 2025-04-07T13:51:48.518171Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:51:48.518381Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 218us result status StatusSuccess 2025-04-07T13:51:48.518730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-07T13:51:48.521832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:51:48.522083Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-07T13:51:48.522193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-04-07T13:51:48.522347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-04-07T13:51:48.524759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-07T13:51:48.524941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:51:48.525216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:51:48.525251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:51:48.525635Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:51:48.525714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:51:48.525759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 102 >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan [GOOD] Test command err: Trying to start YDB, gRPC: 6301, MsgBus: 10529 2025-04-07T13:51:24.293735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568497802572226:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:24.305822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b0e/r3tmp/tmpmRqidA/pdisk_1.dat 2025-04-07T13:51:24.924277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:24.944316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:24.944416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:24.948160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6301, node 1 2025-04-07T13:51:25.059418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:25.059440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:25.059453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:25.059526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10529 TClient is connected to server localhost:10529 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:25.798531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:25.830210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:25.836855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:25.995320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.204926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:26.279855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:27.957862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568510687475747:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:27.990808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.288571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.327694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.403375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.436167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.471252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.523836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:28.599001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568514982443547:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.599081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.599456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568514982443553:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:28.602701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:28.615327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568514982443555:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:29.288785Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568497802572226:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:29.288861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:29.730663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:30.711465Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:30.730247Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:31.496360Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:32.385785Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88ah6y8g7yqhmfk0mp9h4v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:32.418081Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, ActorId: [1:7490568519277411162:2461], ActorState: ExecuteState, TraceId: 01jr88ah6y8g7yqhmfk0mp9h4v, Create QueryResponse for error on request, msg: 2025-04-07T13:51:33.312184Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:33.378413Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:34.884538Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88ak03ap4nhnkt6dghvjjn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:34.884815Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, ActorId: [1:7490568519277411162:2461], ActorState: ExecuteState, TraceId: 01jr88ak03ap4nhnkt6dghvjjn, Create QueryResponse for error on request, msg: 2025-04-07T13:51:34.983390Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:35.472837Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568545047215767:2610], TxId: 281474976710706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=. TraceId : 01jr88amk79q8ksc62832gdrvz. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:35.474373Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568545047215768:2611], TxId: 281474976710706, task: 2. Ctx: { TraceId : 01jr88amk79q8ksc62832gdrvz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568545047215763:2461], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:35.485876Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, ActorId: [1:7490568519277411162:2461], ActorState: ExecuteState, TraceId: 01jr88amk79q8ksc62832gdrvz, Create QueryResponse for error on request, msg: 2025-04-07T13:51:37.379750Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88an2he3sj9v1mzd3nr5hp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:37.379916Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, ActorId: [1:7490568519277411162:2461], ActorState: ExecuteState, TraceId: 01jr88an2he3sj9v1mzd3nr5hp, Create QueryResponse for error on request, msg: 2025-04-07T13:51:38.833913Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88apy2d403tj1kzkja9azg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:38.834243Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdjMWFhZGMtZDY0NTcxNWMtOWJhMWI0ZjYtNzE4ZmFjZDQ=, ActorId: [1:7490568519277411162:2461], ActorState: ExecuteState, TraceId: 01jr88apy2d403tj1kzkja9azg, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29010, MsgBus: 5301 2025-04-07T13:51:39.894020Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568559993649433:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:39.894058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b0e/r3tmp/tmpJLlYiM/pdisk_1.dat 2025-04-07T13:51:40.087346Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:40.099280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:40.099553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:40.106867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29010, node 2 2025-04-07T13:51:40.257256Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:40.257283Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:40.257295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:40.257395Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5301 TClient is connected to server localhost:5301 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:40.968481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.985011Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:40.998056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.119845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.352159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.444051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:44.844361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568581468487573:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.844450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.903574Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568559993649433:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:44.915712Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:44.918130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.031230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.108390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.167648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.222251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.289500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.390987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568585763455368:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.391149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.391861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568585763455376:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.395789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:45.424413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568585763455378:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:46.700903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:51:48.555010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:51:48.555122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:51:48.555168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:51:48.555205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:51:48.555249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:51:48.555275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:51:48.555327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:51:48.555658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:51:48.622616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:51:48.622680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:48.635409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:51:48.635600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:51:48.635767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:51:48.642701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:51:48.642907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:51:48.643553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.643792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:51:48.653745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.655360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:48.655442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.655592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:51:48.655661Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:48.655712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:51:48.655893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.675030Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:51:48.903248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:51:48.903534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.903776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:51:48.904023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:51:48.904096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.914729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.914904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:51:48.915111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.915177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:51:48.915215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:51:48.915253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:51:48.925923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.926015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:51:48.926057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:51:48.933725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.933821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.933885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.933952Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.959996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:51:48.966665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:51:48.966892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:51:48.967924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:48.968101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:51:48.968165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.968445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:51:48.968500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:51:48.968710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:48.968810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:51:48.978340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:48.978417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:48.978640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:48.978681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:51:48.978936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:51:48.978987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:51:48.979111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:51:48.979149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.979203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:51:48.979250Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:51:48.979285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:51:48.979316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:51:48.979392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:51:48.979455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:51:48.979492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:51:48.985837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:51:48.986000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:51:48.986046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:51:48.986094Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:51:48.986152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:48.986276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:51:49.003263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:51:49.003846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:51:49.004515Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:51:49.059116Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:51:49.078104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:51:49.078399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-04-07T13:51:49.078464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-04-07T13:51:49.078634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:51:49.078718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T13:51:49.078771Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:51:49.092932Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:51:49.095414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T13:51:49.095589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-07T13:51:49.101159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:51:49.101258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-04-07T13:51:49.101348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T13:51:49.101607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:51:49.103118Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:51:49.113708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T13:51:49.113931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T13:51:49.114382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:51:49.114543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:51:49.114600Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-07T13:51:49.114771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T13:51:49.115022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:51:49.115106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:51:49.129992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:51:49.130061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, 
path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:51:49.130248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:51:49.130387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:51:49.130448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:51:49.130507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:51:49.130882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:51:49.130948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:51:49.131067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:51:49.131109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:51:49.131171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:51:49.131226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:51:49.131270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:51:49.131307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:51:49.131397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:51:49.131437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T13:51:49.131476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:51:49.131510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:51:49.132384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:49.132488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:49.132523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:51:49.132622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:51:49.144750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:51:49.146324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:49.146428Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:51:49.146468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:51:49.146502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:51:49.146535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:51:49.146610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:51:49.165864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:51:49.167458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> TPQCDTest::TestRelatedServicesAreRunning >> YdbProxy::CopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] Test command err: Trying to start YDB, gRPC: 23229, MsgBus: 12635 2025-04-07T13:51:33.102982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568535340624710:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:33.103230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001add/r3tmp/tmpa8XUmK/pdisk_1.dat 2025-04-07T13:51:33.537670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:33.537766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:33.541121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:33.546574Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23229, node 1 2025-04-07T13:51:33.669176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:33.669201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:33.669207Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:33.669299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12635 TClient is connected to server localhost:12635 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:34.233896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.245622Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:34.256729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.409434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.559921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.641227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.077497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568548225528156:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.093441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.151311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.227346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.262164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.319419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.369595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.449197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.541475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568552520495955:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.541563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.541802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568552520495960:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.546742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:37.571233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568552520495962:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:38.116693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568535340624710:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:38.117001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.202292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.547166Z node 1 :TX_DATASHARD ERROR: Complete [1744033901582 : 281474976710681] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T13:51:41.558934Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWQ2ZjYwMjAtNjRkYWU1MWMtYmUzMDhjNTYtNGFmMmNlNzM=, ActorId: [1:7490568561110430874:2464], ActorState: ExecuteState, TraceId: 01jr88atd91jvtdvdf5w30pqy3, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 12467, MsgBus: 9255 2025-04-07T13:51:42.367057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568573356588636:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:42.367122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001add/r3tmp/tmpF9gU31/pdisk_1.dat 2025-04-07T13:51:42.465504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:42.494868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:42.494962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:42.496462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12467, node 2 2025-04-07T13:51:42.597223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:42.597243Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:42.597251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:42.597341Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9255 TClient is connected to server localhost:9255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:43.225951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.237564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:43.255461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:43.372060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.583411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.691532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.691793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568586241492213:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.691925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.751533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.796630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.886841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.949753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.005927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.094000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.206371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568590536460012:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.206461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.206854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568590536460017:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.211032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:46.229409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568590536460019:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:47.368191Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568573356588636:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:47.368249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:47.887743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.259526Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490568599126395161:2462] TxId: 281474976715672. Ctx: { TraceId: 01jr88b1gx0xy79xrnc2spfrsb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-07T13:51:48.259924Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, ActorId: [2:7490568594831427632:2462], ActorState: ExecuteState, TraceId: 01jr88b1gx0xy79xrnc2spfrsb, Create QueryResponse for error on request, msg: 2025-04-07T13:51:48.302298Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490568599126395185:2462] TxId: 281474976715674. Ctx: { TraceId: 01jr88b1hwc340mbb13kbr97hj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-07T13:51:48.302662Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, ActorId: [2:7490568594831427632:2462], ActorState: ExecuteState, TraceId: 01jr88b1hwc340mbb13kbr97hj, Create QueryResponse for error on request, msg: 2025-04-07T13:51:48.321074Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490568599126395195:2462] TxId: 281474976715676. Ctx: { TraceId: 01jr88b1jy38tgwksjd6z90w1y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-07T13:51:48.321682Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, ActorId: [2:7490568594831427632:2462], ActorState: ExecuteState, TraceId: 01jr88b1jy38tgwksjd6z90w1y, Create QueryResponse for error on request, msg: 2025-04-07T13:51:48.364847Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490568599126395207:2462] TxId: 281474976715678. Ctx: { TraceId: 01jr88b1km13fexztkmegx2nxa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-07T13:51:48.365171Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDNkODY0NDUtZjYxMjJlNTUtNmQ0OThiMWEtMThhZDIwYzg=, ActorId: [2:7490568594831427632:2462], ActorState: ExecuteState, TraceId: 01jr88b1km13fexztkmegx2nxa, Create QueryResponse for error on request, msg: >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] >> KqpIndexes::Uint8Index |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover >> TopicService::AccessRights ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 17027, MsgBus: 14969 2025-04-07T13:51:31.494944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568527288769701:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:31.500122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ae8/r3tmp/tmpe9LfcI/pdisk_1.dat 2025-04-07T13:51:31.863814Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17027, node 1 2025-04-07T13:51:31.906607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:31.915564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:31.917844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:31.935287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:31.935314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:31.935321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:31.935413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14969 TClient is connected to server localhost:14969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
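(Not part of the test log.) The repeated "Read operation can be performed on async index table ... only with StaleRO isolation level" errors above come from querying a table through an asynchronously updated secondary index inside a regular serializable transaction. A minimal sketch of a read that such a check would accept, using the ydb Python SDK; the table name `TestTable`, index name `ByValueIndex`, and column names are assumptions for illustration, not the tests' actual schema:

```python
# Hedged sketch, assuming a table `TestTable` with an ASYNC secondary index
# `ByValueIndex` on column `Value`. Reading through an async index requires a
# stale read-only transaction, matching the errors quoted in the log above.
import ydb


def read_via_async_index(pool: ydb.SessionPool):
    def callee(session: ydb.Session):
        # StaleRO reads the eventually consistent async-index replica instead
        # of demanding a serializable snapshot, so the query is allowed.
        return session.transaction(ydb.StaleReadOnly()).execute(
            "SELECT Key, Value FROM `TestTable` VIEW `ByValueIndex` "
            "WHERE Value = 42;",
            commit_tx=True,
        )

    # retry_operation_sync re-runs the callback on retriable YDB errors.
    return pool.retry_operation_sync(callee)
```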
2025-04-07T13:51:32.476401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.496211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:32.514918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.610790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.800129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.899010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:35.839181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568544468640603:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.839278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.355813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.419820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.477355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.528624Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568527288769701:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:36.528877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:36.577871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.659541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.755732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.845146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568548763608404:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.845222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.845288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568548763608409:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:36.849010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:36.866404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568548763608411:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:38.975415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 6157, MsgBus: 5194 2025-04-07T13:51:42.460337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568574322873260:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:42.460389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ae8/r3tmp/tmpD8AVol/pdisk_1.dat 2025-04-07T13:51:42.610011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:42.642966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:42.643062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:42.649049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6157, node 2 2025-04-07T13:51:42.737377Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:42.737402Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:42.737409Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:42.737512Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5194 TClient is connected to server localhost:5194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:51:43.362644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.379845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
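(Not part of the test log.) The ESchemeOpCreateTable operations above are the index test suite creating its tables. A hedged sketch of how a table with a global async secondary index might be declared through the same Python SDK; the schema and all names here are illustrative assumptions, not the tables these tests actually create:

```python
# Hedged sketch, not the tests' actual schema: declaring a table with a
# GLOBAL ASYNC secondary index via a YQL scheme (DDL) statement.
import ydb

CREATE_TABLE = """
CREATE TABLE `TestTable` (
    Key Uint64,
    Value Uint64,
    Payload Utf8,
    PRIMARY KEY (Key),
    INDEX ByValueIndex GLOBAL ASYNC ON (Value)
);
"""


def create_table(pool: ydb.SessionPool):
    # DDL goes through execute_scheme rather than a data transaction.
    pool.retry_operation_sync(lambda session: session.execute_scheme(CREATE_TABLE))
```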
2025-04-07T13:51:43.520938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.704300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.796692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.942431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568587207776851:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.942533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.968628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.013402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.114987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.212977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.316689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.404258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.538200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568591502744658:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.538290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.538698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568591502744663:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.543165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:46.559045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568591502744665:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:47.477899Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568574322873260:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:47.477966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:47.920062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:146:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:146:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:156:2057] recipient: [7:154:2175] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:158:2057] recipient: [7:154:2175] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:157:2176] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:205:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:160:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:159:2179] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:163:2057] recipient: [10:159:2179] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:162:2180] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:210:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader f ... 37927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:153:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:152:2173] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:156:2057] recipient: [24:152:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:155:2174] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:225:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:155:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:158:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:159:2057] recipient: [25:157:2178] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:161:2057] recipient: [25:157:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:160:2179] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:230:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:155:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:158:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:159:2057] recipient: [26:157:2178] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:161:2057] recipient: [26:157:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:160:2179] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:230:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:160:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:161:2057] recipient: [27:159:2179] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:163:2057] recipient: [27:159:2179] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:162:2180] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:232:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:162:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:165:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:166:2057] recipient: [28:164:2184] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:168:2057] recipient: [28:164:2184] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:167:2185] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:237:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:162:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:165:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:166:2057] recipient: [29:164:2184] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:168:2057] recipient: [29:164:2184] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:167:2185] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:237:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:167:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:170:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:171:2057] recipient: [30:169:2188] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:173:2057] recipient: [30:169:2188] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:172:2189] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:242:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:169:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:172:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:173:2057] recipient: [31:171:2190] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:175:2057] recipient: [31:171:2190] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:174:2191] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:244:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:169:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:172:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:173:2057] recipient: [32:171:2190] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:175:2057] recipient: [32:171:2190] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:174:2191] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:244:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:174:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:177:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:178:2057] recipient: [33:176:2194] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:180:2057] recipient: [33:176:2194] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:179:2195] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:249:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18388, MsgBus: 1533 2025-04-07T13:51:18.980909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568473020167425:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:18.980979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b1b/r3tmp/tmpyHbD89/pdisk_1.dat 2025-04-07T13:51:19.509239Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:19.515124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:19.515220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:19.519086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18388, node 1 2025-04-07T13:51:19.631160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:19.631182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:19.631190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:19.631262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1533 TClient is connected to server localhost:1533 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:20.396778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.409705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:20.423453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.587653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.781250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:20.846974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:22.772400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568490200038111:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.772514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:22.959228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.029285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.059891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.094348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.131995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.214544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:23.306665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568494495005917:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.306731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.306970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568494495005922:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:23.311921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:23.325107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568494495005924:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:23.990941Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568473020167425:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:23.990983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:24.355301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18899, MsgBus: 4415 2025-04-07T13:51:27.665088Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568511878111837:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:27.665179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b1b/r3tmp/tmpFzJunR/pdisk_1.dat 2025-04-07T13:51:27.753662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:27.769721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:27.769797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:27.771159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18899, node 2 2025-04-07T13:51:27.825677Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:27.825699Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:27.825707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:27.825810Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4415 TClient is connected to server localhost:4415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:28.282471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:28.289840Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:28.306788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.383305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.529061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:28.591316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:30.902801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568524763015403:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:30.902909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:30.946919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:30.990409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.034666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.067432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.097797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.139380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.180461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568529057983192:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.180553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.181743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568529057983197:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:31.185771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:31.205067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568529057983199:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:32.442588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.667910Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568511878111837:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.668232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:42.736601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:51:42.736632Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:42.978044Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-07T13:51:42.978084Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-07T13:51:42.978104Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-07T13:51:42.992949Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-07T13:51:43.001046Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-07T13:51:43.001084Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-07T13:51:43.009340Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-07T13:51:43.708893Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568580597595659:2827], TxId: 281474976715731, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWFjYzllNDktM2Y3ODU4NzQtOTY3MTYyMDEtNzFlMjQyNmE=. CustomerSuppliedId : . TraceId : 01jr88awk46gyby0ngw9c1wnr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:43.709336Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568580597595660:2828], TxId: 281474976715731, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWFjYzllNDktM2Y3ODU4NzQtOTY3MTYyMDEtNzFlMjQyNmE=. TraceId : 01jr88awk46gyby0ngw9c1wnr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490568580597595656:2460], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:43.711590Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWFjYzllNDktM2Y3ODU4NzQtOTY3MTYyMDEtNzFlMjQyNmE=, ActorId: [2:7490568533352950810:2460], ActorState: ExecuteState, TraceId: 01jr88awk46gyby0ngw9c1wnr4, Create QueryResponse for error on request, msg: 2025-04-07T13:51:47.763327Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-07T13:51:47.763543Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex [GOOD] >> KqpIndexes::DeleteByIndex >> TPQCDTest::TestUnavailableWithoutClustersList >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:152:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:152:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:149:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:150:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:150:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:150:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:150:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:226:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:225:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:157:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:156:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:156:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:162:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:161:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:161:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:234:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] >> KqpIndexes::UpdateOnReadColumns [GOOD] >> TPQCDTest::TestUnavailableWithoutBoth >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> KqpIndexes::SecondaryIndexInsert1 [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexReplace >> EraseRowsTests::EraseRowsShouldSuccess >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 32024, MsgBus: 22153 2025-04-07T13:50:52.220202Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568360024377836:2170];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:52.220570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b7a/r3tmp/tmpncLIYm/pdisk_1.dat 2025-04-07T13:50:52.727434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:52.765813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:52.765917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:52.767204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32024, node 1 2025-04-07T13:50:53.001375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:53.001393Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:53.001400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:53.001491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22153 TClient is connected to server localhost:22153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:53.700138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:53.741002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:53.916843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:54.101194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:54.195991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:55.998396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568372909281308:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:55.998497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.246767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.329525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.373791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.411374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.451852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.544932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:50:56.667962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568377204249108:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.668028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.668085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568377204249113:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:56.677241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:50:56.696364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:50:56.696622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568377204249115:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:50:57.220868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568360024377836:2170];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:57.230740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:57.715676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2756, MsgBus: 18081 2025-04-07T13:51:00.283535Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568395398430546:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:00.283591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b7a/r3tmp/tmpoLRlNE/pdisk_1.dat 2025-04-07T13:51:00.394416Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:00.408508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:00.408579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:00.410164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2756, node 2 2025-04-07T13:51:00.454574Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:00.454600Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:00.454609Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:00.454720Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18081 TClient is connected to server localhost:18081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:00.945759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:00.953613Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:00.963851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:01.046078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:01.227561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:01.327801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 046644480 2025-04-07T13:51:31.939974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:31.993665Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.054782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.120249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:32.222548Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568530468717128:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.222687Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.223002Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490568530468717133:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:32.231768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:32.247464Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:32.248076Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490568530468717135:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:32.508488Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490568508993878448:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.508568Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:34.076572Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.193095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.305812Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24213, MsgBus: 5131 2025-04-07T13:51:40.494425Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490568564831694223:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:40.494491Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b7a/r3tmp/tmpkQupcH/pdisk_1.dat 2025-04-07T13:51:40.613671Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:40.641416Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:40.641524Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:40.646243Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24213, node 6 2025-04-07T13:51:40.743831Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:40.743862Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:40.743876Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:40.744005Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5131 TClient is connected to server localhost:5131 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:41.527509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.542969Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:41.559180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.681764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.921006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:42.021924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.090077Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568586306532418:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.090178Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.147680Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.207037Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.263304Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.371312Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.464288Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.532595Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490568564831694223:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:45.533087Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:45.588145Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:45.747136Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568586306532933:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.747287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.748099Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490568586306532939:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.754464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:45.802783Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490568586306532941:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:47.766266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:47.855646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:47.940753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 25459, MsgBus: 17430 2025-04-07T13:51:36.699423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568550330345540:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:36.699472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab9/r3tmp/tmph6eYpc/pdisk_1.dat 2025-04-07T13:51:37.210035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:37.214753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:37.214861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:37.219299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25459, node 1 2025-04-07T13:51:37.443676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:37.443701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:37.443706Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:37.443782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17430 TClient is connected to server localhost:17430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:38.436171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.453163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:38.463972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.628312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.916322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.017799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.885511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568567510216224:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.901097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.933390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.012592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.060124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.097701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.185921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.241113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.307896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568571805184024:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.307960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.308153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568571805184029:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.311703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:41.326781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568571805184031:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:41.700011Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568550330345540:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:41.700071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:42.304560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.210716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 25351, MsgBus: 8183 2025-04-07T13:51:44.493125Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568584553501502:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:44.559547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab9/r3tmp/tmpBLWLVQ/pdisk_1.dat 2025-04-07T13:51:44.689186Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:44.699615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:44.705219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:44.713502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25351, node 2 2025-04-07T13:51:44.825151Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:44.825174Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:44.825185Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:44.825264Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8183 TClient is connected to server localhost:8183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:45.251178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.258464Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:45.274980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.325980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.482534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.555831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:48.011181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568597438404935:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.011405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.047858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.085371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.168926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.220538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.256031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.313634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.401867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568601733372730:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.401934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.402240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568601733372735:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.405943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:48.419320Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:51:48.420163Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568601733372737:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:49.433240Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568584553501502:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:49.433297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:49.916309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 21068, MsgBus: 17047 2025-04-07T13:51:31.280988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568528715397428:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:31.286442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af1/r3tmp/tmpfXEWOc/pdisk_1.dat 2025-04-07T13:51:31.696777Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21068, node 1 2025-04-07T13:51:31.727585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:31.727683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:31.729388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:31.790414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:31.790437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:31.790451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:31.790578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17047 TClient is connected to server localhost:17047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:32.405248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.423774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:32.446261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.618107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:32.846813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:32.936995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.417585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568545895268307:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.437245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.467344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.514224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.546043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.601837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.659730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.747769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:35.835723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568545895268809:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.835809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.836166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568545895268814:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:35.840103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:35.853186Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:35.853465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568545895268816:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:36.262424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568528715397428:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:36.262486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:37.164569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28639, MsgBus: 9636 2025-04-07T13:51:45.525161Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568587434517782:2176];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001af1/r3tmp/tmpRHLMH1/pdisk_1.dat 2025-04-07T13:51:45.633664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:45.687805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:45.687879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:45.690307Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:45.701966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28639, node 2 2025-04-07T13:51:45.792825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:45.792848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:45.792857Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:45.792989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9636 TClient is connected to server localhost:9636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:46.494814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:46.517086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:46.621724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.008736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.139740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:50.517094Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568587434517782:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:50.517159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:50.546872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568608909355807:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.549486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.576864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.664917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.760849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.814490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.878590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.001604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.086235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568613204323614:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.086307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.086506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568613204323619:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.089761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:51.128810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568613204323621:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:53.131441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.777429Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> KqpIndexes::IndexFilterPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:152:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:152:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:149:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:150:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:150:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:150:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:150:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:225:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:225:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:157:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:156:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:156:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:162:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:161:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:161:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:235:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 25116, MsgBus: 61407 2025-04-07T13:51:35.952270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568544286797981:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:35.952314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab7/r3tmp/tmpL6k6Vx/pdisk_1.dat 2025-04-07T13:51:36.811461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:36.815802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:36.815896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:36.821659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25116, node 1 2025-04-07T13:51:37.045243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:37.045280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:37.045288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:37.045383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61407 TClient is connected to server localhost:61407 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:38.110062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.142868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:38.445903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:38.687369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.804086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.818423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568565761636187:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.818528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.975446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568544286797981:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:40.975512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:41.209672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.258864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.299692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.352088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.406912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.507701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.609901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568570056603986:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.610004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.610382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568570056603991:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.614906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:41.633315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568570056603993:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:42.943811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.115001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:51:43.196549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.536392Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:44.431987Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 18101, MsgBus: 23557 2025-04-07T13:51:45.287681Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568588035676085:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab7/r3tmp/tmp3YHTLt/pdisk_1.dat 2025-04-07T13:51:45.354207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:45.465745Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:45.519644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:45.519733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:45.525982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18101, node 2 2025-04-07T13:51:45.702476Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:45.702497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:45.702511Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:45.702617Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23557 TClient is connected to server localhost:23557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:51:46.449995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.485348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:46.503694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:46.642346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:46.959455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.064921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.321806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568605215546817:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:49.321894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:49.362943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.408065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.481830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.524284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.603134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.702196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.787120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568605215547322:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:49.787203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:49.787575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568605215547327:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:49.791564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:49.819316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568605215547329:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:50.321175Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568588035676085:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:50.321329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:51.036118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.063929Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:53.079285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:51:53.169803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.543389Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpMultishardIndex::DuplicateUpsert-StreamLookup [GOOD] |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 23317, MsgBus: 26387 2025-04-07T13:51:35.761092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568546092418815:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:35.763047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ac5/r3tmp/tmpGx3zaN/pdisk_1.dat 2025-04-07T13:51:36.377027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:36.413113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:36.413195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:36.417810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23317, node 1 2025-04-07T13:51:36.658974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:36.658999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:36.659013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:36.659096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:26387 TClient is connected to server localhost:26387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:37.490967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.511249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.692876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.899244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.997632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.472753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568567567256866:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.487880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.569419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.655335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.696798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568546092418815:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:40.696875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:40.719720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.794622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.848431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.906673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:41.017314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568571862224670:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.017398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.017730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568571862224675:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:41.025195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:41.044241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:41.044527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568571862224677:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:42.375552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.464894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.570618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20804, MsgBus: 4078 2025-04-07T13:51:46.368933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568591915268914:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ac5/r3tmp/tmpn1WutS/pdisk_1.dat 2025-04-07T13:51:46.478477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:46.656174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:46.711318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:46.711432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:46.713526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20804, node 2 2025-04-07T13:51:46.893407Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:46.893434Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:46.893443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:46.893544Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4078 TClient is connected to server localhost:4078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:47.713439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:47.726837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.803267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:48.007180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:48.118347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:50.399340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568609095139636:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.399499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.511798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.554995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.596537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.672570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.720213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.822535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.916323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568609095140142:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.916456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.917041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568609095140147:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:50.923820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:50.944722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568609095140149:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:51.352485Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568591915268914:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:51.352558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:52.762589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.854943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.988020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> KqpMultishardIndex::DataColumnSelect+StreamLookup [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-04-07T13:51:52.897338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568617894218272:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001822/r3tmp/tmpcVVlHb/pdisk_1.dat 2025-04-07T13:51:53.290331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:53.686224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:53.686305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:53.697691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:53.702548Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10606, node 1 2025-04-07T13:51:53.969254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001822/r3tmp/yandexISRJUQ.tmp 2025-04-07T13:51:53.969300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001822/r3tmp/yandexISRJUQ.tmp 2025-04-07T13:51:53.969425Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001822/r3tmp/yandexISRJUQ.tmp 
2025-04-07T13:51:53.969495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:54.077174Z node 1 :HTTP WARN: [::1]:40974 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.189489Z node 1 :HTTP WARN: [::1]:40984 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.304772Z node 1 :HTTP WARN: [::1]:40992 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.417123Z node 1 :HTTP WARN: [::1]:40998 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.525176Z node 1 :HTTP WARN: [::1]:41012 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.632771Z node 1 :HTTP WARN: [::1]:41018 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.735325Z node 1 :HTTP WARN: [::1]:41034 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.846031Z node 1 :HTTP WARN: [::1]:41036 anonymous GET /actors/pqcd/health 2025-04-07T13:51:54.950202Z node 1 :HTTP WARN: [::1]:41046 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.058718Z node 1 :HTTP WARN: [::1]:41054 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.165098Z node 1 :HTTP WARN: [::1]:41066 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.279563Z node 1 :HTTP WARN: [::1]:41074 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.382643Z node 1 :HTTP WARN: [::1]:41078 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.490667Z node 1 :HTTP WARN: [::1]:41090 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.594380Z node 1 :HTTP WARN: [::1]:41106 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.701911Z node 1 :HTTP WARN: [::1]:41120 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.813180Z node 1 :HTTP WARN: [::1]:41130 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.921299Z node 1 :HTTP WARN: [::1]:41146 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.037707Z node 1 :HTTP WARN: [::1]:41152 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.160735Z node 1 :HTTP WARN: [::1]:41158 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.265923Z node 1 :HTTP WARN: [::1]:41166 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.368374Z node 1 :HTTP WARN: [::1]:41178 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.470616Z node 1 :HTTP WARN: [::1]:41188 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.577071Z node 1 :HTTP WARN: [::1]:41200 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.681739Z node 1 :HTTP WARN: [::1]:41210 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.785712Z node 1 :HTTP WARN: [::1]:41216 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.888398Z node 1 :HTTP WARN: [::1]:41230 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.994914Z node 1 :HTTP WARN: [::1]:38410 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.096769Z node 1 :HTTP WARN: [::1]:38422 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.201313Z node 1 :HTTP WARN: [::1]:38428 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.418506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568639369055292:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.418594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.418638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568639369055299:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.433976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-07T13:51:57.482550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568639369055321:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } >> KqpIndexes::Uint8Index [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10791, MsgBus: 15511 2025-04-07T13:51:34.757166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568539204645233:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:34.757405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aca/r3tmp/tmpzlGp0z/pdisk_1.dat 2025-04-07T13:51:35.380207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:35.380315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:35.396595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:35.437030Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10791, node 1 2025-04-07T13:51:35.589166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:35.589186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:35.589192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:35.589291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15511 TClient is connected to server localhost:15511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:36.515563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.557216Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:36.571150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:36.939223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.247639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.358625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.462337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568560679483282:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.462462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.762044Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568539204645233:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:39.762153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.787928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.825824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.862998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.914301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.956323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.014509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.074903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568564974451077:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.074978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.075301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568564974451082:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.078747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:40.088937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568564974451084:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:41.229243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18616, MsgBus: 22269 2025-04-07T13:51:47.809990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568594920360679:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:47.900191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aca/r3tmp/tmpXvwU67/pdisk_1.dat 2025-04-07T13:51:48.036588Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:48.061868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:48.061974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:48.063077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18616, node 2 2025-04-07T13:51:48.237143Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:48.237165Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:48.237171Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:48.237267Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22269 TClient is connected to server localhost:22269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:49.012285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.052121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:49.161877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.367566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.461890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:52.521250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568616395198843:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.521367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.582594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.637688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.699401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.759299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.810254Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568594920360679:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:52.810311Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:52.851835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.947444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.041620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568620690166642:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:53.041714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:53.042093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568620690166647:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:53.045790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:53.061238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568620690166649:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:55.001268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-04-07T13:51:54.021053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568620860437900:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:54.021255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001815/r3tmp/tmprp46bO/pdisk_1.dat 2025-04-07T13:51:54.881284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:54.954999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:54.955119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:55.007276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17685, node 1 2025-04-07T13:51:55.289313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:55.289335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:55.289343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:55.289424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:55.402041Z node 1 :HTTP WARN: [::1]:56548 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.524190Z node 1 :HTTP WARN: [::1]:56550 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.630040Z node 1 :HTTP WARN: [::1]:56558 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.733998Z node 1 :HTTP WARN: [::1]:56562 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.841192Z node 1 :HTTP WARN: [::1]:56564 anonymous GET /actors/pqcd/health 2025-04-07T13:51:55.949732Z node 1 :HTTP WARN: [::1]:56576 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.069643Z node 1 :HTTP WARN: [::1]:56590 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.188934Z node 1 :HTTP WARN: [::1]:56604 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.309517Z node 1 :HTTP WARN: [::1]:56610 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.413392Z node 1 :HTTP WARN: [::1]:56624 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.516099Z node 1 :HTTP WARN: [::1]:56628 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.618541Z node 1 :HTTP WARN: [::1]:56630 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.725949Z node 1 :HTTP WARN: [::1]:56634 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.837049Z node 1 :HTTP WARN: [::1]:56650 anonymous GET /actors/pqcd/health 2025-04-07T13:51:56.949586Z node 1 :HTTP WARN: [::1]:40916 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.053937Z node 1 :HTTP WARN: [::1]:40918 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.158972Z node 1 :HTTP WARN: 
[::1]:40920 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.269161Z node 1 :HTTP WARN: [::1]:40934 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.379630Z node 1 :HTTP WARN: [::1]:40950 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.485158Z node 1 :HTTP WARN: [::1]:40958 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.592795Z node 1 :HTTP WARN: [::1]:40964 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.695011Z node 1 :HTTP WARN: [::1]:40978 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.801905Z node 1 :HTTP WARN: [::1]:40982 anonymous GET /actors/pqcd/health 2025-04-07T13:51:57.909837Z node 1 :HTTP WARN: [::1]:40992 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.013719Z node 1 :HTTP WARN: [::1]:41000 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.121577Z node 1 :HTTP WARN: [::1]:41006 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.225814Z node 1 :HTTP WARN: [::1]:41016 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.337128Z node 1 :HTTP WARN: [::1]:41022 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.444856Z node 1 :HTTP WARN: [::1]:41024 anonymous GET /actors/pqcd/health 2025-04-07T13:51:58.548915Z node 1 :HTTP WARN: [::1]:41034 anonymous GET /actors/pqcd/health 2025-04-07T13:51:59.029621Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568620860437900:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:59.029696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-04-07T13:51:51.039177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568610540112052:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:51.039348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001852/r3tmp/tmpqzfXyo/pdisk_1.dat 2025-04-07T13:51:52.032906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:52.081484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:52.081577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:52.082465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:52.084282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17008, node 1 2025-04-07T13:51:52.389282Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001852/r3tmp/yandexPO4bVH.tmp 2025-04-07T13:51:52.389305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001852/r3tmp/yandexPO4bVH.tmp 2025-04-07T13:51:52.389452Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/u6pr/001852/r3tmp/yandexPO4bVH.tmp 2025-04-07T13:51:52.389545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2879 PQClient connected to localhost:17008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:53.326885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:53.398275Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.416866Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.442091Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T13:51:56.037314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568610540112052:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:56.037366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:57.380587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568640604883687:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.380780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.390467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568640604883708:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.397250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:51:57.446701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568640604883727:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.464297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.470147Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:51:57.472931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568640604883710:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:51:58.084814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:58.098967Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568640604883775:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:51:58.100531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmNlYWQ1YmYtZDllZTA1OTktYmYwZjBhMjEtMTNlYTUwMDY=, ActorId: [1:7490568640604883668:2304], ActorState: ExecuteState, TraceId: 01jr88back88xjxe6saek5pzhm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:51:58.136802Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:51:58.435138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:58.610601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:51:59.314434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88bbz80kctqt6wr5dmfzkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUxMTc1OTAtNjIzMTkxNGYtZGRlMzRiMTgtNGJmOGFmNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpIndexes::DeleteByIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD] Test command err: Trying to start YDB, gRPC: 13880, MsgBus: 5959 2025-04-07T13:51:39.878712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568563455648770:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:39.878761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa4/r3tmp/tmpY4wLdG/pdisk_1.dat 2025-04-07T13:51:40.380923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:40.406850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:40.406987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:40.408615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13880, node 1 2025-04-07T13:51:40.473351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:40.473382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:40.473398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:40.473530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5959 TClient is connected to server localhost:5959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:41.203872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.226954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:41.242954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:41.438226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.673001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.770442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.525608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568580635519632:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:43.525722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.236287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.321521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.445449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.542982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.636036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.735777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.844835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568584930487447:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.844939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.848384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568584930487452:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:44.858728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:44.880782Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568563455648770:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:44.884875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:44.912452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568584930487454:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:46.147899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.219686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.315305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16701, MsgBus: 10563 2025-04-07T13:51:51.392435Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568611103657971:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa4/r3tmp/tmpVNtOr8/pdisk_1.dat 2025-04-07T13:51:51.520123Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:51.793279Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:51.823388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:51.823482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:51.838409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16701, node 2 2025-04-07T13:51:52.112107Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:52.112136Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:52.112146Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:52.112262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10563 TClient is connected to server localhost:10563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:53.530254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:53.543895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.634060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.828561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.956047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:56.361432Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568611103657971:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:56.361489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:57.460714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568636873463291:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.460824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.515031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.554297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.601555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.658615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.749234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.809829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.877091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568636873463789:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.877182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.877438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568636873463794:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.881610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:57.895272Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:57.895412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568636873463796:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:59.116105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:59.347493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:51:59.450714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:51:59.540960Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 10062, MsgBus: 12516 2025-04-07T13:51:42.309256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568574262551949:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:42.309545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa2/r3tmp/tmprbK5O7/pdisk_1.dat 2025-04-07T13:51:42.745270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10062, node 1 2025-04-07T13:51:42.767114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:42.767208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:42.768696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:42.836453Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:42.836472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:42.836477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:42.836545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12516 TClient is connected to server localhost:12516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:43.664402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.719832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:43.743645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.950085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:44.117859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:44.203971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.828076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568587147455390:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:45.828180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.133752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.186480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.233825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.271412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.307784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.341300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:46.416466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568591442423184:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.416544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.416967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568591442423189:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:46.421314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:46.436632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568591442423191:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:47.366391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568574262551949:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:47.366467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:47.750349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 609 cpu_time_us: 609 } query_phases { duration_us: 1532 cpu_time_us: 1532 } query_phases { duration_us: 26976 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5826 affected_shards: 1 } query_phases { duration_us: 1519 cpu_time_us: 1519 } query_phases { duration_us: 5801 cpu_time_us: 5801 } query_phases { duration_us: 4536 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 3547 } query_phases { duration_us: 1690 cpu_time_us: 1690 } query_phases { duration_us: 5489 cpu_time_us: 5489 } query_phases { duration_us: 3860 cpu_time_us: 5191 } query_phases { duration_us: 36809 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3550 affected_shards: 2 } compilation { duration_us: 1367030 cpu_time_us: 1317517 } process_cpu_time_us: 29597 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744033909279,\"TaskId\":1,\"Host\":\"ghrun-lqtfcktism\",\"ComputeTimeUs\":70}],\"CpuTimeUs\":664}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1744033909278,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":664,\"Max\":664,\"Min\":664}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744033909278,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744033909278,\"TaskId\":2,\"Host\":\"ghrun-lqtfcktism\",\"ComputeTimeUs\":113}],\"CpuTimeUs\":492}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1744033909278,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":492,\"Max\":492,\"Min\":492}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":35,\"Subplan Name\":\"CTE precompute_8_1\",\"Plans\":[{\"PlanNodeId\":34,\"Plans\":[{\"PlanNodeId\":33,\"Plans\":[{\"PlanNodeId\":32,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1}],\"Iterator\":\"Map\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"E-Rows\":\"1\",\"Predicate\":\"Nth\",\"Name\":\"Filter\",\"E-Size\":\"5\",\"E-Cost\":\"0\"}],\"Node Type\":\"ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744033909239,\"StartTimeMs\":1744033909238,\"TaskId\":2,\"Host\":\"ghrun-lqtfcktism\",\"ComputeTimeUs\":12,\"OutputChannels\":[{\"ChannelId\":2,\"DstStageId\":3}]}],\"PeakMemoryUsageBytes\":65536,\"DurationUs\":1000,\"CpuTimeUs\":938}],\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Cou ... \'(\'(\'\"_logical_id\" \'5233) \'(\'\"_id\" \'\"77cf71aa-cc9ae500-65f38a65-81ea8846\"))))\n(let $91 (DqCnValue (TDqOutput $89 \'0)))\n(let $92 (DqCnValue (TDqOutput $90 \'0)))\n(let $93 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $94 \'($87 $93))\n(let $95 (KqpPhysicalTx \'($89 $90) \'($91 $92) \'($47 $94) $3))\n(let $96 \'\"%kqp%tx_result_binding_7_0\")\n(let $97 (DictType $19 (TupleType $40 $52)))\n(let $98 %kqp%tx_result_binding_7_0)\n(let $99 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $88 (lambda \'($178) (AsStruct \'(\'\"Key\" (Member $178 \'\"Key\")) \'(\'\"fk1\" (Member $178 \'\"fk1\")) \'(\'\"fk3\" (Member $178 \'\"fk3\"))))) (lambda \'($179) (block \'(\n (let $180 \'(\'\"Key\" (Member $179 \'\"Key\")))\n (let $181 \'(\'\"fk1\" (Member $179 \'\"fk1\")))\n (let $182 \'(\'\"fk3\" (Member $179 \'\"fk3\")))\n (return (IfPresent (Lookup $98 (AsStruct $180)) (lambda \'($183) (If (Nth $183 \'1) (Just (AsStruct $180 $181 \'(\'\"fk2\" (Member (Nth $183 \'0) \'\"fk2\")) $182)) (Nothing (OptionalType $27)))) (Just (AsStruct $180 $181 $43 $182))))\n)))))) \'(\'(\'\"_logical_id\" \'5855) \'(\'\"_id\" \'\"abc37568-7ad9b9de-858701d1-c1149acd\"))))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $98) (lambda \'($184) (Nth (Nth $184 \'1) \'1))) (lambda \'($185) (block \'(\n (let $186 (Nth (Nth $185 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $185 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $186 \'\"fk1\")) \'(\'\"fk2\" (Member $186 \'\"fk2\")) \'(\'\"fk3\" (Member $186 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'5869) \'(\'\"_id\" \'\"f0e7ec66-1197c66f-3329c587-e5c543b5\"))))\n(let $101 (DqCnUnionAll (TDqOutput $99 \'0)))\n(let $102 (lambda \'($187) $187))\n(let $103 (DqPhyStage \'($101) $102 \'(\'(\'\"_logical_id\" \'5965) \'(\'\"_id\" \'\"9ecb56e6-17f94a85-959efcfb-57ea31e3\"))))\n(let $104 (DqCnUnionAll (TDqOutput $100 \'0)))\n(let $105 (DqPhyStage \'($104) $102 \'(\'(\'\"_logical_id\" \'5980) \'(\'\"_id\" \'\"70ce975a-14ceed0f-e8a1edfe-3ea1538e\"))))\n(let $106 \'($99 $100 $103 $105))\n(let $107 (DqCnResult (TDqOutput $103 \'0) \'()))\n(let $108 (DqCnResult (TDqOutput $105 \'0) \'()))\n(let $109 (KqpTxResultBinding $97 \'\"7\" \'0))\n(let $110 (KqpPhysicalTx $106 
\'($107 $108) \'($94 \'($96 $109)) $3))\n(let $111 \'\"%kqp%tx_result_binding_7_1\")\n(let $112 (DqPhyStage \'() (lambda \'() (block \'(\n (let $188 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $22 (Iterator %kqp%tx_result_binding_7_1) $188 \'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'6495) \'(\'\"_id\" \'\"1bbf360f-3a6c288-9184738f-43d01e20\"))))\n(let $113 \'\"%kqp%tx_result_binding_8_1\")\n(let $114 (ListType $27))\n(let $115 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $64 (Iterator %kqp%tx_result_binding_8_1)))) \'(\'(\'\"_logical_id\" \'6509) \'(\'\"_id\" \'\"a264771-5ee1a56-1c279524-cbeedd1f\"))))\n(let $116 \'\"%kqp%tx_result_binding_8_0\")\n(let $117 (DqPhyStage \'() (lambda \'() (block \'(\n (let $189 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $64 (Iterator %kqp%tx_result_binding_8_0) $189 \'())))\n))) \'(\'(\'\"_logical_id\" \'6523) \'(\'\"_id\" \'\"a6a57d2-3f1f676f-9c25c403-3798609\"))))\n(let $118 \'($112 $115 $117))\n(let $119 (KqpTxResultBinding $11 \'\"7\" \'1))\n(let $120 (KqpTxResultBinding $114 \'\"8\" \'0))\n(let $121 (KqpTxResultBinding $114 \'\"8\" \'1))\n(let $122 \'(\'($111 $119) \'($116 $120) \'($113 $121)))\n(let $123 (KqpPhysicalTx $118 \'() $122 \'($36 \'(\'\"with_effects\"))))\n(let $124 \'($4 $17 $38 $48 $61 $76 $86 $95 $110 $123))\n(return (KqpPhysicalQuery $124 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1530076 total_cpu_time_us: 1381868 2025-04-07T13:51:49.577480Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:50.980896Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19838, MsgBus: 13564 2025-04-07T13:51:52.534375Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568615451257582:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa2/r3tmp/tmpYtXHBN/pdisk_1.dat 2025-04-07T13:51:52.763059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:52.928155Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:52.973634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:52.973714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:52.977966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19838, node 2 2025-04-07T13:51:53.093287Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:53.093308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:53.093316Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:53.093464Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13564 TClient is connected to server localhost:13564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:53.737787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.746591Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:53.755384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.853210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:54.017161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:54.120871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:56.254094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568632631128315:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.254192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.301440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.363649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.407771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.450200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.494574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.574920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.631249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568632631128811:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.631339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.631537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568632631128816:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.634681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:56.651269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568632631128818:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:57.510970Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568615451257582:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:57.511031Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:58.052877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-04-07T13:51:54.080250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568625244027086:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:54.093011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017dd/r3tmp/tmp0rqglX/pdisk_1.dat 2025-04-07T13:51:54.849798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:54.849900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:54.859791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26999, node 1 2025-04-07T13:51:55.112500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:55.179990Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0017dd/r3tmp/yandex3L2zFt.tmp 2025-04-07T13:51:55.180010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0017dd/r3tmp/yandex3L2zFt.tmp 2025-04-07T13:51:55.180144Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0017dd/r3tmp/yandex3L2zFt.tmp 2025-04-07T13:51:55.180215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64673 PQClient connected to localhost:26999 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:55.860242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:51:55.924600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:59.092958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568625244027086:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:59.093075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:00.796073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568651013831406:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:00.796231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:00.796720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568651013831434:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:00.800370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:00.831783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568651013831436:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:01.323496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:01.329774Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568651013831509:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:01.341603Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmNlZGFiNDctYzk0Nzc2ZWQtMmVjMGM5M2ItNjc1MjY4NTc=, ActorId: [1:7490568651013831404:2307], ActorState: ExecuteState, TraceId: 01jr88bdr6321qrg1m6c85km80, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:01.343965Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:01.882622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.166005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:02.630745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88bf7j7be65k0pfwjdvcvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3YmQxZDQtNzlmMTY2OC1hZWQwMDk4My1hZTczNzdmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:02.688793Z node 1 :HTTP WARN: [::1]:53214 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.795221Z node 1 :HTTP WARN: [::1]:53222 anonymous GET /actors/pqcd/health >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TBlobStorageWardenTest::TestDeleteStoragePool >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-04-07T13:51:51.744034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568614357661761:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:51.744403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001869/r3tmp/tmpADZoDe/pdisk_1.dat 2025-04-07T13:51:52.680718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:52.694836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:52.694931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:52.697513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:52.697865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10810, node 1 2025-04-07T13:51:52.975313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:52.975333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:52.975339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:52.975424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32160 PQClient connected to localhost:10810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:53.713565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.754592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.765933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:53.800791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T13:51:56.724925Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568614357661761:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:56.724984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:58.112346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568644422433387:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.112534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.113871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568644422433423:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.118438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:51:58.336541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568644422433425:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:51:58.956573Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568644422433498:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:51:59.029010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTViZjVhYjMtYjIwNDk2OTUtZjM5OTM5ZGUtMmJlZmQ4YTU=, ActorId: [1:7490568644422433383:2305], ActorState: ExecuteState, TraceId: 01jr88bb4h0whrh82qyyvs462j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:51:59.038355Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:51:59.043282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:59.249747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:59.373592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:51:59.703822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88bcf327bcgkbt81k8z0h8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2QwNGNlOGEtNGEyYTdlMWItYTJkMTIyNGEtNjM4MzNiOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:51:59.793854Z node 1 :HTTP WARN: [::1]:60634 anonymous GET /actors/pqcd/health 2025-04-07T13:51:59.898136Z node 1 :HTTP WARN: [::1]:60648 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.001857Z node 1 :HTTP WARN: [::1]:60652 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.105892Z node 1 :HTTP WARN: [::1]:60656 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.222890Z node 1 :HTTP WARN: [::1]:60670 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.356810Z node 1 :HTTP WARN: [::1]:60678 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.466177Z node 1 :HTTP WARN: [::1]:60694 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.602506Z node 1 :HTTP WARN: [::1]:60710 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.710464Z node 1 :HTTP WARN: [::1]:60726 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.821137Z node 1 :HTTP WARN: [::1]:60734 anonymous GET /actors/pqcd/health 2025-04-07T13:52:00.929345Z node 1 :HTTP WARN: [::1]:60746 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.031369Z node 1 :HTTP WARN: [::1]:60760 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.146965Z node 1 :HTTP WARN: [::1]:60764 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.257492Z node 1 :HTTP WARN: [::1]:60778 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.369092Z node 1 :HTTP WARN: [::1]:60792 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.478019Z node 1 :HTTP WARN: [::1]:60800 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.583106Z node 1 :HTTP WARN: [::1]:60816 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.686540Z node 1 :HTTP WARN: [::1]:60822 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.789678Z node 1 :HTTP WARN: [::1]:60836 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.914170Z node 1 :HTTP WARN: [::1]:60846 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.025058Z node 1 :HTTP WARN: [::1]:60862 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.131478Z node 1 :HTTP WARN: [::1]:60866 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.240797Z node 1 :HTTP WARN: [::1]:60870 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.355081Z node 1 :HTTP WARN: [::1]:60884 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.461555Z node 1 :HTTP WARN: [::1]:60900 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.571310Z node 1 :HTTP WARN: [::1]:60904 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.711866Z node 1 :HTTP WARN: [::1]:60908 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.833853Z node 1 :HTTP WARN: [::1]:60914 anonymous GET /actors/pqcd/health 2025-04-07T13:52:02.939206Z node 1 :HTTP WARN: [::1]:60930 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.044874Z node 1 :HTTP WARN: [::1]:60940 anonymous GET /actors/pqcd/health >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-04-07T13:51:55.128490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568631866013445:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:55.128545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/0017c8/r3tmp/tmpOLisgt/pdisk_1.dat 2025-04-07T13:51:56.146440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:56.161586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:56.161668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:56.162939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:56.166850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13848, node 1 2025-04-07T13:51:56.457166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0017c8/r3tmp/yandexUpGyz2.tmp 2025-04-07T13:51:56.457187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0017c8/r3tmp/yandexUpGyz2.tmp 2025-04-07T13:51:56.457350Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0017c8/r3tmp/yandexUpGyz2.tmp 2025-04-07T13:51:56.457432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2221 PQClient connected to localhost:13848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:57.041180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:51:57.085286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:00.137778Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568631866013445:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:00.137861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:01.969457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568657635817720:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.969608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.971934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568657635817733:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.980438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:01.997153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568657635817735:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:02.353709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.359544Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568661930785104:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:02.360201Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI3MjZiY2EtOWM4NTFiZGMtNzkxOGY0OGItZjE5ZWE5MTg=, ActorId: [1:7490568657635817717:2307], ActorState: ExecuteState, TraceId: 01jr88bewffckb42gdgh3fprae, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:02.362117Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:02.503813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.614555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:02.857776Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88bfkv51mj1vjfrfze1rnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYzNTZhMWQtOTZlYWZmMTQtZTA0ZTFkMzEtOTMxYmNmNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:02.894801Z node 1 :HTTP WARN: [::1]:58272 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.008195Z node 1 :HTTP WARN: [::1]:58276 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.128789Z node 1 :HTTP WARN: [::1]:58290 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.237811Z node 1 :HTTP WARN: [::1]:58296 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.339904Z node 1 :HTTP WARN: [::1]:58310 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.453480Z node 1 :HTTP WARN: [::1]:58316 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.558373Z node 1 :HTTP WARN: [::1]:58330 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.697071Z node 1 :HTTP WARN: [::1]:58336 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.803642Z node 1 :HTTP WARN: [::1]:58340 anonymous GET /actors/pqcd/health 2025-04-07T13:52:03.921720Z node 1 :HTTP WARN: [::1]:58354 anonymous GET /actors/pqcd/health ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::DyNumberCompare [GOOD] Test command err: Trying to start YDB, gRPC: 1154, MsgBus: 14972 2025-04-07T13:49:28.188265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567998551486575:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:28.188431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013bd/r3tmp/tmpMCKRdc/pdisk_1.dat 2025-04-07T13:49:28.615640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:28.625265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:28.625367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:28.627975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1154, node 1 2025-04-07T13:49:28.729956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.729977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.729985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:28.730069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14972 TClient is connected to server localhost:14972 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:29.306129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.322378Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:29.332459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.445979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.586753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:29.660808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:31.276845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568011436389962:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.276950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.529868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.611344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.633680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.662438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.741308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.772878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:31.815399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568011436390460:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.815473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.815617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568011436390465:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:31.818832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:31.828164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568011436390467:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:32.910650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.187580Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567998551486575:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:33.187686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:43.605230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:43.605259Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:18.822273Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033818784, txId: 281474976710672] shutting down 2025-04-07T13:50:18.881138Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T13:50:20.268445Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033820202, txId: 281474976710674] shutting down 2025-04-07T13:50:22.023183Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033821986, txId: 281474976710676] shutting down 2025-04-07T13:50:23.272351Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033823245, txId: 281474976710678] shutting down 2025-04-07T13:50:24.528827Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033824514, txId: 281474976710680] shutting down 2025-04-07T13:50:25.921827Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033825864, txId: 281474976710682] shutting down 2025-04-07T13:50:27.279930Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033827258, txId: 281474976710684] shutting down 2025-04-07T13:50:28.706391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033828697, txId: 281474976710686] shutting down 2025-04-07T13:50:29.932849Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033829921, txId: 281474976710688] shutting down 2025-04-07T13:50:31.130408Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033831120, txId: 281474976710690] shutting down 2025-04-07T13:50:32.424243Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033832385, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1853261F 1. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551: Execute_ @ 0x17C8C178 2. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: operator() @ 0x17C9EEF7 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17C9EEF7 4. 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17C9EEF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17C9EEF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17C9EEF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x18571518 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x18571518 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x18571518 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18539188 11. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: Execute @ 0x17C9E07B 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1853AA55 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1856B15C 14. ??:0: ?? @ 0x7F58271E5D8F 15. ??:0: ?? @ 0x7F58271E5E3F 16. ??:0: ?? @ 0x15810028 Trying to start YDB, gRPC: 4481, MsgBus: 1915 2025-04-07T13:50:38.992482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568300 ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:35.548013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:35.651281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.746321Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568537815332404:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:38.746396Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.562715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568563585137880:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.562832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.607561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.644287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.736128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.795272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.836236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.895596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.018305Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568567880105681:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.018419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.018830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490568567880105686:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:40.024226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:40.047858Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:40.048144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568567880105688:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:41.953911Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568572175073375:2475], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2025-04-07T13:51:41.961469Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWE3Y2RkNjMtYjZjMGUzZmYtNDE5NGMxZTktZWM2YTYxNjc=, ActorId: [3:7490568572175073322:2466], ActorState: ExecuteState, TraceId: 01jr88av9v589cmbvkx1amtt19, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2025-04-07T13:51:41.994993Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568572175073401:2478], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-07T13:51:41.996964Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWE3Y2RkNjMtYjZjMGUzZmYtNDE5NGMxZTktZWM2YTYxNjc=, ActorId: [3:7490568572175073322:2466], ActorState: ExecuteState, TraceId: 01jr88avcn657tq2grm6rh8560, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-07T13:51:42.048922Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568576470040701:2480], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-04-07T13:51:42.051320Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWE3Y2RkNjMtYjZjMGUzZmYtNDE5NGMxZTktZWM2YTYxNjc=, ActorId: [3:7490568572175073322:2466], ActorState: ExecuteState, TraceId: 01jr88avdrd1g1jpbvgag803fy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> TCmsTenatsTest::TestNoneTenantPolicy >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |85.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TCmsTest::TestForceRestartModeDisconnects >> TPQCDTest::TestDiscoverClusters [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> KqpIndexes::SecondaryIndexReplace [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |85.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2025-04-07T13:51:53.681183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568622435941503:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:53.681456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017ef/r3tmp/tmp9XbZJm/pdisk_1.dat 2025-04-07T13:51:54.474746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:54.474860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:54.484382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:54.496379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10332, node 1 2025-04-07T13:51:54.789373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0017ef/r3tmp/yandexPKyr5T.tmp 
2025-04-07T13:51:54.789400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0017ef/r3tmp/yandexPKyr5T.tmp 2025-04-07T13:51:54.789560Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0017ef/r3tmp/yandexPKyr5T.tmp 2025-04-07T13:51:54.789670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11163 PQClient connected to localhost:10332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:55.452067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:55.509244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:58.652928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568622435941503:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:58.652984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:58.880032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568643910778548:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.880175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568643910778571:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.880246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:58.894758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:51:58.930371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568643910778573:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:51:59.608565Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568648205745942:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:51:59.627242Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGI2ZjNlMDEtNjMyOTFhZmMtOWJlMTEwMzAtYzVkZmI0NTQ=, ActorId: [1:7490568643910778530:2303], ActorState: ExecuteState, TraceId: 01jr88bbv64v0ps6zrxkqn3pdh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:51:59.629118Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:51:59.634215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:59.925215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:00.141869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:01.202509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88bd805v4agvc270cypsts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRhNDFlZjktYjkwZmMyZTQtOWFkYzljMTUtYzI2NzE5ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:01.519075Z node 1 :HTTP WARN: [::1]:33432 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.624070Z node 1 :HTTP WARN: [::1]:33448 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.736022Z node 1 :HTTP WARN: [::1]:33454 anonymous GET /actors/pqcd/health 2025-04-07T13:52:01.985209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr88beqt3b8hy3xf27js1gt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDEzNTA3ZDUtZTFhMjcwNjItNTY4ZWI5ZTgtMTllN2JjZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:01.991151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr88beqt3b8hy3xf27js1gt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDEzNTA3ZDUtZTFhMjcwNjItNTY4ZWI5ZTgtMTllN2JjZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:03.477781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. 
Ctx: { TraceId: 01jr88bg382pj6c1cfx1y3gzsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzgyYTc2YzYtZGRkZWE5MjYtZWNiYTQ5M2EtNDI1ZGIyOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:03.483248Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr88bg382pj6c1cfx1y3gzsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzgyYTc2YzYtZGRkZWE5MjYtZWNiYTQ5M2EtNDI1ZGIyOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:04.849467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr88bheqe6jbqxyyyq5av762, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI3OGYzYTItZTM0M2FjZTMtMjAyMWMwMzUtYTIxMDc3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:04.861092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr88bheqe6jbqxyyyq5av762, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI3OGYzYTItZTM0M2FjZTMtMjAyMWMwMzUtYTIxMDc3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:06.256145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr88bjtge4yyg2jerm4pa17f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNkZTQ1MjctNWM4MzU2MjctMTI1YTBkYjQtZmFkNDcxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:06.260205Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jr88bjtge4yyg2jerm4pa17f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNkZTQ1MjctNWM4MzU2MjctMTI1YTBkYjQtZmFkNDcxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:06.299106Z node 1 :HTTP WARN: [::1]:33460 anonymous GET /actors/pqcd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 6323, MsgBus: 31187 2025-04-07T13:51:37.669370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568554119877362:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:37.669414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab6/r3tmp/tmpJSKVvl/pdisk_1.dat 2025-04-07T13:51:38.319331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:38.319444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:38.322690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6323, node 1 2025-04-07T13:51:38.383822Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:51:38.383842Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:51:38.597028Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:38.597048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:38.597054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:38.597133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:38.607091Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31187 TClient is connected to server localhost:31187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:39.596382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:39.654826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:51:39.862894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:40.201526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:40.301584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:42.656996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568575594715337:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.657144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:42.708803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568554119877362:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:42.709082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:42.779755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.830901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.899213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:42.953559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.004981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.088234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:43.173762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568579889683135:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:43.173850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:43.174229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568579889683140:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:43.185280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:43.210255Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:43.211756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568579889683142:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:45.047507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:47.287847Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88azkn2n7zvp33ghh0gcke, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:47.304622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=, ActorId: [1:7490568588479618060:2467], ActorState: ExecuteState, TraceId: 01jr88azkn2n7zvp33ghh0gcke, Create QueryResponse for error on request, msg: 2025-04-07T13:51:47.967471Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568597069553267:2554], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=. TraceId : 01jr88b0kx806ff64gcy96eckz. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:51:47.969087Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490568597069553268:2555], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jr88b0kx806ff64gcy96eckz. SessionId : ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490568597069553264:2467], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:47.970221Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=, ActorId: [1:7490568588479618060:2467], ActorState: ExecuteState, TraceId: 01jr88b0kx806ff64gcy96eckz, Create QueryResponse for error on request, msg: 2025-04-07T13:51:49.031838Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88b18j84hvqnjw2k2dzqw7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:49.032075Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjdiNmEzN2UtOWU1YzYxOTgtNTQ2NGEzMDQtMzlhOWVlMzk=, ActorId: [1:7490568588479618060:2467], ActorState: ExecuteState, TraceId: 01jr88b18j84hvqnjw2k2dzqw7, Create QueryResponse for error on request, msg: 2025-04-07T13:51:49.110634Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:51:50.317046Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 2324, MsgBus: 15772 2025-04-07T13:51:51.728091Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568614131612394:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:51.728127Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab6/r3tmp/tmpv1yfx2/pdisk_1.dat 2025-04-07T13:51:52.207027Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:52.282635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:52.282755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:52.298420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2324, node 2 2025-04-07T13:51:52.541382Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:52.541406Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:52.541418Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:52.541540Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15772 TClient is connected to server localhost:15772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:51:53.465993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.486148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.602023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:53.887586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:54.033333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:56.732870Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568614131612394:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:56.732943Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:56.848906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568635606450574:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.849042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:56.902662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.973164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.057951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.119428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.199133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.309534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:57.377063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568639901418375:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.377176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.377530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568639901418380:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:57.381326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:57.397338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568639901418382:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:59.621546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.349138Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88bdx0e57zhrjzcea3s4j3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:52:02.349413Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=, ActorId: [2:7490568648491353315:2465], ActorState: ExecuteState, TraceId: 01jr88bdx0e57zhrjzcea3s4j3, Create QueryResponse for error on request, msg: 2025-04-07T13:52:02.853986Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568661376255798:2547], TxId: 281474976710679, task: 1. Ctx: { TraceId : 01jr88bf9s7sx6ehtrqmfffpry. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:52:02.855087Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490568661376255799:2548], TxId: 281474976710679, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr88bf9s7sx6ehtrqmfffpry. SessionId : ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490568661376255795:2465], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:52:02.856144Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=, ActorId: [2:7490568648491353315:2465], ActorState: ExecuteState, TraceId: 01jr88bf9s7sx6ehtrqmfffpry, Create QueryResponse for error on request, msg: 2025-04-07T13:52:04.138270Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88bfswcbmbw3gavftget75, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:52:04.138914Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDc3Njc2MGEtN2ViMDkwZGItODFmNmVkNDQtZmNjMTk4MTc=, ActorId: [2:7490568648491353315:2465], ActorState: ExecuteState, TraceId: 01jr88bfswcbmbw3gavftget75, Create QueryResponse for error on request, msg: 2025-04-07T13:52:05.879780Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:52:05.926472Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TopicService::AccessRights [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct |85.7%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace [GOOD] Test command err: Trying to start YDB, gRPC: 30239, MsgBus: 15480 2025-04-07T13:51:43.292881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568579121269154:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:43.292918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa3/r3tmp/tmpQplenf/pdisk_1.dat 2025-04-07T13:51:44.174440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:44.178636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:44.178743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:44.187189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30239, node 1 2025-04-07T13:51:44.437124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:44.437145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:44.437155Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-07T13:51:44.437233Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15480 TClient is connected to server localhost:15480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:45.381650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.427720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.680158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.930165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:46.014512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:48.197495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568596301140034:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.211790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.278810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.296439Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568579121269154:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:48.300824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:48.333335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.435047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.482846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.526554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.613737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.737084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568600596107838:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.737179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.737399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568600596107843:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.741760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:48.755362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568600596107845:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:50.321298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24726, MsgBus: 14245 2025-04-07T13:51:55.838585Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568630189952136:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa3/r3tmp/tmporyAC1/pdisk_1.dat 2025-04-07T13:51:56.047867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:56.234352Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:56.281554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:56.281635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:56.297945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24726, node 2 2025-04-07T13:51:56.500183Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:56.500205Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:56.500213Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:56.500304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14245 TClient is connected to server localhost:14245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:57.359484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:57.370685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:57.381545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:57.533102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:58.109770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:58.552177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:00.837530Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568630189952136:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:00.837601Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:02.902629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568660254724740:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:02.902915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:02.945094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.986638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.030969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.117603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.184324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.270432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.357611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568664549692549:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.357723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.358186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568664549692554:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.362531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:52:03.382108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568664549692557:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:52:04.657895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.092807Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:52:06.117679Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T13:52:06.925834Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |85.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-04-07T13:52:06.049415Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:06.049561Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:06.051982Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:06.052090Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 
0 2025-04-07T13:52:06.053357Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:06.053446Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e36/r3tmp/tmpEbyui0/pdisk_1.dat 2025-04-07T13:52:06.853434Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1282:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:06.853593Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853632Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853664Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853690Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853715Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853760Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1282:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.853801Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037932033:2:7:0:0:1282:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:06.853875Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1282:1] Marker# BPG33 2025-04-07T13:52:06.853917Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1282:1] Marker# BPG32 2025-04-07T13:52:06.853961Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1282:2] Marker# BPG33 2025-04-07T13:52:06.853990Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1282:2] Marker# BPG32 2025-04-07T13:52:06.854021Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1282:3] Marker# BPG33 2025-04-07T13:52:06.854049Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1282:3] Marker# BPG32 2025-04-07T13:52:06.854249Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1282:3] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.854332Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] 
NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1282:2] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.854390Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1282:1] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.870560Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1282:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:06.870832Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1282:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:06.870954Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1282:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:06.871049Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1282:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:06.871109Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1282:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:06.925491Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:515:2492] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:06.925642Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925681Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925711Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925738Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925782Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925806Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:06.925842Z node 1 
:BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:06.925908Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2025-04-07T13:52:06.925954Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2025-04-07T13:52:06.925993Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2025-04-07T13:52:06.926017Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2025-04-07T13:52:06.926046Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2025-04-07T13:52:06.926071Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2025-04-07T13:52:06.926225Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.926293Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.926337Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:06.934350Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:06.934598Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:06.934709Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:06.934784Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:06.934861Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# 
TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:06.935846Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:06.935891Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-07T13:52:06.935986Z nod ... 2:08.851861Z node 2 :BS_PROXY_PUT NOTICE: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2025-04-07T13:52:08.852261Z node 2 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] bootstrap ActorId# [2:595:2507] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:08.852369Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:08.852411Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:08.852458Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-07T13:52:08.852502Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-07T13:52:08.852611Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:08.855789Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-07T13:52:08.855899Z node 2 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-07T13:52:08.855956Z node 2 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:08.856444Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:08.856482Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-07T13:52:08.856596Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-07T13:52:08.857349Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# 
"/home/runner/.ya/build/build_root/u6pr/001e36/r3tmp/tmptiRobM//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-07T13:52:08.858666Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-07T13:52:08.858722Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-07T13:52:08.860428Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:599:2104] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.860554Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:600:2105] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.860649Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:601:2106] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.860788Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:602:2107] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.860885Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:603:2108] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.860985Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:604:2109] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.861082Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:597:2103] Create Queue# [3:605:2110] targetNodeId# 2 Marker# DSP01 2025-04-07T13:52:08.861112Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-07T13:52:08.862364Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.862558Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.862688Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.862823Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.862866Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.862915Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle 
TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.863058Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:08.863086Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-07T13:52:08.863122Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-07T13:52:08.863359Z node 3 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] bootstrap ActorId# [3:608:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-07T13:52:08.863408Z node 3 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-04-07T13:52:08.863587Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:599:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 14996281283495157827 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-07T13:52:08.864581Z node 3 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-04-07T13:52:08.864625Z node 3 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-07T13:52:08.864923Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-07T13:52:08.865084Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-07T13:52:08.865302Z node 2 :BS_PROXY_PUT INFO: [bba3bffd2e286f4b] bootstrap ActorId# [2:609:2508] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:08.865411Z node 2 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:08.865444Z node 2 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:08.865484Z node 2 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-07T13:52:08.865509Z node 2 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-07T13:52:08.865603Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:08.865744Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-07T13:52:08.865926Z node 2 :BS_PROXY_PUT INFO: [bba3bffd2e286f4b] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-07T13:52:08.865984Z node 2 :BS_PROXY_PUT ERROR: [bba3bffd2e286f4b] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-07T13:52:08.866026Z node 2 :BS_PROXY_PUT NOTICE: [bba3bffd2e286f4b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:08.866338Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:599:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TopicService::ThereAreGapsInTheOffsetRanges >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> YdbProxy::AlterTopic [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> 
ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-07T13:52:00.503179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:00.503499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:00.503756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105f/r3tmp/tmp0SkrZf/pdisk_1.dat 2025-04-07T13:52:00.996368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:01.084237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:01.217393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:01.217553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:01.229032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:01.360388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:01.408558Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:01.408947Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:01.455746Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:01.455888Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:01.457567Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:01.457665Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:01.457718Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:01.458060Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:01.500877Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:01.501116Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:01.501249Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:01.501292Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:01.501329Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:01.501366Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.501937Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:01.502060Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:01.502683Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:01.502748Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.502792Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:01.502837Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:01.502879Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.503061Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:01.503332Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:01.503433Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:01.505220Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.516067Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:01.516202Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:01.723410Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:01.732456Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:01.732569Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.741986Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.742071Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:01.742133Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:01.742461Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:01.742657Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:01.743637Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.743729Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:01.754827Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:01.755382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:01.757497Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:01.757562Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.758306Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:01.758369Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:01.758447Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.759705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.759754Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:01.759851Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:01.759927Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:01.759983Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:01.760069Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.771895Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.774336Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:01.774488Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:01.774547Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:01.786305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.786433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.786513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.791776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:01.798673Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.998020Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.001211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:02.402242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88beqr301v8y502evjqwg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFhMDM0NTUtOGIwNzk1ZTAtYTE3N2ExODctNjNhZGVjM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:02.409073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:02.409364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:02.425438Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:02.425611Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.430178Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:02.431507Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:02.431563Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:02.431732Z no ... lt 281474976715662 datashard 72075186224037893 state Ready 2025-04-07T13:52:08.878860Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 Got TEvSchemaChangedResult from SS at 72075186224037893 2025-04-07T13:52:08.883064Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1204:2991], serverId# [2:1205:2992], sessionId# [0:0:0] 2025-04-07T13:52:08.883243Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1204:2991], serverId# [2:1205:2992], sessionId# [0:0:0] 2025-04-07T13:52:08.884419Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1209:2996], serverId# [2:1210:2997], sessionId# [0:0:0] 2025-04-07T13:52:08.884575Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1209:2996], serverId# [2:1210:2997], sessionId# [0:0:0] 2025-04-07T13:52:08.890584Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1214:3001], serverId# [2:1215:3002], sessionId# [0:0:0] 2025-04-07T13:52:08.890819Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1214:3001], serverId# [2:1215:3002], sessionId# [0:0:0] 2025-04-07T13:52:08.893945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:08.897554Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-07T13:52:08.897667Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:08.897724Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-07T13:52:08.897783Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-07T13:52:08.897970Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:08.898298Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-07T13:52:08.939482Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1238:3021] 2025-04-07T13:52:08.939678Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:08.947327Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:08.947422Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:08.948599Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-07T13:52:08.948657Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-07T13:52:08.948724Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-07T13:52:08.949038Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:08.949087Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-04-07T13:52:08.949166Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:08.949232Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1254:3030] 2025-04-07T13:52:08.949256Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-07T13:52:08.949280Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-04-07T13:52:08.949302Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-07T13:52:08.949640Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-04-07T13:52:08.949701Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-04-07T13:52:08.949768Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1235:3019], serverId# [2:1245:3025], sessionId# [0:0:0] 2025-04-07T13:52:08.950023Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-07T13:52:08.950054Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.950105Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-04-07T13:52:08.950142Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-07T13:52:08.950279Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-07T13:52:08.950427Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-04-07T13:52:08.950484Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-04-07T13:52:08.950871Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-07T13:52:08.973252Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-07T13:52:08.973349Z node 2 
:TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:09.170611Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1264:3040], serverId# [2:1265:3041], sessionId# [0:0:0] 2025-04-07T13:52:09.171277Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-04-07T13:52:09.171315Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-07T13:52:09.171652Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-07T13:52:09.171693Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:09.171729Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-04-07T13:52:09.171911Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-04-07T13:52:09.172035Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:09.173285Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-07T13:52:09.173343Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-04-07T13:52:09.173973Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:09.174290Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.177000Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-04-07T13:52:09.177048Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-07T13:52:09.177986Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037894 step# 4000 txid# 281474976715663} 2025-04-07T13:52:09.178062Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-04-07T13:52:09.178122Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-07T13:52:09.179584Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-07T13:52:09.179623Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-07T13:52:09.179658Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-04-07T13:52:09.179703Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:09.179745Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-07T13:52:09.179796Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-07T13:52:09.188388Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-07T13:52:09.188529Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:09.188582Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-07T13:52:09.188874Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:09.189012Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-07T13:52:09.189072Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.189156Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-04-07T13:52:09.189314Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-07T13:52:09.190693Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-04-07T13:52:09.190746Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-07T13:52:09.196252Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1293:3063], serverId# [2:1294:3064], sessionId# [0:0:0] 2025-04-07T13:52:09.196456Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1293:3063], serverId# [2:1294:3064], sessionId# [0:0:0] 2025-04-07T13:52:09.201941Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1298:3068], serverId# [2:1299:3069], sessionId# [0:0:0] 2025-04-07T13:52:09.202128Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1298:3068], serverId# [2:1299:3069], sessionId# [0:0:0] 2025-04-07T13:52:09.203806Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1303:3073], serverId# [2:1304:3074], sessionId# [0:0:0] 2025-04-07T13:52:09.203992Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1303:3073], serverId# [2:1304:3074], sessionId# [0:0:0] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> 
ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-04-07T13:52:01.359742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:01.360024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:01.360293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001056/r3tmp/tmptHBwag/pdisk_1.dat 2025-04-07T13:52:02.096551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.166970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:02.226366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:02.226477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:02.237958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:02.364459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.406897Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:02.407174Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:02.466150Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:02.466323Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:02.467819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:02.467899Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:02.467947Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:02.468243Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:02.499265Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:02.499465Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:02.499569Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:02.499603Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:02.499637Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:02.499683Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.500163Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:02.500298Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:02.500760Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:02.500854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:02.500892Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:02.500931Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:02.500975Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:02.501169Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:02.501406Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:02.501495Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:02.503158Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.517253Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:02.517377Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:02.722836Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:02.740888Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:02.741010Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.741944Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:02.741991Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:02.742034Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:02.742274Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:02.742426Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:02.743099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:02.743170Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:02.745160Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:02.745553Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:02.747176Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:02.747218Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.747826Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:02.747879Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:02.747936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:02.749008Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:02.749050Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:02.749121Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:02.749181Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:02.749227Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:02.749311Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.752491Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.754212Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:02.754315Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:02.754357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:02.762591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:02.762698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:02.762770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:02.767272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:02.772961Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.990757Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.994076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:03.467068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bfp8es5n69tfq09hbdje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQwY2MwMmUtNzNjNWQwMjItMmE5YWYxMTgtNGNmNjYzMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:03.473109Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:03.473371Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:03.487144Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:03.487284Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:03.490981Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:03.491960Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:03.505394Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:03 ... 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:08.856540Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:08.856612Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.856658Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.856735Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:08.856784Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.856954Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:08.857138Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:08.857208Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:08.858812Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.874955Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:08.875085Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:09.073361Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:09.073833Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:09.073893Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.074271Z node 2 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.074319Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:09.074370Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:09.074643Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:09.074783Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:09.074951Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.075015Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:09.075485Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:09.075893Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.084031Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:09.084117Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.084730Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:09.084810Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:09.084897Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.086247Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.086309Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:09.086363Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:09.086444Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:09.086509Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:09.086607Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.087821Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.095658Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:09.096428Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:09.096509Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:09.107197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:09.107311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:09.107389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:09.114554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:09.121884Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.328560Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.332260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:09.482152Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bnwh637qqjgrkb7mh1wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjJhN2VjYS03OWUwZGZhLWMxZDAzNjE4LTMxZjk2ZWY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:09.482759Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:816:2654], serverId# [2:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:09.482991Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:09.497796Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:09.497972Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.503369Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:824:2661], serverId# [2:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:09.504525Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.517499Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.517590Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.517907Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:09.517951Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:09.518326Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.518392Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.518449Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:09.518526Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.518650Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:824:2661], serverId# [2:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:09.519791Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:09.520229Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:09.520446Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.520499Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:09.520548Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:09.520884Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:09.520963Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.521667Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:09.521933Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:09.522083Z node 2 
:TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:09.522135Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:09.523742Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:09.523800Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:09.524261Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.524305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:09.524358Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:09.524517Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.524576Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.524626Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 24088, MsgBus: 14369 2025-04-07T13:51:45.858147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568588658611113:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:45.858188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a98/r3tmp/tmpxTvIwW/pdisk_1.dat 2025-04-07T13:51:46.599936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:46.608319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:46.608553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:46.621716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24088, node 1 2025-04-07T13:51:46.862399Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:46.862421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:46.862427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:46.862513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14369 TClient is connected to server localhost:14369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:47.655383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.674238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.874547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:48.113906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:48.202755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.856247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568588658611113:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:50.856326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:51.186853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568610133449075:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.202733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.239092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.299934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.356308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.394173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.446111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.507573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:51.606856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568614428416872:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.606934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.607269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568614428416877:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:51.611232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:51.627510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568614428416879:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:53.029457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.121202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.218348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:51:55.547261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 23125, MsgBus: 17610 2025-04-07T13:52:01.015741Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568651782851642:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a98/r3tmp/tmpsBbs2z/pdisk_1.dat 2025-04-07T13:52:01.272302Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:01.434227Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:01.492105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:01.492204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:01.505752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23125, node 2 2025-04-07T13:52:01.750557Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:01.750584Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:01.750593Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:01.750700Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17610 TClient is connected to server localhost:17610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:52:02.509396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:02.537740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.630006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:02.855652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:03.021131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:05.660834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568673257689656:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.677468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.740939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.830324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.889547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.946225Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568651782851642:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:05.948904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:05.962805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.063955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.125266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.204220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568677552657452:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.204308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.204490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568677552657457:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.208234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:52:06.232178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568677552657459:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:52:07.614519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-04-07T13:51:51.168263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568612432673795:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:51.168314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00137d/r3tmp/tmpHAsXnn/pdisk_1.dat 2025-04-07T13:51:51.917380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:51.917489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:51.927237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:51.937358Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2708 TServer::EnableGrpc on GrpcPort 8985, node 1 2025-04-07T13:51:52.521455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:52.521475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:52.521483Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:52.521568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:51:53.380446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:56.168989Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568612432673795:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:56.169043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:56.990720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:58.769140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568644443915255:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00137d/r3tmp/tmpHjBPI4/pdisk_1.dat 2025-04-07T13:51:59.024576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:59.231887Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:59.298017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:59.298100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:59.309948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65456 TServer::EnableGrpc on GrpcPort 64069, node 2 2025-04-07T13:52:00.041362Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:00.041384Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:00.041402Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:00.041488Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:52:00.653369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:00.678722Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:52:03.752805Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568644443915255:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:03.752889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:04.790103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.902876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.046121Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568676971617442:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:06.086939Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00137d/r3tmp/tmpmMCutw/pdisk_1.dat 2025-04-07T13:52:06.246679Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:06.247001Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:06.249786Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:06.259960Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5790 TServer::EnableGrpc on GrpcPort 26017, node 3 2025-04-07T13:52:06.809350Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:06.809372Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:06.809381Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:06.809473Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5790 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:07.273626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:07.442572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-07T13:52:02.898565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:02.898871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:02.899170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103e/r3tmp/tmpDuRlss/pdisk_1.dat 2025-04-07T13:52:03.241243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.283988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:03.343433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:03.343621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:03.358107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:03.487007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.529155Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:03.529465Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:03.572993Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:03.573186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:03.574900Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:03.574987Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:03.575032Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:03.575358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:03.605636Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:03.605862Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:03.606003Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:03.606039Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:03.606074Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:03.606110Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:03.606669Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:03.606785Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:03.607224Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:03.607295Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:03.607334Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:03.607399Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:03.607442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:03.607619Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:03.607859Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:03.607972Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:03.609445Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:03.621367Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:03.621486Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:03.839102Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:03.868989Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:03.869116Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:03.870142Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:03.870194Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:03.870243Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:03.870516Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:03.870688Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:03.871382Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:03.871458Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:03.873863Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:03.880269Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:03.882307Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:03.882368Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:03.883286Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:03.883363Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:03.883436Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:03.884621Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:03.884690Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:03.884777Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:03.884857Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:03.884917Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:03.884999Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:03.888344Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:03.902214Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:03.902351Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:03.902416Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:03.929923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.930083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.930170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:03.935162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:03.941515Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:04.145005Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:04.147650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:04.836642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bgtjert9zsfqc3w2mb3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM4ZjVlOGItNWIzMzE0OTQtYmNjOTY5ODUtNjY1MThkNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:04.844296Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:04.844593Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:04.859370Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:04.859493Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:04.863201Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:04.863370Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:04.877304Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:04 ... 3:52:09.472273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:09.483963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:09.628892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:09.649636Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:631:2537] 2025-04-07T13:52:09.649826Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:09.688246Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:09.688366Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:09.690114Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:09.690195Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:09.690243Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:09.690540Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:09.690591Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:09.690678Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:09.690755Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:648:2546] 2025-04-07T13:52:09.690786Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:09.690816Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:09.690842Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.691574Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:09.691654Z node 2 :TX_DATASHARD 
DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:09.691714Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:09.691773Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.691807Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.691844Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:09.691880Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.692011Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:09.692177Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:09.692249Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:09.693645Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.704491Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:09.704592Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:09.907178Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:09.907542Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:09.907587Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.907920Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.907964Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:09.908009Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:09.908220Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:09.908334Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:09.908466Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:09.908522Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:09.908945Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:09.909285Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:09.911441Z node 2 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:09.911491Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.911932Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:09.911990Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:09.912045Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.913125Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:09.913170Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:09.913213Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:09.913270Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:09.913317Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:09.913397Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.914327Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.915991Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:09.916514Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:09.916565Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:09.921312Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2587], serverId# [2:704:2588], sessionId# [0:0:0] 2025-04-07T13:52:09.921453Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.945256Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.945325Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.945606Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:703:2587], serverId# [2:704:2588], sessionId# [0:0:0] 2025-04-07T13:52:09.947339Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:709:2593], serverId# [2:710:2594], sessionId# [0:0:0] 2025-04-07T13:52:09.947481Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.947735Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.947784Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.947970Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:709:2593], serverId# [2:710:2594], sessionId# [0:0:0] 2025-04-07T13:52:09.952197Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 
72075186224037888, clientId# [2:714:2598], serverId# [2:715:2599], sessionId# [0:0:0] 2025-04-07T13:52:09.952336Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.952498Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.952535Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.952737Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:714:2598], serverId# [2:715:2599], sessionId# [0:0:0] 2025-04-07T13:52:09.954354Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:719:2603], serverId# [2:720:2604], sessionId# [0:0:0] 2025-04-07T13:52:09.954488Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.954635Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.954676Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.954840Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:719:2603], serverId# [2:720:2604], sessionId# [0:0:0] 2025-04-07T13:52:09.956406Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:724:2608], serverId# [2:725:2609], sessionId# [0:0:0] 2025-04-07T13:52:09.956534Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.957409Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.957515Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.957774Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:724:2608], serverId# [2:725:2609], sessionId# [0:0:0] 2025-04-07T13:52:09.959732Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:729:2613], serverId# [2:730:2614], sessionId# [0:0:0] 2025-04-07T13:52:09.959871Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.960072Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.960119Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.960300Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:729:2613], serverId# [2:730:2614], sessionId# [0:0:0] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> YdbOlapStore::LogNonExistingUserId [GOOD] >> YdbOlapStore::LogPagingAfter >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List 
[GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TMaintenanceApiTest::ActionReason [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-04-07T13:52:04.500159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:04.500531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:04.509177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001025/r3tmp/tmp1LNu3l/pdisk_1.dat 2025-04-07T13:52:04.899229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.944892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:04.996618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:04.996809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:05.008360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:05.133149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.178006Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:05.178341Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:05.234860Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:05.234978Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:05.236272Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:05.236337Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:05.236389Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:05.236749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:05.264421Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:05.264689Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:05.264874Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:05.264917Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:05.264968Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:05.265038Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.265672Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:05.265815Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:05.266562Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:05.266648Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.266695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:05.266745Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:05.266791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.267012Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:05.267339Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:05.267447Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:05.269427Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.280648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:05.280838Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:05.476009Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:05.490795Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:05.490922Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.491965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.492024Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:05.492095Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:05.492394Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:05.492572Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:05.499506Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.499634Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:05.507755Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:05.508356Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:05.510433Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:05.510492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.511176Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:05.511241Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:05.511310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.512493Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.512539Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:05.512624Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:05.521880Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:05.522030Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:05.522194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.530235Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.532466Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:05.534245Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:05.534344Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:05.545292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.545437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.545522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.550778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:05.557333Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.757506Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.760325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:06.194782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bjd6b39d3crm154n81tr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y1NWU3NmQtZDc3ZTM2YS05OTBjYTExOC03MzE5ZDA4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:06.204997Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:06.205342Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:06.224728Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:06.224883Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.229618Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:06.306745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88bk2sav14r8qfb34psyrc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFjZGYyM2ItNjNmOWZmNGUtZmI1NGE5ZGEtZGRkOGY5ZQ== ... BUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:06.338741Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:06.338964Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:06.339006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:06.339058Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:06.339363Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:06.339446Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:06.340163Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-07T13:52:06.340390Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:06.340564Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-07T13:52:06.340622Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-04-07T13:52:06.342341Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:06.342382Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-07T13:52:06.342842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:06.342885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:06.342923Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:06.343070Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-07T13:52:06.343122Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:06.343176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:10.435481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:10.435881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:10.435968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001025/r3tmp/tmpRDH4ZT/pdisk_1.dat 2025-04-07T13:52:10.716436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:10.747965Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:10.796702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:10.796832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:10.808371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:10.922176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:10.947376Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:631:2537] 2025-04-07T13:52:10.947624Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:11.002482Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:11.002637Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:11.004394Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:11.004498Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:11.004559Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:11.004964Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:11.005042Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:11.005161Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:11.005266Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:648:2546] 2025-04-07T13:52:11.005307Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:11.005351Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:11.005391Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:11.006337Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:11.006452Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:11.006540Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:11.006621Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:11.006770Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:11.006815Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:11.006869Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:11.007054Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:11.007275Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:11.007367Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:11.009088Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:11.020636Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:11.020826Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:11.208315Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:11.208719Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:11.208783Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:11.209129Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:11.209173Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:11.209212Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:11.209447Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:11.209563Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:11.209684Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:11.209734Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:11.210109Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:11.210469Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:11.212531Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:11.212580Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:11.213148Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:11.213214Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:11.213279Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:11.214557Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:11.214614Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:11.214668Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:11.214743Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:11.214803Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:11.214890Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:11.216108Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:11.218052Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:11.218685Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:11.218744Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:11.224504Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2587], serverId# [2:704:2588], sessionId# [0:0:0] 2025-04-07T13:52:11.224646Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-04-07T13:52:11.224901Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:703:2587], serverId# [2:704:2588], sessionId# [0:0:0] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 4827, MsgBus: 15869 2025-04-07T13:51:47.917949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568597039061328:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:47.918002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8f/r3tmp/tmpELAsOy/pdisk_1.dat 2025-04-07T13:51:48.527618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-07T13:51:48.527709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:48.535226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:48.537032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4827, node 1 2025-04-07T13:51:48.820747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:48.820766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:48.820780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:48.820882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15869 TClient is connected to server localhost:15869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:50.059533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:50.077999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:50.094472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:50.293034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:50.507372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:50.597020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:52.924833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568597039061328:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:52.924912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:53.498756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568622808866815:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:53.498854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:53.906429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:53.976163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:54.033627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:54.113630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:54.181636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:54.301603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:54.424174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568627103834622:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:54.424264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:54.424693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568627103834628:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:54.428439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:54.442575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568627103834630:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:56.023309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:51:56.127279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20573, MsgBus: 17460 2025-04-07T13:52:01.047389Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568651833942017:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8f/r3tmp/tmpIVfgXK/pdisk_1.dat 2025-04-07T13:52:01.334946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:01.550615Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:01.607177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:01.607262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:01.608455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20573, node 2 2025-04-07T13:52:01.841223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:01.841243Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:01.841250Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:01.841337Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17460 TClient is connected to server localhost:17460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:02.562081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:52:02.574720Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:52:02.911681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:03.029497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:03.281757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:03.362419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:05.767889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568673308780022:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.767984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.832128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.916791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.966226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.015448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.045699Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568651833942017:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:06.045796Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:06.089698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.165253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.244114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568677603747821:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.244209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.244406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568677603747826:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.247821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:52:06.268898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568677603747828:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:52:07.510179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:52:07.569236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal >> TCmsTest::RequestReplaceBrokenDevices >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> ExternalBlobsMultipleChannels::WithCompaction >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> TPersQueueTest::WriteAfterAlter [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] |85.8%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 8358, MsgBus: 9657 2025-04-07T13:51:32.601991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568530035323553:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:32.602116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ae2/r3tmp/tmpUvgfBS/pdisk_1.dat 2025-04-07T13:51:33.314714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:33.314821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:33.321381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:51:33.351116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8358, node 1 2025-04-07T13:51:33.533283Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:33.533301Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:33.533311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:33.533391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9657 TClient is connected to server localhost:9657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:34.087471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:34.135982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:51:34.290734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:34.663825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:34.800112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:37.010513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568547215194318:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.018559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.052946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.103327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.143379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.197878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.237588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.314957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:37.378245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568551510162111:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.378329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.378550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568551510162116:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:37.382978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:37.403526Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:51:37.404837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568551510162118:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:37.556012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568530035323553:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:37.556073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.115940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:41.565419Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88asstdpx6kq1tkypzegfs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJlNDdkZDUtZTI0Y2NiYjEtYjllYjU3ZGYtZDJhODZlZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:41.590098Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWJlNDdkZDUtZTI0Y2NiYjEtYjllYjU3ZGYtZDJhODZlZTA=, ActorId: [1:7490568560100097761:2523], ActorState: ExecuteState, TraceId: 01jr88asstdpx6kq1tkypzegfs, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21859, MsgBus: 15647 2025-04-07T13:51:43.885584Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568579665329073:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:43.888025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ae2/r3tmp/tmpOF6CeM/pdisk_1.dat 2025-04-07T13:51:44.025730Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:44.045715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:44.045793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:44.046694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21859, node 2 2025-04-07T13:51:44.188948Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:44.188965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:44.188969Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:44.189041Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15647 TClient is connected to server localhost:15647 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:44.725741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:44.737669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:51:44.749037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:44.837786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:44.999164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:45.073016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:47.909991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568596845199944:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:47.910079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:47.959621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.014771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.076485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.155210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.193609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.244392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:48.359559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568601140167745:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.359717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.361417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568601140167751:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:48.366134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:48.397077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568601140167753:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:51:49.011316Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568579665329073:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:49.011879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:49.997264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:58.569782Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88baf3ca7hbpda3smzrbk7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:51:58.570544Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, ActorId: [2:7490568609730103448:2522], ActorState: ExecuteState, TraceId: 01jr88baf3ca7hbpda3smzrbk7, Create QueryResponse for error on request, msg: 2025-04-07T13:51:59.008856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:51:59.008882Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:00.507684Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-07T13:52:00.507730Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-07T13:52:00.540898Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-07T13:52:00.540938Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-07T13:52:00.544419Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-07T13:52:00.729963Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-07T13:52:00.729996Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-07T13:52:00.745096Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-07T13:52:00.745135Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-07T13:52:00.745151Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-07T13:52:00.745441Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-07T13:52:00.752826Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-07T13:52:00.752864Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-07T13:52:09.574239Z node 2 
:KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88bmtn427mqzeeax31btf2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:52:09.574533Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, ActorId: [2:7490568609730103448:2522], ActorState: ExecuteState, TraceId: 01jr88bmtn427mqzeeax31btf2, Create QueryResponse for error on request, msg: 2025-04-07T13:52:10.446304Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-07T13:52:10.446349Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found 2025-04-07T13:52:10.453369Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2025-04-07T13:52:10.588940Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037943 not found 2025-04-07T13:52:10.590507Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-07T13:52:10.619020Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037942 not found 2025-04-07T13:52:10.619060Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037941 not found 2025-04-07T13:52:11.024230Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88bpfs3d92wvgfjg0mn8rk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:52:11.024532Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, ActorId: [2:7490568609730103448:2522], ActorState: ExecuteState, TraceId: 01jr88bpfs3d92wvgfjg0mn8rk, Create QueryResponse for error on request, msg: 2025-04-07T13:52:12.533168Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88bs6m9gbkkh3bqphetmja, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T13:52:12.533473Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVmOTJmMmQtYjkxNGVhMjItOGQ1ODAwZjktMWI0NTMwM2Q=, ActorId: [2:7490568609730103448:2522], ActorState: ExecuteState, TraceId: 01jr88bs6m9gbkkh3bqphetmja, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-04-07T13:52:00.070011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:00.070404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:00.070681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001047/r3tmp/tmpZG4EyL/pdisk_1.dat 2025-04-07T13:52:00.599929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:00.654741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:00.730443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:00.730570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:00.745775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:00.876507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:00.934089Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:00.934342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:01.016943Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:01.017073Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:01.018612Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:01.018700Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:01.018746Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:01.019064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:01.126659Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:01.126834Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:01.126944Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:01.126978Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:01.127016Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:01.127052Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.127439Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:01.127540Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:01.127871Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:01.127933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.127968Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:01.128012Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:01.128044Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.128193Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:01.128528Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:01.128615Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:01.146553Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.161195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:01.161298Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:01.362329Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:01.383006Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:01.383105Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.384055Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.384105Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:01.384147Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:01.384400Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:01.384560Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:01.401475Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:01.401595Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:01.403594Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:01.404022Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:01.417949Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:01.418028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.418637Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:01.418689Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:01.418748Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.419791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:01.419836Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:01.419896Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:01.419956Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:01.420000Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:01.420078Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:01.429562Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.431553Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:01.431667Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:01.431721Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:01.467825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.467975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.468065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:01.485190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:01.491006Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.732192Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:01.735120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:02.512988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88beds12jagqwzkcznwm75, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg2NWEzM2QtMmQ0YWE2YjgtZjc3YmIxNDItNWM3Y2Y0Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:02.519842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:02.520145Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:02.533387Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:02.533543Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.537782Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:02.539000Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:02.550724Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:02 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:12.134391Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:12.134853Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.134906Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:12.134963Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:12.135014Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.135164Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:12.135441Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:12.135540Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:12.137627Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:12.148452Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:12.148600Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:12.335152Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:12.335890Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:12.335956Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:12.338450Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.338513Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:12.338585Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:12.338892Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:12.339074Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:12.339330Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.339397Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:12.339893Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:12.340360Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:12.342698Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:12.342750Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:12.343353Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:12.343407Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:12.343485Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.344323Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.344363Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:12.344406Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:12.344473Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:12.344528Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:12.344600Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:12.345094Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:12.346725Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:12.346790Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:12.347533Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:12.356820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:12.356954Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:12.357038Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:12.362951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:12.369145Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:12.578455Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:12.582202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:12.747710Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bs222h1jtgjny00x8rnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWYxZmM0ZmEtNDY0NWM2ZDYtZWRmYzA3MGMtZTg5NDQ0MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:12.748345Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:12.748592Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:12.761711Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:12.761859Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:12.766676Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:12.768076Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:12.782733Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:12.782823Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:12.783141Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:12.783198Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:12.783568Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.783644Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:12.783702Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:12.783770Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.783870Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:12.784957Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:12.785377Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:12.785595Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.785643Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:12.785729Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:12.786006Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:12.786075Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.786762Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:12.787102Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:12.787241Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:12.787289Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:12.859217Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:12.859298Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:12.859825Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:12.859873Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:12.859918Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:12.860072Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:12.860133Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:12.860186Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> BasicUsage::FallbackToSingleDb |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> BasicUsage::GetAllStartPartitionSessions >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::WriteSessionNoAvailableDatabase >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> BasicUsage::WriteSessionWriteInHandlers >> BasicUsage::PropagateSessionClosed >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> BasicUsage::WriteSessionCloseWaitsForWrites >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> BasicUsage::RetryDiscoveryWithCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1392:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1395:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1396:2057] recipient: [4:1394:3416] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1398:2057] recipient: [4:1394:3416] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:1397:3417] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1467:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1397:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1399:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1401:2057] recipient: [5:1400:3421] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1403:2057] recipient: [5:1400:3421] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:1402:3422] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1472:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1397:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1400:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1401:2057] recipient: [6:1399:3421] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1403:2057] recipient: [6:1399:3421] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:1402:3422] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1472:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1400:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1403:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1404:2057] recipient: [7:1402:3423] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1406:2057] recipient: [7:1402:3423] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:1405:3424] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1476:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1402:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1405:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1406:2057] recipient: [8:1404:3425] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1408:2057] recipient: [8:1404:3425] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:1407:3426] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1478:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1402:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1405:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1406:2057] recipient: [9:1404:3425] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1408:2057] recipient: [9:1404:3425] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:1407:3426] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1477:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1405:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1407:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1409:2057] recipient: [10:1408:3427] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1411:2057] recipient: [10:1408:3427] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:1410:3428] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1480:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:141:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:144:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:145:2057] recipient: [13:143:2166] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:147:2057] recipient: [13:143:2166] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:146:2167] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:216:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:141:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:144:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:145:2057] recipient: [14:143:2166] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:147:2057] recipient: [14:143:2166] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:146:2167] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:216:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:268:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:271:2057] recipient: [15:270:2292] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:272:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:274:2057] recipient: [15:270:2292] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:273:2293] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:343:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:273:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:276:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:277:2057] recipient: [16:275:2297] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:279:2057] recipient: [16:275:2297] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:278:2298] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:348:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:273:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:276:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:277:2057] recipient: [17:275:2297] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:279:2057] recipient: [17:275:2297] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:278:2298] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:348:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:276:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:279:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:280:2057] recipient: [18:278:2299] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:282:2057] recipient: [18:278:2299] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:281:2300] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:329:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:278:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:281:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:282:2057] recipient: [19:280:2301] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:284:2057] recipient: [19:280:2301] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:283:2302] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:353:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:278:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:280:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:282:2057] recipient: [20:281:2301] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:284:2057] recipient: [20:281:2301] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:283:2302] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:353:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:281:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:284:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:285:2057] recipient: [21:283:2303] Leader for TabletID 72057594037927937 is [21:286:2304] sender: [21:287:2057] recipient: [21:283:2303] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:286:2304] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 65492, MsgBus: 18589 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8e/r3tmp/tmpTuG23M/pdisk_1.dat 2025-04-07T13:51:48.034884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568596979245494:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:48.034951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:51:48.238272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:48.259527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:48.259628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:48.271742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65492, node 1 2025-04-07T13:51:48.503355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:48.503388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:48.503397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:48.503516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18589 TClient is connected to server localhost:18589 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:49.161828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.191031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:49.378319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:49.521133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:49.583515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:52.133304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568614159116233:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.159210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.240396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.340782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.411564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.469840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.524037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.580992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:52.690951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568596979245494:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:52.691119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:52.712838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568618454084036:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.712960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.713372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568618454084041:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:52.717113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:52.777577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568618454084043:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:54.128490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 13815, MsgBus: 3905 2025-04-07T13:52:00.390108Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568652758338408:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:00.390141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8e/r3tmp/tmpf4YaYd/pdisk_1.dat 2025-04-07T13:52:00.778798Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:00.826559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:00.826642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:00.849831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13815, node 2 2025-04-07T13:52:00.989418Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:00.989437Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:00.989447Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:00.989543Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3905 TClient is connected to server localhost:3905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:52:01.571528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:01.617552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:52:01.699410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:01.917480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:02.005597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:04.664821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568669938209303:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:04.664930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:04.707833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.791644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.886452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.980972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.074497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.163600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.326097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568674233177112:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.326260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.326726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568674233177118:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.331699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:52:05.360945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568674233177120:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:52:05.392930Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568652758338408:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:05.393239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:07.046517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> BasicUsage::BasicWriteSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-04-07T13:52:07.881480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:07.881768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:07.882008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc9/r3tmp/tmplmuH9C/pdisk_1.dat 2025-04-07T13:52:08.251345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:08.335422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:08.386514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:08.386621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:08.397831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:08.542467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:08.618498Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:08.618815Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:08.711738Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:08.711866Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:08.713335Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:08.713413Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:08.713464Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:08.713784Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:08.741273Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:08.741434Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:08.741527Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:08.741555Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:08.741580Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:08.741631Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.741978Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:08.742063Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:08.742394Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:08.742434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.742459Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.742491Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:08.742518Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.742640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:08.742855Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:08.742925Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:08.744192Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.754907Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:08.755020Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:08.961013Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:08.966627Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:08.966709Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.967528Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.967574Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:08.967613Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:08.967821Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:08.967971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:08.968633Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.968719Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:08.970647Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:08.971031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.972833Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:08.972881Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.973498Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:08.973557Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:08.973613Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.974751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.974793Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:08.974874Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:08.974934Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:08.974980Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:08.975057Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.978700Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.980505Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:08.980588Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:08.980641Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:08.989537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.989662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.989740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.994374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:09.000886Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.203454Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:09.206598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:09.607524Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bnrv50c7mczwak5qda7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYwOTExZDQtYTgxYmJhODEtNzk0NDkyYTgtOWQ0ZTVlZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:09.614098Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:09.614345Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:09.629101Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:09.629215Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.632510Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:09.633448Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.645826Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09 ... X_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:13.880734Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:13.880832Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:13.880879Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:13.880925Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:13.880979Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:13.881144Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:13.881377Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:13.881472Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:13.883417Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:13.894837Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:13.894958Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:14.092771Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:14.093156Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:14.093199Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:14.093490Z node 2 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:14.093543Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:14.093585Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:14.093803Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:14.093913Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:14.094070Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:14.094117Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:14.094499Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:14.094889Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:14.097199Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:14.097267Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:14.097924Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:14.098001Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:14.098078Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:14.099375Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:14.099442Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:14.099495Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:14.099564Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:14.099627Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:14.099722Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:14.100994Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:14.103288Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:14.104047Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:14.104108Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:14.113051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:14.113160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:14.113240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:14.119415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:14.126255Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:14.354534Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:14.357645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:14.504488Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88btrz3kmrw7dqh375xwnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjEzY2FhY2QtODRkYTQxZGEtZmI4ZjFjMzQtNjBhMTZkNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:14.505164Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:816:2654], serverId# [2:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:14.505452Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:14.518361Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:14.518544Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:14.522875Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:824:2661], serverId# [2:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:14.523820Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:14.535374Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:14.535452Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:14.535714Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:14.535757Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:14.536068Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:14.536124Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:14.536172Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:14.536230Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:14.536315Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:824:2661], serverId# [2:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:14.537366Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:14.537723Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:14.537881Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:14.537917Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:14.537961Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:14.538184Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:14.538241Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:14.538819Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:14.539046Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:14.539158Z 
node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:14.539202Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:14.540706Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:14.540752Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:14.541113Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:14.541145Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:14.541176Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:14.541287Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:14.541340Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:14.541376Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> TCmsTest::StateStorageRollingRestart [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> ExternalBlobsMultipleChannels::SingleChannel >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-04-07T13:52:04.370303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:04.370545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:04.370727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100f/r3tmp/tmpGijYtw/pdisk_1.dat 2025-04-07T13:52:04.782157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.840134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:04.902795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:04.902962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:04.916094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:05.050985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:05.109521Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:05.109817Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:05.149603Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:05.149768Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:05.151441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:05.151539Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:05.151607Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:05.151955Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:05.177838Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:05.178058Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:05.178189Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:05.178234Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:05.178299Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:05.178341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.178878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:05.179019Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:05.179517Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:05.179591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.179634Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:05.179686Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:05.179735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.180064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:05.180398Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:05.180523Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:05.182451Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.193424Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:05.193557Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:05.419079Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:05.439278Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:05.439384Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.440733Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.440804Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:05.440859Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:05.441173Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:05.441388Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:05.442078Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:05.442174Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:05.444192Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:05.444687Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:05.446570Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:05.446625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.447299Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:05.447369Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:05.447434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.448573Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:05.448621Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:05.448723Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:05.448788Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:05.448842Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:05.448925Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:05.452345Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.454384Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:05.454500Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:05.454546Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:05.464607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.465148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.465277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:05.471981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:05.478297Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.688523Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:05.694030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:06.079295Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bjap1a98z2a3pe5ga33g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMwNTRiZWMtNTk0ODhhNTgtZjUyYjVmOGMtMzRjMDJkNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:06.084764Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:06.085028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:06.098248Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:06.098401Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.102417Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:06.103504Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:06.115142Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:06 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:15.516145Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:15.516610Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:15.516683Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:15.516725Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:15.516770Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:15.516924Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:15.517190Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:15.517285Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:15.519292Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:15.531931Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:15.532058Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:15.732007Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:15.732766Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:15.732827Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:15.734296Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:15.734352Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:15.734958Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:15.735262Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:15.735435Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:15.735671Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:15.735739Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:15.736216Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:15.736733Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:15.738417Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:15.738467Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:15.739899Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:15.739973Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:15.740042Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:15.741343Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:15.741389Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:15.741435Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:15.741513Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:15.741576Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:15.741667Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:15.742186Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:15.744315Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:15.744391Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:15.746393Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:15.757230Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:15.757348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:15.757427Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:15.857383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:15.863346Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:16.062555Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:16.065527Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:16.205335Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bwcbaf2y7j44yf5fs0dd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWY4MmU2MTgtZjIxMmZiMWEtMTVhN2E3NTctZGQ2YWNlZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:16.205952Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:16.206177Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:16.220214Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:16.220375Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:16.224501Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:16.226487Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:16.237967Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:16.238065Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:16.238304Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:16.238346Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:16.238650Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:16.238702Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:16.238747Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:16.238805Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:16.238898Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:16.239884Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:16.240253Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:16.240441Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:16.240483Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:16.240526Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:16.241098Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:16.241168Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:16.241846Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:16.242093Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:16.242215Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:16.242258Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:16.292219Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:16.292297Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:16.292834Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:16.292878Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:16.292922Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:16.293067Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:16.293130Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:16.293175Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> ExternalBlobsMultipleChannels::Simple |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-04-07T13:52:01.864268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:01.864542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:01.877132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001068/r3tmp/tmpKGHif6/pdisk_1.dat 2025-04-07T13:52:02.303122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.390670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:02.447978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:02.448148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:02.460199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:02.589610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:02.667951Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:653:2553] 2025-04-07T13:52:02.668264Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:02.737690Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:02.738166Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2025-04-07T13:52:02.738381Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:02.758988Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:02.760789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:02.760912Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:02.760966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:02.761370Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:02.809764Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:02.810112Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:02.810289Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2573] 2025-04-07T13:52:02.810332Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:02.810373Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:02.810431Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:02.811624Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:02.811728Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T13:52:02.812395Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:644:2548], serverId# [1:676:2566], sessionId# [0:0:0] 2025-04-07T13:52:02.812731Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:02.812781Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:02.812827Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:02.812875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:02.812980Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:02.813166Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:02.813433Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:02.813541Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:02.814037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:02.815687Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:52:02.815770Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:52:02.815819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:52:02.816084Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:02.816135Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:52:02.816367Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:02.816445Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:694:2576] 2025-04-07T13:52:02.816473Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:52:02.816501Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:52:02.816526Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:52:02.817379Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:52:02.817462Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:52:02.818604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:02.818647Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:02.818679Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:52:02.818717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:02.819364Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:645:2549], serverId# [1:681:2570], sessionId# [0:0:0] 2025-04-07T13:52:02.819780Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:661:2557] 2025-04-07T13:52:02.819980Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:52:02.835537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:02.835783Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:52:02.835873Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T13:52:02.837789Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:02.837906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:02.839269Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T13:52:02.839344Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T13:52:02.839394Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-07T13:52:02.839688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:02.839737Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T13:52:02.839831Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:02.839895Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:713:2585] 2025-04-07T13:52:02.839923Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T13:52:02.839947Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T13:52:02.839974Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:52:02.840525Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:02.840616Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:02.841106Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T13:52:02.841192Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T13:52:02.841297Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:52:02.841333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:02.841364Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T13:52:02.841395Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:52:02.852681Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:02.852873Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:02.852991Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:52:02.853028Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:02.906798Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:646:2550], serverId# [1:722:2593], sessionId# [0:0:0] 2025-04-07T13:52:02.907008Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:02.907252Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-07T13:52:02.907377Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-07T13:52:02.907996Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:02.923466Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:02.923582Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:03.099672Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2607], serverId# [1:740:2611], sessionId# [0:0:0] 2025-04-07T13:52:03.099897Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:737:2608], serverId# [1:741:2612], sessionId# [0:0:0] 2025-04-07T13:52:03.100213Z node 1 :TX_DATA ... 75186224037892 2025-04-07T13:52:16.630458Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:16.630485Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-07T13:52:16.630513Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T13:52:16.632327Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-07T13:52:16.632446Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715664 2025-04-07T13:52:16.632478Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-07T13:52:16.633555Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715664 at state Ready 2025-04-07T13:52:16.647007Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715664 2025-04-07T13:52:16.647378Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-04-07T13:52:16.649340Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:52:16.649394Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-04-07T13:52:16.657003Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:52:16.657053Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-04-07T13:52:16.659113Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:52:16.659167Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-04-07T13:52:16.663291Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:52:16.663332Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037889, table# 8, finished edge# 0, front# 0 2025-04-07T13:52:16.664099Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:52:16.664136Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-07T13:52:16.664569Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715664 2025-04-07T13:52:16.664835Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715664 2025-04-07T13:52:16.664908Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715664 2025-04-07T13:52:16.664943Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715664 2025-04-07T13:52:16.664973Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715664 2025-04-07T13:52:16.667500Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715664 2025-04-07T13:52:16.673195Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715664 2025-04-07T13:52:16.673276Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715664 2025-04-07T13:52:16.673308Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715664 2025-04-07T13:52:16.673338Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715664 2025-04-07T13:52:16.673523Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715664 2025-04-07T13:52:16.674587Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715664 2025-04-07T13:52:16.674790Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-04-07T13:52:16.674899Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-04-07T13:52:16.675214Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1178:2904], serverId# [3:1179:2905], sessionId# [0:0:0] 2025-04-07T13:52:16.675254Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1177:2903], serverId# [3:1180:2906], sessionId# [0:0:0] 2025-04-07T13:52:16.675368Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-04-07T13:52:16.676039Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-04-07T13:52:16.678020Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 
2025-04-07T13:52:16.678101Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-07T13:52:16.678178Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:16.678271Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-07T13:52:16.684783Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1183:2909] 2025-04-07T13:52:16.684845Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-07T13:52:16.684913Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-07T13:52:16.684959Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T13:52:16.685382Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2025-04-07T13:52:16.685858Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-07T13:52:16.685909Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T13:52:16.686476Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T13:52:16.686510Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:16.686539Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T13:52:16.686577Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T13:52:16.686662Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1178:2904], serverId# [3:1179:2905], sessionId# [0:0:0] 2025-04-07T13:52:16.687032Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715664 2025-04-07T13:52:16.687084Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-04-07T13:52:16.687142Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:16.687202Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-07T13:52:16.687249Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1185:2911] 2025-04-07T13:52:16.687274Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-07T13:52:16.687300Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-07T13:52:16.687324Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-07T13:52:16.687594Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715664 2025-04-07T13:52:16.687904Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-07T13:52:16.687933Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-07T13:52:16.688079Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T13:52:16.688106Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:16.688131Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-07T13:52:16.688161Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T13:52:16.688336Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1177:2903], serverId# [3:1180:2906], sessionId# [0:0:0] 2025-04-07T13:52:16.688487Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-07T13:52:16.688532Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-07T13:52:16.688781Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-07T13:52:16.688808Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-07T13:52:16.721243Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2025-04-07T13:52:16.724036Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-07T13:52:16.728212Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T13:52:16.728273Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-07T13:52:16.729162Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1050:2803], serverId# [3:1051:2804], sessionId# [0:0:0] 2025-04-07T13:52:16.729421Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:16.729463Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-04-07T13:52:16.729724Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2025-04-07T13:52:16.729785Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T13:52:16.729826Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> Balancing::Balancing_OneTopic_TopicApi >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-04-07T13:52:05.871891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:05.872251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:05.872544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff0/r3tmp/tmpE59anN/pdisk_1.dat 2025-04-07T13:52:06.264563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.309734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:06.361664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:06.361875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:06.373576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:06.558436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:06.626187Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:06.626474Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:06.667825Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:06.667965Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:06.669662Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:06.669780Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:06.669838Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:06.670181Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:06.701105Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:06.701324Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:06.701449Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:06.701495Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:06.701542Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:06.701584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.702083Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:06.702200Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:06.702653Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:06.702711Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:06.702756Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:06.702803Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:06.702844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:06.703050Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:06.703334Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:06.703441Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:06.705182Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:06.717348Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:06.717483Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:06.927272Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:06.938745Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:06.938865Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.939858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:06.939914Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:06.939980Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:06.940283Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:06.940461Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:06.944134Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:06.944246Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:06.946603Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:06.947158Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:06.952437Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:06.952528Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.953380Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:06.953451Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:06.953533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:06.954767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:06.954832Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:06.954905Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:06.954972Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:06.955034Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:06.955127Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:06.970819Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:06.973124Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:06.973271Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:06.973327Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:06.983805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.983932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.984015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:06.989191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:06.995415Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:07.210558Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:07.213565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:07.596075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bkt557yvkvy2ae1zy96z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJjYzU3N2QtNmQ5YjJjZjktODNjNjRmMWUtYjQ4MWRhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:07.602990Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:07.603302Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:07.616186Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:07.616346Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:07.621152Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:07.622400Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:07.634610Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:07 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:17.150623Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:17.150981Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.151055Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:17.151095Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:17.151137Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.151272Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:17.151469Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:17.151549Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:17.155869Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:17.167356Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:17.167465Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:17.391181Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:17.391756Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:17.391817Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:17.393149Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.393202Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:17.393243Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:17.393491Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:17.393628Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:17.393813Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.393865Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:17.394302Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:17.394699Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:17.401796Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:17.401864Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:17.402676Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:17.402739Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:17.402816Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.403914Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.403972Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:17.404019Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:17.404097Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:17.404155Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:17.404249Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:17.408909Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:17.411404Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:17.411484Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:17.412342Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:17.429532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.429656Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.429768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.441436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:17.453261Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:17.684795Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:17.687706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:17.857031Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88by0f3qq26s9mkvredmyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJkMjAyNDctOWU0NDAxZTgtM2JkYTdmNTEtNjEyOTQ4Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:17.857635Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:17.857830Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:17.871154Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:17.871289Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:17.875098Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:17.876206Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:17.887837Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:17.887925Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:17.888186Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:17.888231Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:17.888515Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.888562Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:17.888608Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:17.888701Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.888785Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:17.889725Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:17.890028Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:17.890209Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.890263Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:17.890314Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:17.890555Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:17.890617Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.891211Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:17.891935Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:17.892114Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:17.892158Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:17.966268Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:17.966349Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:17.966824Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:17.966866Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:17.966902Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:17.967033Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:17.967096Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:17.967137Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-07T13:50:11.301201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568182361732177:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:11.301247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00198f/r3tmp/tmpDFY2Av/pdisk_1.dat 2025-04-07T13:50:12.210742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:12.210821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:12.220276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:12.226267Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62726, node 1 2025-04-07T13:50:12.598689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:12.598706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:12.598711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:12.598784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25395 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:13.245082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.255379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:13.256636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:13.261807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:13.262018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:13.262032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:13.264252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:13.264267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:13.266352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.269054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:13.275384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033813319, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:13.275414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:13.275588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:13.276953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:13.277100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:13.277136Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:13.277200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2025-04-07T13:50:13.277228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:13.277268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:13.280544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:13.280580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:13.280617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:13.284568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:13.540272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:45646" , at schemeshard: 72057594046644480 2025-04-07T13:50:13.545903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.546321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:50:13.546356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T13:50:13.546418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:50:13.546463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T13:50:13.546509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T13:50:13.546551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T13:50:13.546827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-07T13:50:13.549231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:13.549488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:13.549508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.549626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:50:13.549660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-07T13:50:13.553413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-07T13:50:13.553531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-07T13:50:13.553652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:13.553661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:13.553768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T13:50:13.553816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:13.553825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490568186656700113:2391], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-07T13:50:13.553833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490568186656700113:2391], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-07T13:50:13.553857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:13.553885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-07T13:50:13.554241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChann ... 
24037888, partId: 0 2025-04-07T13:51:27.306553Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037888 TxId: 281474976715659 MinStep: 0 Step: 1744033887288 2025-04-07T13:51:27.306596Z node 40 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037888 TxId: 281474976715659 MinStep: 0 Step: 1744033887288 2025-04-07T13:51:27.306715Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037888 TxId: 281474976715659 2025-04-07T13:51:27.306727Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037888, partId: 0 2025-04-07T13:51:27.306788Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037888 TxId: 281474976715659 2025-04-07T13:51:27.306922Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1744033887288 2025-04-07T13:51:27.306933Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2025-04-07T13:51:27.307022Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1744033887288 2025-04-07T13:51:27.307062Z node 40 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1744033887288 2025-04-07T13:51:27.307177Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2025-04-07T13:51:27.307189Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2025-04-07T13:51:27.307248Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2025-04-07T13:51:27.311895Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312018Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312094Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312144Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312184Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312225Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 
72057594046644480 2025-04-07T13:51:27.312265Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312303Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312397Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.312425Z node 40 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T13:51:27.312559Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T13:51:27.312575Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2025-04-07T13:51:27.312609Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715659, ready parts: 1/1, is published: true 2025-04-07T13:51:27.312691Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [40:7490568510977566050:2312] message: TxId: 281474976715659 2025-04-07T13:51:27.312722Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2025-04-07T13:51:27.312759Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T13:51:27.312773Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715659:0 2025-04-07T13:51:27.312957Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-07T13:51:27.346950Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/LogStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "log2" TtlSettings { Enabled { ColumnName: "saved_at" ExpireAfterSeconds: 2000 Tiers { ApplyAfterSeconds: 2000 Delete { } } } } SchemaPresetName: "default" ColumnShardCount: 4 Sharding { HashSharding { Function: HASH_FUNCTION_CLOUD_LOGS Columns: "timestamp" Columns: "uid" } } } } TxId: 281474976715660 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:45772" , at schemeshard: 72057594046644480 2025-04-07T13:51:27.347381Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /Root/LogStore/log2, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:51:27.347543Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Haven't MAX-index for TTL column and TTL column is not first column in primary key, at schemeshard: 72057594046644480 2025-04-07T13:51:27.350650Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusSchemeError Reason: "Haven\'t MAX-index for TTL column and TTL column is not first column in primary key" TxId: 281474976715660 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:51:27.350840Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusSchemeError, reason: Haven't MAX-index for TTL column and TTL column is not first column in primary key, operation: CREATE COLUMN TABLE, path: /Root/LogStore/ 2025-04-07T13:51:27.567947Z node 40 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;parent=[40:7490568506682598513:2293];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:27.568018Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[40:7490568506682598503:2291];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:27.571988Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[40:7490568506682598537:2294];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:27.584027Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[40:7490568506682598509:2292];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.068396Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[40:7490568506682598513:2293];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.068465Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[40:7490568506682598503:2291];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.072504Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[40:7490568506682598537:2294];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.088793Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[40:7490568506682598509:2292];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.103851Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[40:7490568506682598537:2294];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888;
2025-04-07T13:51:28.108416Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[40:7490568506682598513:2293];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889;
2025-04-07T13:51:28.116864Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[40:7490568506682598509:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890;
2025-04-07T13:51:28.132862Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[40:7490568506682598503:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891;
2025-04-07T13:51:28.569950Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[40:7490568506682598513:2293];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.570011Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[40:7490568506682598503:2291];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T13:51:28.576807Z node 40 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[40:7490568506682598537:2294];fline=actor.cpp:33;event=skip_flush_writing;
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:458, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (SCHEME_ERROR != SUCCESS)
: Error: Haven't MAX-index for TTL column and TTL column is not first column in primary key
 , with diff: S(|UC)C(H|)E(ME_ERROR|SS)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1B7F66A0)
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+26819 (0x1AEBBE73)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1AEE1248)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x1B835599)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1B7FD209)
NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1AEE0414)
NUnitTest::TTestFactory::Execute()+2438 (0x1B7FEAD6)
NUnitTest::RunMain(int, char**)+5149 (0x1B82F1DD)
??+0 (0x7F1445C4FD90)
__libc_start_main+128 (0x7F1445C4FE40)
_start+41 (0x1844F029)
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK
>> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD]
Test command err:
2025-04-07T13:52:03.814733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:03.815154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:03.815458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fff/r3tmp/tmp558AdZ/pdisk_1.dat 2025-04-07T13:52:04.269115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.337231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:04.393161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:04.393342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:04.418076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:04.555572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.628887Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:653:2553] 2025-04-07T13:52:04.629297Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:04.676235Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.676859Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2025-04-07T13:52:04.677081Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:04.686207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.687944Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:04.688119Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:04.688203Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:04.688608Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.717008Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:04.717197Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.717350Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2573] 2025-04-07T13:52:04.717390Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:04.717431Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:04.717476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:04.718568Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:04.718681Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T13:52:04.719393Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:644:2548], serverId# [1:676:2566], sessionId# [0:0:0] 2025-04-07T13:52:04.719696Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:04.719741Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.719781Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:04.719822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:04.719922Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.720100Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:04.720377Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:04.720476Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:04.720973Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.722394Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:52:04.722466Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:52:04.722525Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:52:04.722805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.722859Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:52:04.722919Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.722980Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:694:2576] 2025-04-07T13:52:04.723003Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:52:04.723025Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:52:04.723051Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:52:04.723849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:52:04.723929Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:52:04.725056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:04.725092Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.725118Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:52:04.725152Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:04.725764Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:645:2549], serverId# [1:681:2570], sessionId# [0:0:0] 2025-04-07T13:52:04.726144Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:661:2557] 2025-04-07T13:52:04.726333Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
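The "Test command err:" block that starts above belongs to DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard: three datashards (72075186224037888-890) are created, and further down a [DistEraser] actor proposes a distributed erase to all of them and then replies SHARD_UNKNOWN with "lost pipe while waiting for reply (plan)" once tablet 72075186224037888 goes away. As a self-contained illustration (not YDB's actual TDistEraser actor; the types and function below are invented for the sketch), the coordinator pattern it exercises looks roughly like this: propose to every shard, and abort the whole operation with an "unknown" status if any shard connection is lost before the transaction is planned:

    // Illustrative sketch only: a coordinator that requires every shard to acknowledge
    // the proposed erase (status# 1 in the log) before planning, and reports a
    // SHARD_UNKNOWN-style error when the pipe to a shard is lost instead.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    enum class EShardReply { Prepared, PipeLost };

    struct TEraseResult {
        bool Ok;
        std::string Error;
    };

    TEraseResult RunDistributedErase(const std::map<uint64_t, EShardReply>& shardReplies) {
        // Phase 1: collect a propose result from each shard; any lost pipe aborts the erase.
        for (const auto& [shard, reply] : shardReplies) {
            if (reply != EShardReply::Prepared) {
                return {false, "Tx state unknown: lost pipe to shard " + std::to_string(shard)};
            }
        }
        // Phase 2 (registering the plan and waiting for completion) is omitted in this sketch.
        return {true, {}};
    }

    int main() {
        const TEraseResult res = RunDistributedErase({
            {72075186224037888ull, EShardReply::PipeLost},   // the shard that goes away in the test
            {72075186224037889ull, EShardReply::Prepared},
            {72075186224037890ull, EShardReply::Prepared},
        });
        std::cout << (res.Ok ? std::string("erase planned") : "erase failed: " + res.Error) << "\n";
        return 0;
    }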
2025-04-07T13:52:04.735557Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:04.735770Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:52:04.735839Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T13:52:04.737727Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.737823Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.740802Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T13:52:04.740900Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T13:52:04.740946Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-07T13:52:04.741240Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.741293Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T13:52:04.741398Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.741475Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:713:2585] 2025-04-07T13:52:04.741501Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T13:52:04.741528Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T13:52:04.741553Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:52:04.742111Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:04.742197Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:04.742268Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T13:52:04.742334Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T13:52:04.742429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:52:04.742467Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.742499Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T13:52:04.742533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:52:04.755464Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:04.755576Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.755647Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:52:04.755677Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.805066Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:646:2550], serverId# [1:722:2593], sessionId# [0:0:0] 2025-04-07T13:52:04.805209Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:04.805426Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-07T13:52:04.805519Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-07T13:52:04.806055Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:04.816831Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:04.816938Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.995861Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2607], serverId# [1:740:2611], sessionId# [0:0:0] 2025-04-07T13:52:04.996073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:737:2608], serverId# [1:741:2612], sessionId# [0:0:0] 2025-04-07T13:52:04.996381Z node 1 :TX_DATA ... cesMode: (empty maybe) } From: (Uint32 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 4 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:52:18.865347Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Handle TEvDataShard::TEvEraseRowsRequest 2025-04-07T13:52:18.865510Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Propose tx: txId# 281474976715663, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-04-07T13:52:18.865613Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Propose tx: txId# 281474976715663, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-04-07T13:52:18.865669Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Propose tx: txId# 281474976715663, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-04-07T13:52:18.865974Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:18.866173Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037890 2025-04-07T13:52:18.866567Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:18.866695Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037888 2025-04-07T13:52:18.866942Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:18.867022Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037889 2025-04-07T13:52:18.878763Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T13:52:18.878869Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:18.878984Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 
72075186224037889 2025-04-07T13:52:18.879055Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-07T13:52:18.879137Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 1 2025-04-07T13:52:18.879251Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-07T13:52:18.879296Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-07T13:52:18.879326Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:18.879384Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-04-07T13:52:18.879415Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-04-07T13:52:18.879447Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037889, status# 1 2025-04-07T13:52:18.879482Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 1 2025-04-07T13:52:18.879515Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Register plan: txId# 281474976715663, minStep# 1535, maxStep# 31535 2025-04-07T13:52:18.892435Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-07T13:52:18.892513Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-07T13:52:18.895160Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-04-07T13:52:18.895235Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-04-07T13:52:18.895333Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1056:2808] Reply: txId# 281474976715663, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715663, shard# 72075186224037888 2025-04-07T13:52:18.895911Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T13:52:18.895951Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-07T13:52:18.896322Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:18.896373Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:18.896415Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-07T13:52:18.896470Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:18.896977Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1050:2803], serverId# [3:1051:2804], sessionId# [0:0:0] 2025-04-07T13:52:18.919262Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1067:2818] 2025-04-07T13:52:18.919545Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:18.925429Z node 3 :TX_DATASHARD DEBUG: 
TxInitSchema.Complete 2025-04-07T13:52:18.926813Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:18.929465Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:18.929550Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:18.929607Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:18.930037Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:18.930157Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-07T13:52:18.930256Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:18.930566Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1081:2825] 2025-04-07T13:52:18.930611Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:18.930664Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:18.930702Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:18.931063Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-07T13:52:18.931243Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-07T13:52:18.932652Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:18.933685Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:18.933924Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1501 2025-04-07T13:52:18.934000Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:18.934133Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:18.934228Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:18.934280Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:18.934324Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-04-07T13:52:18.934369Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:18.934728Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-07T13:52:18.934777Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-04-07T13:52:18.934823Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-04-07T13:52:18.935010Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715662 2025-04-07T13:52:18.935084Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T13:52:18.935138Z node 3 
:TX_DATASHARD NOTICE: Outdated readset for 1501:281474976715662 at 72075186224037889
2025-04-07T13:52:18.935183Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889
2025-04-07T13:52:18.935235Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0}
2025-04-07T13:52:18.935379Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888
2025-04-07T13:52:18.935411Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662
2025-04-07T13:52:18.935438Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662
2025-04-07T13:52:18.935539Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1501
2025-04-07T13:52:18.935586Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037888: waitStep# 1501 readStep# 1501 observedStep# 1501
2025-04-07T13:52:18.935676Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715662
2025-04-07T13:52:18.935739Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715662
2025-04-07T13:52:18.935806Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0}
2025-04-07T13:52:18.935843Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1501:281474976715662 at 72075186224037890
2025-04-07T13:52:18.935876Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890
2025-04-07T13:52:18.935910Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0}
2025-04-07T13:52:18.935964Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662
>> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK
>> TopicAutoscaling::PartitionSplit_PQv1
>> YdbOlapStore::BulkUpsert [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsAsyncIndex
>> YdbOlapStore::DuplicateRows
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD]
Test command err:
2025-04-07T13:52:07.514272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:07.514608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:07.514857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fdd/r3tmp/tmpcNx3UY/pdisk_1.dat 2025-04-07T13:52:07.888147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:07.932051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:07.988393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:07.988594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:08.002210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:08.130601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:08.172640Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:08.172990Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:08.222119Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:08.222268Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:08.223994Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:08.224105Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:08.224161Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:08.224527Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:08.259959Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:08.260176Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:08.260306Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:08.260355Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:08.260396Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:08.260438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.260967Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:08.261088Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:08.261505Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:08.261570Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.261606Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.261664Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:08.261717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.261883Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:08.262119Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:08.262210Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:08.269338Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.281204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:08.281325Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:08.486864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:08.496643Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:08.496753Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.497701Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.497753Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:08.497799Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:08.498087Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:08.498269Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:08.498869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:08.498933Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:08.501249Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:08.501695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:08.503633Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:08.503680Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.504244Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:08.504302Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:08.504356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.505752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:08.505818Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:08.505879Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:08.505953Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:08.506131Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:08.506233Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:08.510468Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.512427Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:08.512558Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:08.512627Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:08.522329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.522449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.522523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:08.527546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:08.537357Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.769146Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:08.772228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:09.281046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bna849tjb5cg00vpm4vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ4NmM2NTUtZmQyYjM5MjItMmQwMTM5ODYtZDU3NzJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:09.287251Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:09.287571Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:09.301320Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:09.301475Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:09.311028Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:09.312299Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:09.336495Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:09.336 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:19.075562Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:52:19.075957Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.076002Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:19.076044Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:19.076086Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.076259Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:19.076459Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:19.076540Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:19.078718Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:19.091067Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:19.091186Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:19.283861Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:19.284479Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:19.284535Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:19.285823Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.285875Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:19.285913Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:19.286144Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:19.286273Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:19.286460Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.286517Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:19.286958Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:19.287380Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:19.299645Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:19.299767Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:19.300683Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:19.300776Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:19.300851Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.302107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.302151Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:19.302213Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:19.302276Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:19.302322Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:19.302423Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:19.302876Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:19.305201Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:19.305271Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:19.306149Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:19.314715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.314819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.314892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.320150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:19.325903Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:19.531239Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:19.537015Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:19.672381Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bzvg1tg9kqpwsh619ajn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDRkMmEzNDItNzM0NjJiZjItNDYzOTc2NTAtZWE5OTVhMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.672899Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:19.673139Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:19.685248Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:19.685379Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:19.690847Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:19.691898Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:19.705490Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:19.705569Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:19.705829Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:19.705882Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:52:19.706171Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.706219Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:19.706262Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:19.706310Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.706385Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:19.709976Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:19.710373Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:19.710559Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.710638Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:19.710691Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:52:19.710905Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:19.710996Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.711659Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:52:19.711895Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:52:19.712026Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:52:19.712072Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:52:19.805461Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:52:19.805537Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:52:19.805989Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:19.806041Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:19.806075Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:52:19.806199Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:19.806254Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:19.806295Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed >> TCmsTest::PriorityRange [GOOD] >> TopicService::ThereAreGapsInTheOffsetRanges [GOOD] >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> TCmsTenatsTest::TestTenantLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-04-07T13:52:18.721110Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-07T13:52:18.721180Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-07T13:52:18.721206Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-07T13:52:18.721225Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-07T13:52:18.721243Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-07T13:52:18.721260Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-07T13:52:18.721279Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-07T13:52:18.721297Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-07T13:52:18.727857Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-07T13:52:18.727919Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-07T13:52:18.727940Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-07T13:52:18.727960Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-07T13:52:18.727981Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-07T13:52:18.728000Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-07T13:52:18.728019Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-07T13:52:18.728036Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-07T13:52:18.774366Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-07T13:52:18.774424Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-07T13:52:18.774455Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-07T13:52:18.774478Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 
2025-04-07T13:52:18.774496Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-07T13:52:18.774513Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-07T13:52:18.774542Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-07T13:52:18.774566Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TCmsTest::RequestRestartServicesMultipleNodes >> TopicService::OnePartitionAndNoGapsInTheOffsets >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] 2025-04-07T13:51:22.476885Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] 2025-04-07T13:52:14.392042Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] Leader for 
TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:624:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:627:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:628:2057] recipient: [4:626:2543] Leader for TabletID 72057594037927937 is [4:629:2544] sender: [4:630:2057] recipient: [4:626:2543] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:629:2544] Leader for TabletID 72057594037927937 is [4:629:2544] sender: [4:699:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:624:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:627:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:628:2057] recipient: [5:626:2543] Leader for TabletID 72057594037927937 is [5:629:2544] sender: [5:630:2057] recipient: [5:626:2543] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:629:2544] Leader for TabletID 72057594037927937 is [5:629:2544] sender: [5:699:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:625:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:628:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:629:2057] recipient: [6:627:2543] Leader for TabletID 72057594037927937 is [6:630:2544] sender: [6:631:2057] recipient: [6:627:2543] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:630:2544] Leader for TabletID 72057594037927937 is [6:630:2544] sender: [6:700:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] >> TCmsTenatsTest::TestTenantRatioLimit >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-04-07T13:52:16.863179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:16.863352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:16.863510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001028/r3tmp/tmpPYLUcM/pdisk_1.dat 2025-04-07T13:52:17.208904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:17.242483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:17.294549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.295110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.307656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:17.434653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:17.844070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.844198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:717:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.846357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:17.853291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:18.070208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:18.435362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bydj6d7ttbpv07ca588r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFjMWY2ODYtMTdkNzg5OTktNGRjYmIyZjktYzI3Yjk4NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.549058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88bz12dymd9wx1j2eg8nwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgxZWE2NDItMWI0ZWJmYjUtNTYyMzU4MzYtN2VlNjhkNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.619593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88bz3t9kmmw4j8pnjzmk6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVmOWM1ZDktODYyZDBkMi00MTNhYzM2NC04ZWVmOTIzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.687924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88bz5yed08gw8bkrd20kjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI5M2RhOTItYTEyYWU4NGItMzQ3NzA2NDYtNTZjNjBkNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.762782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88bz8435y0b0yn7p0m84bg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzkxYjIwMjItNjZhMjJjMWItNzZlZDEzY2ItOTNkZTNkMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.836404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88bzafdazt4z496pccfnrg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE1ZjJmYjMtYjQ1YzgzNy0yMDE3MTc4My0xMjc3OTMwNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.923539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88bzcs7mwyqm08jeh5k04a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkwNTVmOWYtZDcxZjc2YzQtNDlhM2NkNjItOGUxNTYwMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:18.994606Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88bzfheqj89g53fnb3msf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIzYjlkOTktZGMxN2JjNWMtZDgwOWIwZTEtNGU3YTAzOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.076011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88bzhq8kr5mkykzadaeww8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjhjNTZjNzQtM2NlN2I3NTgtNzI0MWM2MWYtZWEyZmViMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.144487Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88bzm845r3a7y0kpeydesz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQ0NTdhM2EtNWY1MWViNjgtNDI3NDI1ZC03NDQ2YjQ4NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.226715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jr88bzpd8p9v1mdn5knfz0qv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTUzZWViZDQtZTliNjBhZGQtNDUzNzAzMWEtZDExNjQzYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.297407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr88bzrzecxqf4yn9y41b9hy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZiOGE0MDYtODEwNjRlYzktYWUwMzQ3YmEtZWFmYzAwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.366916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr88bzv63a5yt0dp11ebvefm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQwZDgyODMtYTg5MzE2NmQtNGY5M2U5YjktMTUzZDZmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.442510Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr88bzxb16exkeex2hs5m5tj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc4ZWQ5ZTItMjY2ZTBjNTctMjE3MzhjOC03NzIzNjE1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.510952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr88bzzqd2eeknb3njjdjw15, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYzYzJkYy1hOGZhMThhYi1kOWM4MjNiOC02YTRjZDAzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.579385Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88c01vaaenv0hze0fj6ndk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExNjBkZS0xOGZhMTc0NS01ZmNmMjZjNi01OWM3MTliMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.648507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr88c040f4j94f7v3mwh58jv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE5YjQ1YjUtNmEzZmQzZjEtYjExN2Q1ZjUtNjliYzRiMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.715120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr88c06499jsct1q52sq3exb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2MDA5ODUtYjM1MWYxMWYtZDk3NDg5MTktZmU0MGE3NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.783590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr88c087aqedrb0cvhcbktkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYyYWQxMjItOTRkOWYwZjgtYTkxOTMyODItZmIzZWZkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.851015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr88c0ac51kests8ttvvfc67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZkNGJlMzQtZmU5Yjk5YjktNjZjZGNlOC1lZjNmZGYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.917676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr88c0cf1hn99wx5xn75cf6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNiMDNlMDctYTk0MmYzNjUtOGEyZDAxZjAtNjYxMTIxYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.984343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88c0ej4xbzn9z0w4prd4p5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThkYmExN2EtOWNjYzNkM2MtNGU1ZDU5OTctZjMzMTU5ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.053030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr88c0gn5keegbx8y806mzfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA0ZTU0MzQtNDMwYWQwZjEtNmI1ODAxNDUtMmM5MTUwYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.121277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr88c0js55xwdq16ed5qmf48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdhYjg0OWUtODNiYzA0NTAtYzk1YzE0OGYtYTQ3MjlmYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.189286Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr88c0mxdyjpmn0y42d3t49h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU3N2EwM2MtMzEyNDc5ZmMtZDgxNWRiMmMtZjljMGUzZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.257366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr88c0q29gsrtxva1fwz9bva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg2Nzg5MmUtNzRkZDU4NTctNDQ0MWJlODAtOGZjN2U1ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.321770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr88c0s6eb4w31a7be4hswnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJkN2ViOTQtZDQ5MzY4Zi0yOGZiNzdhLWNhYzQzMjQ5, CurrentExecut ... 349191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr88c3qp66y4vmncsc2b0ar9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFlYjEzMzItMTNhNGU4MTMtOGVkNjM5NS02YTFmNzRmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.414307Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr88c3ss098fk77jfnmxsec4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ4YTc1NWItNGI3NWNkZmYtODRiNTU3NmYtYzU3YzBjZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.511310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr88c3vv1stsv7h4sg0q82xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUwYjhhZWYtODg5NTI0ZC04NWVjY2U3ZS0yMGYzM2JkMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.584721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr88c3ywfdskvvpf7vy2hnaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU2MzlhYTItYjdmNTUyOGItYWQ5MGM1MDMtNmUxODU2Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:23.660463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr88c415fhf24b1bng97ze4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThkM2MxODItYTQ1N2Q5YmEtMjBmN2VjNjctZjc5OGI2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.758179Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88c43ja9ppevtv21vh4sc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY0MTcxNDctNmZiOTg1ZmQtNDk0ZmJhNDMtNjhjYTE0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.827770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr88c46j7qf6hp0v0kvdy820, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTliN2UzMjItYzNkNDczMmItNDI1MmI0YWItOTJlNjI1NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.894447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr88c48r5941sphrf0ead1sv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk1NGUyZi03ODJmYmQ3Ni1jMWY3ZWRjLTc4NzAzZjUy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.979828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr88c4axd1cnww57k14zkq1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE4OTFmNmYtZDllNjg3OTEtMmFkNjYyOGQtOGQ4YWMyNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.038610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr88c4dg2tz7bwsqjbzb65e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVjNWVhNC00MTQwMGJmYS1jNjllNWRmYy1jOWJlZjJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.098544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr88c4fb3ym621ftygyp82j7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM3NGUxMzctOGZjNWE4ZWItOWY3Yjk3MTYtNGQ3ZDZhMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.195203Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr88c4h64z50bkpzrys24kh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U3ZDk1NmMtODlhNzQ2NzUtYmY1NTY4ZmUtMjc3ZTM1ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.267206Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr88c4m8281nhy64v5ssefq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM3ZjBiNGItODcyY2FmZmQtNWU4OWY0OTItN2IyNzk5ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.351199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr88c4pya25y51gh7pgy7ej2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk2ZWI3NmYtMjc3ZDQyNWUtNjNkZjA4NzYtMzk5MWM0NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.419833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr88c4s300eyj7qf9f4h4jpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYwYTlhZDktODdhMDM3OTctMWYyYTdmMmQtZTdhNzVjOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.479882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr88c4v80b8j2nav1sm1m24c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU5OWI2M2YtZmZhN2RiOWEtZmMwMGIwYjQtYjE2ZmUyNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.542654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr88c4x550xx0m7af86tey0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTVkZjlhNGEtMzgwMDlmMDgtMWM3MmUxZGQtMjk0ZDZlNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.615391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr88c4z425k64xm6jvct7jbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFmMTVkNWMtMWI1YmIyMTAtZTViZDgyMjgtZTc4ZWU5NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.682075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr88c51adztydny9a3jrn6m1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZjYjMwOS0yYWM1MmUxZS05NDBkM2U2Yy00YmNhZjFjMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.757931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr88c53e4fxm08k8nv1dwavn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVlN2U4OGMtNzc2MWZiZTgtZDczZDZhZjItMTY0NjRjMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.818733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr88c55s94s1j9htfvhymxhr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRlZGE1Mi1hYmM4ZDc5ZC02Y2U4MmU1MS0xMTY4ODVlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.888089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr88c57q68n0fy4dtaxdj7kq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA4NzI5YjYtZTIyYWMwNmYtMzhmOTRlZjAtMzQ5ODI3Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.964129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr88c59w24tte8qcw5y1aj38, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJhODliNmItMTNkNzU1MzktMzk5ODFhMjgtZDIyYTFjYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.037961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr88c5c8959b3aa6mqz09h0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZlMGRkNWYtNDM5OTA2NWYtZWI0MGJkMjItMWY0MjU5ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.093610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr88c5ej8eda6habbf564hds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNhYzFhNzItYWRkMTg4ZjctZTc4NzUxODMtODM1NTFjODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.159668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr88c5ga5s9z5qjmqt4ra2rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc0NDY1NWEtMWU0MTRkYjUtMjUxZWEwYzUtOGY0NTcwYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.229753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr88c5jb49xwnfn0zeg6c5e6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEzMDk5YmItNmY5YTUwNjYtMjI3Yjk5NmUtODZmYmU1YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.298120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr88c5mp21h52pg5wbm6mqgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRhODcyNjQtYzZkOWE4Y2QtNjE3NDJmZmYtMWMyNWNiZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.364883Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr88c5pp0m9a2acrxkvk9w49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxOWMwNmEtOGY5YTExOGUtYzg1YzYzNjUtZDk2OTY4OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.462768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr88c5rscncn3vtczh68fzya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q2YzA4YWMtYWE2OGExYTUtNGE2NmU0NjMtNzM3ZDdhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.534224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr88c5vvdv6640rmj0jhn9cv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU1ZjczNzgtNmUwNWU4ZGYtZjM1Y2Y3MjAtZTczZDMzNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.603637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr88c5y29px44e65s61tk3wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGEyOTAyZjMtYzE0NGNlZTAtM2I3ZWVmODgtMmMzM2Y1YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.671472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr88c6084312z0erswvwxptk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE2M2VhYzQtNDVkZjhmNWYtNGJlMTY3M2ItZjczOGI3YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.731443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr88c62c5g5ef51bybxghdw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmNGNlZGEtY2M4Mjk0ZjgtYzY2ZjQ2MTMtMjRiZTM5Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.805839Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr88c648agesgapygyrxmtck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUyODc5ZDUtZmFjZGRlNTktYjY3NGFhZjMtNzU3MTlhZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.155772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr88c6710b85yqwznejvtrxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY2Y2VjNmMtNTMxNTRlOWEtYWM2N2QwNmEtYjkwNzQ2MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplaceDevices >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-04-07T13:52:17.667597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:17.667921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:17.668172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001012/r3tmp/tmpjn2O2D/pdisk_1.dat 2025-04-07T13:52:18.020038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:18.065523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:18.118567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:18.118759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:18.132681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:18.259470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:18.756652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-07T13:52:19.091910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2640], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.092040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:789:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.092141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.101012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:19.307625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2648], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:19.609016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88bzmh7x1f9v9s47dwdf58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhlNTFhMzItNTA3NGM2OTgtNmVlNmExZWMtOGUwMDdlZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.683563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88c0557ptxn378xtawsg7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ1NTkzZmQtNzk1MTI1YWUtNWFkMmQyZi1kMGUxODI4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.754458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88c077ds5s04wbmvkmme0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAzNGYwMTgtYTkyZDMzZTAtYTEwMjA5ZDktNGRiMTE4NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.817151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88c09f9nhhak9ed2vsq5ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA4ZmZiNzMtMmNjM2ZiNTgtNTE0YTJlZTEtOTFjY2U0Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.881072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88c0bd7td8zmx7mrm4n4xe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYwYThiNGMtYTg2MzEzMTEtYTYyZDZiZDgtYmMwMGU1ZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.945721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88c0dd4t66q2e72zf4ssvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTIyZjE2NmMtNTExY2I5NGEtZjdhZTg4ZWYtNmZjODNlMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.011741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88c0feavpmjky08yqp5dcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMyMGE4YmQtMjJiODg3ZjItODA1YjQ0NDktODMyNGMxNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.075882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88c0hg23h7aswa3c2qa0yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRkYzE2LTllMzAyZDkzLWVmOGQxZmZkLTExOGNkZGIy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.140008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88c0kg3ap380h19g4ccst9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFjY2QwMDQtMzcyMjZkZTQtZjUxZTFkYTgtMWIzZDExNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.203510Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88c0ng502b2abvj6f5ndg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzliZjNkZWQtMTk0MWQyNDMtODJkMDdiYmYtZjNkMDUxNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.266870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jr88c0qfcmnpbm3f5rwewhqv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRkMDYxNjItNWMyNmE5OTItOGVhNDlmYmUtMzkwOThlOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.328647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr88c0sf9wfgngj76v5pg3jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxNmM0ZmMtNGRjNzI3YzQtNGQ2N2Y3ODItNWRhYmJhODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.390070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr88c0vd5aph13vn0h3gk7q5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGExMzEwZDQtYmNjNDhiMzctZmU3Nzk4NDAtZWUwMmNhZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.447177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr88c0x98mj6r1m568n10arg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkZTNhMDItNjE4YjZiZmEtNjdhYzZkMDUtYzAwMjIyZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.504203Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr88c0z3a0k8hb58fb37bd3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU3YTk4ZjQtY2FiMjQzOWMtZjEzNjZmZjItMjM4MDQzYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.570519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88c10w1zs0y4gf50v2qrss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBiNGE3MWUtNGZjNTUyNDAtZWUyN2ZmNGItNmQ4NGM3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.638168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr88c12z0a33eavwa7x01qyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2EwOTA0NzAtODVlNzljNzQtZjVjOTJkMzEtZTkwMzczOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.706031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr88c153d4bzg4ppadbpp0kj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRjN2ViMmEtMWU0MTRmNy0yM2IwNmE1Yi1jMmMxOWU0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.770908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr88c1762e1wnd133j96qztz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMzZjNkODYtNTE0YzI3NTEtOWQ0NzU0ODgtNWU0ODM3Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.833986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr88c1979fzj4yqe4apj3gja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRiYjUzMzktNDRmMDkxYi1jYmJlYjY2ZS02YzE1NDczZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.903025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr88c1b6e6z5qpjjrr2spz6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJkOGIwZGQtYzIzMmIwZDYtOWVhYThiMTItNGIyYTlmY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.974685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88c1dbf1jqgvtt1pxf47zn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZhN2MwMzMtNzE5YWMwYWMtNGVjNWYyOGUtZTg2M2QwOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.057345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr88c1fk71b06a3q4whe7vrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJhOGRmYTAtNjViMGQ2OTEtZjI5YmUwMWMtZDExMDgzZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.122147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr88c1j63vnx0etsgmbh67en, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQzMTIwYzAtODQ1ZDJjOWItMTA1NGFkMTktNDRlMjI1MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.176916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr88c1m576a0qkfqj65eqx4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgxMjYyMDQtOGRlNGEzMDUtNWRhNmRmNGEtZWIxNmI1MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.229181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr88c1nwcaqgrf4c30kz1v86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNkYzYwNmEtNDg1NzFmMDAtNGM5YzA2NmMtM2E3OTdlMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.286248Z node 1 :KQP_EXEC ... 393048Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr88c4rqb0204y4974kws0s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWYyMzRhZjctM2QyMmNhOGUtNmQyNmQ4MTYtZTJlMjc0NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.453353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr88c4tc7ewn3wwc3qp9fa9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFmZGU2MjQtMjY4ZGQ4ZWEtZGY3YzkwNi1mYjBlZDFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.512544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr88c4w91px4h8jvzpsbxjqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNhNjMwMTItMWI3ZDMxNTAtODdkYmMzYWEtOTU3NDNjM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.565615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr88c4y50v645xfvm2k0c6ta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdjMDRkZTctOWQyMDlhNi0zMWQ0YmYxZS04MWFmYmMwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.613355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jr88c4zr9ph6gzbfshrdkmng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJmN2IxMmEtMmNiMjNhMDUtYWE4OTIyMDYtNjJjNzVhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.688856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88c519fckfvc6aprxfc855, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0ZWFjNTMtYWFjNDJiYzctMWM2NzRkZjctYjhmMmFmYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.757821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr88c53m2gbyt17f50vqdygs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4NmEwMTYtMzI3MTlkYmEtYzFkMmMyZjctMmUwMzRjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.825777Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr88c55s02ycnp2xbpsqjy8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWViOGZlODEtMTYzY2Q3OGEtNDAyNzYzYjAtODk0NmQ5OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.917609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr88c5809cndn9hd5638agym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA5NWY3NjEtZjU1ZjA2YWQtMjBkOWQyYjMtZjk1OWIzYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.987168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr88c5ay3h067jf0249mzcbd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI3ODcyODQtODc3YWQxYzMtN2MyNTkwNDYtZGJmNjZlMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.052878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr88c5czedrpx30bw2nrv2ad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBhZTI5NzEtY2IzNTEwOTQtYzViNmIyZjItZTY3MjZhZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.122025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr88c5f02vmjsyd18mmedxfe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFiMTZiMjAtZGEwNTUwYjEtOTJhNzhjOGEtNmFkNDhmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.189387Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr88c5h63t6dhk4ezrf2st9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlODIwNzEtZWU2NjI4OS02YTg5MjdhZS1iYTI5NDU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.253141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr88c5k9csmryjaa5amqafak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWEzMmY1N2ItYmU0Mjc0YTItMzdjYTE2NWYtMzZmMjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.326288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr88c5n94nvkf76npxj8jyrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzkwNmYzYzgtODBlZGY2NzktYTQ3YThkYzgtNWJhNzdlYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.405807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr88c5qkcb30c008yhttat24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkY2JiOGItYzBhZThhNjAtY2ZiMTNiMS0zODgxZGJiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.511449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr88c5t22gv55spya8dk1n3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY5YTk3NDctMzYyYWEwYTUtMzgyNWUxOC05YWVmMjk1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.571136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr88c5xbar0599bmr9dvqjfs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkNDdkYTQtM2Q4NTk1MjEtNDM1OGRjNDctZTg4ZmM3ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.643622Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr88c5z7089ft0m2vhkbhaxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I3YzQ4OGMtNDIxYzI2OTktN2MyYmQ4NDQtYzZlNmRkOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.734696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr88c61fc4y5s85n5zqm9c9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRjYTkwNjMtYjM5OWZkNTktYzU5NTZkMzgtNWYwMzZiNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.807844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr88c64bd07q6wrk7hmy9063, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJmMDBiZWQtYjNiZjU4NjktYzljNzlkNTEtZjc0ZWQ1YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.870501Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr88c66m83jh9e0gwsfvf978, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZjOTVkZDMtYTcyYTNiODEtZjZiMTk3NjAtZDlkOTVlMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.933636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr88c68jeecn7gpgsykt8gx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNhNTExMTAtNmU5MWVhOWYtMjViMDRmOWMtOWE4Zjk5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.993882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr88c6ajet52cthxdzapy5y8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZhNGY3MTctNTdiNjlhZmQtYTkzMWE0M2ItNGQ3YmZlMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.052791Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr88c6ce8dvj9q4ek6g3tfr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRlMmQ5YjgtZWQ3MWMwMWEtOGQxY2RmMTAtM2ZiNGU5ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.141533Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr88c6e8ekacd87vaw6hfaes, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMxNGY3M2EtZTYwMGM2OGYtODVmM2VjMGEtMTM1MDdkM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.218414Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr88c6h09srvgdbj49c9td0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWEwYzFmNzEtNzgzYTRiYTktZDk2ZDg3MDYtMmEzMmU3YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.288687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr88c6kfb0rsfg7wybdqa8zd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZmYzZmOGUtMTczODQ5ODQtOTUyNDg3M2MtNTY3M2U3MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.348865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr88c6nne5sh6gfvtz9bwtms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYmIxYWMtZWI5MmIxNjktMTVlNmU4OGItZDE3MGJhYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.410877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr88c6qhcrbehna119ec9n3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FiNGE0ZjctNzVjNmYyN2YtMTI1ZTk2YzEtZGFmNTAwODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.471498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr88c6sf89qkmf97srkztb1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmZkMzU0NDktZmMyNjJhNjQtYzIxOThkZmEtOGM0NGQ4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.541445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr88c6vb660q54wsatka499v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFlODJiZS1lNTc1Y2NmYy02MzlmM2FjZC1kM2EzOWU0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.602706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr88c6xj00grhr64kw4pz5wp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk3OGNhNTctMTZhYTAwYTYtN2E3NTJmZTItZjFjMzkwYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.666537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr88c6zf8e50v4v5gqh3c6zt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdhNTY3ODctZjZjMDJjOGItMmI3NjJmNC1kYThkMDVkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.783900Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr88c71f9vb7bwnt7kcwx65h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAwY2RiZjAtNzUzZTU3MGUtYWQ3YzA0NWItYWI1NjFmZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.006456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr88c78nddf8n5sm6g1b0zr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI4ODgxZjMtNDQxZGU4YmItZDM5Y2Y5ZGEtMWYwZDRkZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TCmsTest::WalleTasks >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::TestOutdatedState >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-04-07T13:52:03.482255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:03.482511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:03.482731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001031/r3tmp/tmpFPc423/pdisk_1.dat 2025-04-07T13:52:03.949214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:03.991144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:04.050280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:04.050436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:04.061934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:04.203072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:04.287616Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:653:2553] 2025-04-07T13:52:04.287873Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:04.372207Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.372583Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2025-04-07T13:52:04.372837Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:04.380603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.382223Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:04.382336Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:04.382394Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:04.382748Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.413342Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:04.413535Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.413665Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2573] 2025-04-07T13:52:04.413719Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:04.413754Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:04.413798Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:04.414815Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:04.415114Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T13:52:04.415721Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:644:2548], serverId# [1:676:2566], sessionId# [0:0:0] 2025-04-07T13:52:04.416124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:04.416166Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.416202Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:04.416245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:04.416341Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.416502Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:04.416802Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:04.416982Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:04.417470Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.418716Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:52:04.418774Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:52:04.418821Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:52:04.419078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.419123Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:52:04.419184Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.419251Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:694:2576] 2025-04-07T13:52:04.419280Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:52:04.419303Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:52:04.419327Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:52:04.419960Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:52:04.420039Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:52:04.429477Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:04.429533Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.429567Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:52:04.429611Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:04.430300Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:645:2549], serverId# [1:681:2570], sessionId# [0:0:0] 2025-04-07T13:52:04.430662Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:661:2557] 2025-04-07T13:52:04.430849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:52:04.443580Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:04.443791Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:52:04.443912Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T13:52:04.445656Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:04.445749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:04.446909Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T13:52:04.446973Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T13:52:04.447017Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-07T13:52:04.447305Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:04.447345Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T13:52:04.447412Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:04.447490Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:713:2585] 2025-04-07T13:52:04.447515Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T13:52:04.447539Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T13:52:04.447560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:52:04.448003Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:04.448072Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:04.448137Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T13:52:04.448211Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T13:52:04.448288Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:52:04.448319Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:04.448349Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T13:52:04.448377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:52:04.461294Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:04.461394Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.461453Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:52:04.461491Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.509717Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:646:2550], serverId# [1:722:2593], sessionId# [0:0:0] 2025-04-07T13:52:04.509864Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:04.510058Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-07T13:52:04.510155Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-07T13:52:04.510700Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:04.525275Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:04.525374Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:04.695485Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2607], serverId# [1:740:2611], sessionId# [0:0:0] 2025-04-07T13:52:04.695706Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:737:2608], serverId# [1:741:2612], sessionId# [0:0:0] 2025-04-07T13:52:04.695994Z node 1 :TX_DATA ... mmediate 0 planned 1 2025-04-07T13:52:27.854082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T13:52:27.854340Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-07T13:52:27.854574Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-07T13:52:27.854683Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-07T13:52:27.854963Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:27.870251Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037893 step# 2500 txid# 281474976715667} 2025-04-07T13:52:27.870364Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-04-07T13:52:27.870442Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-07T13:52:27.885537Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037891 step# 2500 txid# 281474976715667} 2025-04-07T13:52:27.885621Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-04-07T13:52:27.885699Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T13:52:27.885771Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715667 2025-04-07T13:52:27.885845Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T13:52:27.885918Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1385:3033], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:27.886049Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-04-07T13:52:27.886121Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T13:52:27.886533Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1385:3033] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037891, status# 2 2025-04-07T13:52:27.886965Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715667 2025-04-07T13:52:27.887083Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-04-07T13:52:27.887190Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-04-07T13:52:27.887434Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-04-07T13:52:27.887799Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1197:2921], at tablet# 72075186224037891 2025-04-07T13:52:27.887859Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-04-07T13:52:27.887941Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-07T13:52:27.887978Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:27.888019Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715667] at 72075186224037893 for LoadAndWaitInRS 2025-04-07T13:52:27.888477Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:27.889465Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-04-07T13:52:27.900871Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-07T13:52:27.900977Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1385:3033], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T13:52:27.901072Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-04-07T13:52:27.901127Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-04-07T13:52:27.901263Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1385:3033] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037893, status# 2 2025-04-07T13:52:27.901312Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1385:3033] Reply: txId# 281474976715667, status# OK, error# 2025-04-07T13:52:27.901591Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715667 2025-04-07T13:52:27.901841Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-07T13:52:27.901887Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-04-07T13:52:27.902061Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1380:3029], serverId# [3:1381:3030], sessionId# [0:0:0] 2025-04-07T13:52:27.902143Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-04-07T13:52:27.902177Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-04-07T13:52:27.902283Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-04-07T13:52:27.902318Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-04-07T13:52:27.902472Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T13:52:27.902511Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:27.902550Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T13:52:27.903707Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-04-07T13:52:27.904130Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-04-07T13:52:27.904347Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-07T13:52:27.904400Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.904454Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for WaitForStreamClearance 2025-04-07T13:52:27.904767Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.904849Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-07T13:52:27.905632Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-04-07T13:52:27.905804Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-04-07T13:52:27.964515Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-04-07T13:52:27.964609Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037893 2025-04-07T13:52:27.964908Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-07T13:52:27.964948Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.964987Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for ReadTableScan 
2025-04-07T13:52:27.965110Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:27.965171Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-07T13:52:27.965213Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-07T13:52:27.966281Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-07T13:52:27.966531Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-07T13:52:27.966658Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T13:52:27.966682Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.966710Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for WaitForStreamClearance 2025-04-07T13:52:27.966851Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.966884Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T13:52:27.967424Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-04-07T13:52:27.967567Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-04-07T13:52:27.969156Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-07T13:52:27.969203Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715669, at: 72075186224037892 2025-04-07T13:52:27.969355Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T13:52:27.969381Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:52:27.969410Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for ReadTableScan 2025-04-07T13:52:27.969495Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:27.969537Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T13:52:27.969570Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> BasicUsage::WriteSessionWriteInHandlers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-04-07T13:52:17.383080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:17.383304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:17.383505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001002/r3tmp/tmp76pM5w/pdisk_1.dat 2025-04-07T13:52:17.735428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:17.782823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:17.836798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.836929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.848486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:17.990846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:18.381679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:18.381802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:717:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:18.382204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:18.386759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:18.602611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:19.055669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88byyb6gw5cjpfp08tv8fx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk5NDdhMjAtZmY4ZGViOTUtNjk4OGM3NS1lODJhZGRiMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.142368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88bzm60b840z3p43r057dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM1ZDE0NGEtM2NlOWI2NmQtZGM3OWQxYWItYzc4OWM3MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.235891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88bzpr5w1z2j5kjcvbwdx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE0NTdhZTYtODE0YzdkZi1hZThhYWVkZS1mNzI0ODA3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.314866Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88bzsm5yq2q8hxk69g390a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNjNWQ5ZTgtYjNhMGIwMWQtMzk2NzJmNGMtZWE2ZTRiMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.398933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88bzw9bgxwyqvf8wqkt0r9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4OTU5N2ItZTA4NWM0ZjMtYTQxODJiOTEtMzk5M2JiM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.474446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88bzync7mm01q0pg2b1qta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA4NzdhY2YtYTA4YTJkNTItMmVhMGRjZDQtMmRiODNhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.552280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88c011fj5v2y47dskbyyj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNmMTE4ZGItNTIwMjM5ZWQtNzRkZDc1N2YtMjhlNzJhOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.635300Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88c03g8t3rzbmq10ydstz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2QzNDU5ZGYtZWFhMWY4MDctYzg1YjZkNTctYTA3NTI1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.745260Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88c064aekv139pz9bmewh6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQzZjhkYzktODRjYTQzZGEtZDAxOGFhMTUtZDQ2ODA5MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.822141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88c09f86gcax1gjrhpay2e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM2M2JmNjUtYThhOGZlMjgtNzFjYjY3NzgtY2VkZTcxZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.904741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jr88c0bz4fp6q8za23j5s307, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ0OWM3ZjQtYWQ5M2Y0M2YtYWEwYjQ1NzAtMWE2ZDVlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:19.977294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr88c0ef0y8yt2883vw3qrck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM2NjQ2ZWQtNjQyMDI1OTMtZDJhZTIyMWItOTVlYTIxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.059624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr88c0grbea9fvbh4tsvrr3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQyODBiN2YtMmU0NzgyYjAtNDVjYjk5YTgtZTg1OTkwYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.132793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr88c0kacxfbmxb53df85eyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEyMjYyOWItMjg2MTU2ZjQtOTkzZmI2YjctYTljNGU1M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.203477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr88c0nm1zzb2h581m2vxkbe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZhNWUxOTMtZmJkN2UwZmEtODAzZjJlOTMtOGNlNDlmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.285728Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88c0qw7e2qvf3vqgh5cf01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhNzA1YmYtOTFmMzZmMi1hNjA4ZDNjOS1hMzUzY2ExOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.370432Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr88c0tdb6j6v6qg978tx154, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU5YTVmMmYtYzQyNDNlMC0yMmZjMGQ1MS05NTRlZTc3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.461237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr88c0x1a30dz6e87vzj13hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NkNDk1ZDUtNDZlMjIyYzctZDRlMWM0NzItMjZiMWQyMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.540016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr88c0zx0rxwmvsqtncjjazh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU3YThlY2UtOTRkZWYzOTQtOTFmYjcyMDYtMzYyYWQ0Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.615844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr88c12bcqkaf84mb0pn5aza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQ4MDE2ZDgtYWQ2NzVmNzctYzg4MzQ5NDMtODUwNTE3ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.697727Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr88c14r56z3w9rffd88d4yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U0YjYyMzktZTUyNDE3YzEtZDliYjhmNTItNDIyM2Q0YjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.786442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88c17bc9540kq1wjmxn4ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE0MGY5MTctOGYzYzI5NTEtZTcyOWRlNmItNDA4Y2Q2MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.863771Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr88c1a2f0wah9pwga4vk6vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcwNTRjMmUtZDAzNTViZDAtY2RlODk5MmMtODJlYTIxNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:20.942377Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr88c1cecc3bz0jshe413k6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVkNmQ1ZTYtNzg0NTBjNDgtYzkzYzM4Mi1lNzUyMWU2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.033924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr88c1ey6v3awjw9vj3dgzxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZkNTJhYTItMzU5MGM5NGUtODEyZGZjYzMtNzg2MzJhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.117145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr88c1hxftjfh554wmgem70k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY3ZTAxYTMtY2M3Nzk2ODItYmVlZGEwMzgtYmE2NzljNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:21.186736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr88c1mba1zrpymxxrd9gbta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTViNjU4N2UtNTcyOWUwZTgtZTg4NTVlN2EtZjg3YmZhNGQ=, CurrentEx ... jMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.983826Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr88c5a18hp75cxphmxwz9t0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFlNDAyZDItZjU2MzZmOTQtYzRlMmE0YWItZmRjODYyMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.081092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr88c5d9cwvh3m81pqz0tgzn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjlkYjU5YjItYjFiMGE5YTMtNjgwNDQzM2YtNmQ1Y2M3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.168100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr88c5gbf7s7s0xwpkj4twqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlMzk3YzItNjE5NTI0YjctOTBmNTRiYTMtNTk1NTgzZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.256967Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jr88c5k24cn7kj189qs4p290, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY1ODBjNWUtZThjM2ZhYTAtODQ2MTYyOGYtMmRlOTliNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.347080Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88c5nraes859pnxvkq4p21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3M2RhYjAtZmYyZjE3N2UtMjZmMzgyNTYtNmFkNGY3YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.440247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr88c5rj3f5gaz3hvd60cx5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAwMjYxZDMtZmNmMTcyYzctYmM2ZDViNzgtMWYzZDQxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.516081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr88c5vf665yw3vjb1f7n7kk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU4YzU0Zi1mYzEwMDRiNi1jNjZmMGVmNC0yNWY5YTRjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.595598Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr88c5xwdnnqfq4mvsx6gxsf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlZDVjZDQtOWI1MDE4ODktMjAyZGIwYy1iZjViNjIyYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.667873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr88c60bfkax1xqxadveaxt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlMDJmNzQtOTBlYjdiZmUtODNiNDFiMmEtNDlkMDI1ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.757557Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr88c62nfw0gm4xvaw18f82w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiYWFiNTYtYmViMDUxNWMtNzJlOGNlODEtMTQ3ODBhNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.830171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr88c65d6c981rhyerag3y6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlOTUwOTItMTA1ZWExOWUtMzc1ZDg4ZmEtOTQzYmRlOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.898513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr88c67nfvwqgsm8rr6rncxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlkODk0Zi02YmMxZWI5YS01ODdjYjg2ZC02YzEyZjFmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.981362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr88c69tavxswc0vjybp5yj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE5NzM4YTgtMzE1ZDk3MzgtZDc3MjA5OGMtMzU2ZjEwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.060379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr88c6cc3edsexe0wxv81da9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcyMTNkODItZDcxZTE5OWEtYWIzZWYzOGMtYWZhZjc0YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.139677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr88c6eve2ph5tde7ycqvsny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdkMWM4YWQtZmZhOTJkNjgtNGIzNTEyMzctMzU5YmYzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.234644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr88c6hy809n38mm8z8njrjp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ3MTljMjQtN2YxYjk0YS1jNjFhY2RjNi1kNDBlNTMwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.316201Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr88c6m95h0dkmg53y5e6vjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhiY2I4NDctNDlkODkyNmUtZjE0MGRiNmMtM2FkYTBmMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.391910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr88c6pv43xs367jhb9f324t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFlM2Y3N2ItY2E1MWM5NjEtODZlYjQ2NzgtODY4YWI0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.467417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr88c6s62rtp38pbtm57prs6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I5NGQyZi1mZTVlZGQxMC0zZGU3Zjc1Ny01MDA4ODUwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.541452Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr88c6vj8tsmr211dv95hdtz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NiMjEyZjEtNjFkNjkyMzktZDc2YzZlMDYtNzgwODkwYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.616183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr88c6xv13f2hhp5bcngd0jg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhMTZmY2MtNWYzYjJhZTAtNjNlMjlhNTUtNjZjMTczOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.688496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr88c708219yw7ry07zefed7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIyZDQzNDctNjVkNjZmY2EtNGY4MDczNi01OTAyZTJjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.775832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr88c72ffjd7tqh8j4ekt2jd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM0ZWM4Y2ItOTkxYmQ2OWUtYzMwY2U3MTYtMzU0ZmYzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.876345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr88c757cybf8r70g57e27db, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U0YWEyYzUtZDUwMjIzOGQtYjBjNzdiOGYtZThmNzc3NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:26.952538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr88c78b59184z91ntmfyet0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIzMjlkZDMtMjZlMzVkOTEtOWY2YzI5YzQtYWVmOWJmNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.065696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr88c7aq8tww8sfmqm60vaxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNkOWMyMzUtZDFjOGFkZDgtOTk3MTYzMWMtY2RiYWVmZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.139292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr88c7e9dystc4jqk5t3aztk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjcyMzQ0OTUtYjMyM2Q2YzYtM2Y4ZWI4ZDUtOWU4ZGY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.210290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr88c7gk47s1tbmdjra694dy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZiNmJlZmUtMTNkM2RkOWEtYmI4MDc0ZGEtZDM4OGYyNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.284219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr88c7js9jrwnfxz267z6g3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg2NGNlZTItNDY2MDAwYjctYmIzNzZjYzItZDBiN2RjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.359881Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr88c7n360nd7y9dq8q23g6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1MjRlYmQtNDEwMGE3OWMtYWEwNDMyMjAtOWMyMjQ1MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.434308Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr88c7qkb6gx0dgyexd2pxv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTlkYmNlZjItZjc3NzViM2EtNzZiYWUyNzctYmE3ZmQ0NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.503910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr88c7ssf0f1ck1t0t8mzegr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkyMjFkY2YtNmIxOWEyODItZmE5OTA1ODQtN2JmMjE5ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.582519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr88c7vzdtg0ed52a14hhsyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNiMTU3MWMtNmU3NzE3ZTQtYzRiYTdjMzctNWFkMWU1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.705561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr88c7ye3xncasrx6g3ag8nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ3MjNjNTMtMjcwNjc5NTktNmQ0OTNkMmEtMWIyZDIwMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.727164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-07T13:52:28.174271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr88c8cfbzxf55br7rp4kzaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBkZGQzZjItMmVkOThjMDAtNzg4NjNkYmQtOGQ1MDUxMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-04-07T13:52:15.194148Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1744033935194115 2025-04-07T13:52:15.667523Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568717542412274:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.669512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568718022458188:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.672520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:15.976041Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:15.990072Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.042342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001829/r3tmp/tmpwCeYgU/pdisk_1.dat 2025-04-07T13:52:16.476283Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-07T13:52:16.487595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.487669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.488814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.488873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.493667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.494163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.495703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62237, node 1 2025-04-07T13:52:16.653144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001829/r3tmp/yandexzjEOk1.tmp 2025-04-07T13:52:16.653170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001829/r3tmp/yandexzjEOk1.tmp 2025-04-07T13:52:16.653301Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001829/r3tmp/yandexzjEOk1.tmp 2025-04-07T13:52:16.653380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.702917Z INFO: TTestServer started on Port 12917 GrpcPort 62237 TClient is connected to server localhost:12917 PQClient connected to localhost:62237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:17.001248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:19.684996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734722281782:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.685089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734722281790:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.685138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.691126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:52:19.740976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568734722281796:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:52:20.242245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.243152Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568735202328294:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.244974Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjgxMGZmYmYtM2YyYWFhNzUtZWZlNWE3YzgtNGMyMGYzNzQ=, ActorId: [1:7490568735202328276:2303], ActorState: ExecuteState, TraceId: 01jr88c0ag9jmfe1g49nxtnz06, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.246949Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.250639Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568734722281831:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.251836Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmRlNWYzNjItNWZlY2RiYjItOGJiNGE5NWEtNTUzMmJmZjQ=, ActorId: [2:7490568734722281780:2282], ActorState: ExecuteState, TraceId: 01jr88c071dvmm16j89pd9dzbb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.252239Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.451520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.620732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.633176Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568718022458188:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.633244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.672766Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568717542412274:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.672830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:62237", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:21.167471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr88c1b59xeywvc1nvcvmdh3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNlOTU2ZTgtYWMyNzA0ZGQtY2ZkZTQzN2UtOGIyYzMxZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568743792263301:2967] === CheckClustersList. Ok 2025-04-07T13:52:26.383481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:62237 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:26.502376Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:62237 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... UEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 115 offset: -1 2025-04-07T13:52:28.569389Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 2025-04-07T13:52:28.608520Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-04-07T13:52:28.609255Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 177 WTime 1744033948606 2025-04-07T13:52:28.609494Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:52:28.626631Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:52:28.626713Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:52:28.626780Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:52:28.627010Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:52:28.627047Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T13:52:28.627154Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:52:28.636756Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-07T13:52:28.637701Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:52:28.636894Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:52:28.636951Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:52:28.637000Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-07T13:52:28.637029Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:52:28.637162Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744033948569 queuesize 0 startOffset 0 2025-04-07T13:52:28.638306Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 19000000 } min_queue_wait_time { nanos: 37000000 } max_queue_wait_time { nanos: 37000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-07T13:52:28.638364Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2025-04-07T13:52:28.638394Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-04-07T13:52:28.638602Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler 2025-04-07T13:52:28.639145Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: try to update token 2025-04-07T13:52:28.639211Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 === AcksHandler has written a message, closing the session 2025-04-07T13:52:28.642888Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|e726137b-193f1f18-b4ab52db-c129f648_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:52:28.643168Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T13:52:28.643871Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:28.643912Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:28.643994Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-07T13:52:28.644237Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-07T13:52:28.644587Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:28.644611Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:28.644658Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-04-07T13:52:28.644962Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-04-07T13:52:28.670168Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-04-07T13:52:28.670512Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1744033948669 2025-04-07T13:52:28.670649Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T13:52:28.682193Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-07T13:52:28.681194Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:52:28.681259Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:52:28.681295Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-07T13:52:28.681472Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-07T13:52:28.688829Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:52:28.689016Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 11000000 } min_queue_wait_time { nanos: 24000000 } max_queue_wait_time { nanos: 24000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-07T13:52:28.689044Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-04-07T13:52:28.689071Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-04-07T13:52:28.690986Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-04-07T13:52:28.691091Z :INFO: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-04-07T13:52:28.691113Z :INFO: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session will now close 2025-04-07T13:52:28.691148Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: aborting 2025-04-07T13:52:28.691535Z :WARNING: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-07T13:52:28.691563Z :DEBUG: [/Root] TraceId [] SessionId [src_id|e726137b-193f1f18-b4ab52db-c129f648_0] MessageGroupId [src_id] Write session: destroy 2025-04-07T13:52:28.692832Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|e726137b-193f1f18-b4ab52db-c129f648_0 grpc read done: success: 0 data: 2025-04-07T13:52:28.692856Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|e726137b-193f1f18-b4ab52db-c129f648_0 grpc read failed 2025-04-07T13:52:28.692886Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|e726137b-193f1f18-b4ab52db-c129f648_0 grpc closed 2025-04-07T13:52:28.692907Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|e726137b-193f1f18-b4ab52db-c129f648_0 is DEAD 2025-04-07T13:52:28.694604Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:52:28.694935Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:28.694972Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490568773857035270:2483] destroyed 2025-04-07T13:52:28.695014Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:52:29.098352Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7490568778152002614:2496], TxId: 281474976710687, task: 1, CA Id [1:7490568778152002612:2496]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:52:29.138959Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7490568778152002614:2496], TxId: 281474976710687, task: 1, CA Id [1:7490568778152002612:2496]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:52:29.200772Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7490568778152002614:2496], TxId: 281474976710687, task: 1, CA Id [1:7490568778152002612:2496]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> ExternalBlobsMultipleChannels::Simple [GOOD] >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::TestForceRestartMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-04-07T13:52:20.757022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:20.757258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:20.757432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe0/r3tmp/tmpk8oyOf/pdisk_1.dat 2025-04-07T13:52:21.128635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.205057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:21.262578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:21.262705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:21.278054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:21.414647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.844228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.844356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:717:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.844920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.849759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:22.065996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:22.432685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88c2aj1db3w3eq3wjyra7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JiMDQzZTMtNTI5MDEwOGEtOWQ2ZDZhOGEtYWI1OGJlMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.507153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88c2xpbernywnbsq13bv8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMzZGIwMy02ZGZlNDJlNi1lYjNiZGY4Zi0yNzRkYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.572330Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88c2zed4zekfm4bmtzx3zk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMTA4YTMtZjM5NDM0ZmQtZjM4ZTVlMTItYjcxYmMxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.637177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88c31hdbcrkdvpd57kw8px, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU0YjM1MGItMmM4MzViMjUtYWMzNDc0NWItNmRlMzcxZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.695790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88c33h4gs2gh86yaaf2w6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UwMTA4ZDMtNWI3YzRmODUtYzU4YzkzNzEtOTVmM2NkZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.761929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88c35ca2ja5tqxmvhm0hbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVhNWZjMTEtZThlZWRkYjYtOTU0NDVmYjctMmFiNWY0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.815940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88c37edf94fkeqfbwyk3xa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc5YTA1MjQtOGRmNjc1OWItYzZkMTVkMTMtOTE0NjIwMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:22.877135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88c3942k3fza8y9mbwr0m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZiM2Q3NTMtODA5ZDVhNjEtMmQwMjYzZjktMzUxOWIyN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.110830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88c3b145n5ttr17yw0adbe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQzYTEyYjctOGZiNDk2Y2ItMTVhYmJhNzUtMmI5M2MwZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.250425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88c3jb2gv2p82qx98xc2gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU0NjQwMTgtYjQ4NTU1NC0xNWYzMThhMC01YWM5NTNiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.315403Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jr88c3pp4mn0rs4qz66wec10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE3ODcwOWMtZmQxNmUwODQtY2IyYTgxNmEtNWRkNDBiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.381609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr88c3rqc5kpyx1pv3kfcepx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTUxMTU5M2ItOWMxNjRhNjYtZTQ2YmY2YmMtZjUyZTg5OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.448907Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr88c3ttch5y987a8vrx32z4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZjZWJiNzAtYzhjMGZmYjItMWZjZWM5YjYtMjUzMTA0YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.519615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr88c3wxfq1x0wvbhrh3r9p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2QyMGM0MTktYmE5YTAyNzAtODM3YmZhMTYtYTgzOTNmZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.588593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr88c3z30ghwj8nds9xgj2gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmMwMWQxY2EtYTE0M2RjLWI0NjlkNTVjLTFmZDljMjA4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.649171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88c418fwgqaadhbsm31vpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE0M2ZmZTAtZjhiNjJmNDQtNzFiMzNhNDYtYzU0NDk2ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.729247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr88c4358z7sjss4yphppc6p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI1ZmM4Yy0xODAwZDAzNS1mNTI1ZWI2Yi0yNWZjNjk0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.828536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr88c45n7embzhgvc76y4fy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM0NTE4M2YtYmFmZDE4ZWMtNjEyNmM1NzMtNGNmOGRlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.896784Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr88c48t72m6ca3kjn3ekyqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY3YmZiNTgtMzNjYjM2ZDktZDk0ZTM5NTYtYjhhYjQ1ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.950758Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr88c4avbdjh81grd0986y76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5NmJiOTUtODBiNGY3ZTMtNmYwOGJiZTItM2UyMjNhNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.015215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr88c4ck7kyvr3ste7phdjdw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM3ODM0YWItOThjMDkzYjgtMzUyZDhjNDUtMzkwMDRhZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.088934Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88c4em35fkzgmnvw4wqday, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZiZWY5YTgtNTViYjM4MzctNDdlZWRmYzQtMzJkNzQzYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.161018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr88c4gy6n8fkq4cbwwkj36m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmNmI3M2UtZjVjMjM4Y2QtMmU0NmJmOTktNGVjM2VkY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.232263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr88c4k55y9zb89599c06n9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0MjdhNzItYTE4ZGY0N2MtYTE1MzkyZGYtNWZjODUzY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.299168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr88c4ndaeetjskkxnz6yskh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY2ZGFiMjAtYTM2MTVhOC02ODRhZTExNC01NmQ2YWRhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.365314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr88c4qf0ya01jtxz6wtrm8z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyNzU5YjktOTZmMTMwNDctYjM5NjUxMmEtOGViOTZmOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.432118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr88c4shdfrn553pvczdjkne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNlNzU2OWQtOWQxYmM1NDktYzYyZjIyNmMtOWJiM2UzYzQ=, CurrentExecutionId: , ... :27.246149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr88c7hed767zk8d2bdv9qg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0OTViYTYtNzY1NDBhOTYtZDc1YjZhZjctZTdmOTg5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.312694Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr88c7kk1qc8w7d7zpy15cr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI5NGFiNWYtNzRlMzQzZjItZTMwYWJkMWMtNDljODczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.376798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr88c7nm56m4jckxje27hacn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA4NTFhNzQtNjk1ZDUyYjktNzY3NDI4NGEtNjYwMDU1MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.439908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr88c7qn4608teryey7h2dhs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ1ZGJiYjUtYTk5ZDhjMzAtODg3OWNlM2MtYmIwMDMwOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:27.505681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr88c7smbhrvx4naj42rkxqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRjZDgxNjUtMzJiYWZmNC1jODA1YWU4Yy04NzgzNTA1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.572690Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88c7vpb469d0j00gyr9ffh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiYzAwY2ItZTExMGNlMS04NDIyOWVjMS0yZmJkYjk3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.668962Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr88c7xvfmbw2z7xvc0c8eje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhZTZiYzYtNjJjNDYwMTUtNGRlNzFiNi0zYzAxMzFj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.739566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr88c80s14kxb0388bje6p36, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMzZjliZDMtNTFlYmRmOTctNTU3ZGRjYTItYzBiNmNhYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.802787Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr88c82z5qm7qcthbz8gw3sc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM5OWU0MTYtMWRhYTQ3YmUtNGI3ZWJiMmMtZWE4OGM0NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.875591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr88c84z511eb66m4q9568br, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYwZmEwNDAtMzU4MjA3MzMtOWJlMDQ1OWMtMWM5ZDdhM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:27.939584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr88c8798thk1t8wkgra74e5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhmYmM3ZGYtNWY0ZDc5YmEtYjU1NjU4ZDEtNWE1NmYyYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.017822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr88c89gb4jrkwq40jf9hkzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmY0ZjBiY2QtMmVlYWU3OTgtNzU3YTQ1MGMtMjk0ZWIwNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.106498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr88c8bpbazdpna95jdctnsq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNlYTUzZS1jZjJiODVmLTc3YmJkZjItYTI1OTk4NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.179300Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr88c8eecmyfr6kv7n57scxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNhYzM1YzUtZjQ5MGVjZDAtZDk2Mjg4ZDUtZTEzNDRhYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.259147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr88c8gqa1kbnmnbrs8qm9az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1ZjBjYWEtYTQ4NTRhMTMtM2MzMzJiZGItZGMzYzhjODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.373097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr88c8kcfd2xgc357jmnr1r4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA3MTY1ZjctOWEyMGI0MDctZWU5MTk4YjItNzJjMzY2NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.434179Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr88c8ps6gr6yd49g96b4c8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI4MWYwZWItMjFjYjNkMmYtODgyNzYwMTktZDQxMzJkYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.523900Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr88c8rpds9s8vy7qhqqr01t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NhYzA3YjAtNmVlYTA3NTQtN2M4ZWY0ODEtNTU4NDExMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.612558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr88c8vte00x9gdb1mgjqyh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjAzYWYzMmUtYTQwMzAzZjctOWJjYjc5ZjMtMTE0ZWQzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.681308Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr88c8yc0wth1ky8cjeh4vqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MxNGU0YTAtMTZhMWNjYzUtOGYwYjMzMjctYjIyMmU1YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.751263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr88c90dfbkk1xwm0zt5zefe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk0YjUzZjEtMzkyMDI0ZDAtNGRjMmZjNDMtOTlmOTRmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.824630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr88c92k9h60j5gwhdq6dmyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI1MzM3MGUtMmVmMDM0MzMtMmRjMzBmMTItYzNlY2JiYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.898160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr88c94xc23q1eh2qe2rdh44, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTU2NzZjOTEtZmJkYjYwZjMtZTQxMDM2NjQtZTVlMDIwYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.965687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr88c97628jbwb0dtr0r73k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM5NTg0ZDAtYmRkYWMyN2MtZDM5MTdlOWQtYTQ0YWY1MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.054527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr88c99afat4d72mf6p14gmj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzcwNGY5OTYtYmI5NjkyODctMTMzNTVlYWQtODExMTMwOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.123119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr88c9c3a18c2a2ey46cwgz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQwMDcxYjMtNmQxODlhODQtZTZhOTZiODItMjA4OTY1ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.191500Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr88c9e74pw03fzbqbm8ezjs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWYzNDYyNTMtYjgwYTVhYTEtN2ZhNTcwNGMtZmFkMGM5N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.252641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr88c9gbekh71bqr6rwdrhb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3Y2M4OWYtNjgzMjQ2YjgtYzI5YmFjZTktNGE0ZTg0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.318709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr88c9j933h6efwb0zhy7avw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkyOGUwNjEtOGVhYzU5ZDUtOTAzZGIxMmUtZDRlZWFjYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.383196Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr88c9md8mqnhdc2s6rxmdg5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE0MGQyMWMtOGMzZDU5MzMtODlmNjkwODEtYTRkMWU4M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.460769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr88c9pb3gp12a3660p7zabs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhhOTA2Y2MtZmM0Y2NiZTYtODE3MDQyYjctMjRiNWNiMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.586251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr88c9s27yv5r3mabhckp88y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwMjVkYjgtZDA2Njc0OTYtZTE0N2RmMGEtNGY1OTA5ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.657317Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr88c9wp8sra0e2zknmyaxbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc0ODUyZGYtYmQyMDM2ZjQtNGJlMjFhZTYtZjJiZTMzOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.752481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr88c9z02g1rnm2eyc6zrrrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkxY2Q2NzAtNGQzOGMxMS0xNGZlN2E3NS03MjdkYmVhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.831947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr88ca1we72cx6a3j2xf30yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRkZWY1OTItMjNhMWQyODAtNWNjYmY2NzYtNjFkNWE4MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.959313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr88ca5299sqa8q0t1gwb0t5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJiMTRhYjEtM2MyYmY0MTEtMWMyYjdmZDMtYjRiODljOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-04-07T13:52:21.978215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:21.978503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:21.978702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb0/r3tmp/tmpVrBABb/pdisk_1.dat 2025-04-07T13:52:22.305993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:22.347073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:22.400562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:22.400714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:22.412312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:22.530488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:22.887132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:22.887255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:717:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:22.887655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:22.892301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:23.455982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:23.831087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88c3b5a9762y60266r1zkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M1NGQyNS04MTg4MDJiNy05Njc2M2E0MC0yZjJkNjZh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.908786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88c4968h5kz6xyvegzykab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1NDgwYzItNmMyZTNmMDMtMTc0NjZlNWYtMzNmZjg5OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:23.975138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88c4b9fyrqhgt1ef9xpg2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EzZWJmOC05NzIzNjQ1My0xYjJjMjQ1YS1mYzk1YmZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.038779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88c4db3y27c039tkfyshhq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRjM2EwMGUtODI5NzliMzItNmNhNDRkMjctMTk2Mzc2NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.102587Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88c4fbdfes3tk0r91ssxw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ3ZDMyNTgtMjg4ZDkwOTItODUxZjI3ZmItYmMzYTQ1OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.168377Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88c4hb1xzrvfxem6bq744g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDI4YTc5ZDYtMzA5NDc3ZjAtOWVlNjlhODctM2IwYzc4YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.228614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88c4kc7ry66srkfyng4rv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRhMWQ0YTctZTlmMjZjZDktYTkwNmViOTYtZmM1MzZiNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.293796Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88c4n917ehbfcexz2yeskt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc0NzkwMDctNzFiZWE1MS1hMzFjNWEwYS1kYTIxYjAyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.353623Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88c4qaahencnmxmwn0n7w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRjMjZiZGQtZTk0NzI3ZDAtYzI3NGEyMmYtOGZkNWM4OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.415056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88c4s51q4ag96mkn65e22e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWUwNzFkNTYtZjc2ZmE3ZGEtMjkzYzliMDYtMTIwM2I4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.475745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jr88c4v3er77ck276n2cfabr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRkOGY4OTUtNGEwODZhMDQtMjc4MjNlZTMtYWUyNjg5ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.539694Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr88c4wz2t7a9fnt0vyh9sdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ4ZWIzYmMtMWE1MzM3ZTktNjZkNzljMTAtNGFkMjI1MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.599648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr88c4yyfx2dz1qgrcjgxvm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjczZDM4NDItOTJiOGJlOWQtNGM2MWNlM2ItOTI5NDA2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.666412Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr88c50vccczsmx7f5ekrpre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I1MGVmNDMtYWZhMmZmZGUtY2FjYjFkNjMtZmM4MDQ2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.728383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr88c52y1fvqyk0wh7px0gak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjY0NThhMWYtYzNlMTMwNy00OWM1ODgxZi1kM2I2YTA3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.784904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88c54x1bv3tdq8s5t2mnzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhYWFlNjktYzZiMGVjYmItNTRhNWQxYzEtNWE5NTM5ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.856335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr88c56m7esmvt0npb29etkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI1MmEwNC1hNzBiYmZhMS1iZmFkZjE1YS1kOTdkY2M0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:24.925748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr88c58w2bhcb59ykvmvz0dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQxZDk3ZDMtNTNmYWUzZDItOWZhNmY4ZDgtOTRhM2IyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.037969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr88c5b2827hp7tf05kw2fgc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJhZjQ0NGUtMzE1NzQyNTItMjZjZDlmNmYtMjNlZjJhZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.116402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr88c5ew7cxshq0n0hwygke4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEyYmVhMzYtYWE3MWQ5ZTItNjk0MmU0MjctNTk2NjI4YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.179754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr88c5h0e27zapcz7k9pkbpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI4YTcwZTMtMWYzZDVjYzYtNmIzMjExNDYtNmYwN2U1YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.259486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88c5k45zm588ch98mnx4fy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU2OTAxMjItMWM0Yzg0NmUtNTY0NDYwM2ItYmFhMjc5Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.328109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr88c5nf5twgj56xjfzxn18y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDViNzg2MDktZWVlYmIxNWYtYWJkMWU0ZDgtYzRhNzMyZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.409618Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr88c5qq5svjr1kd1nj9y18j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTkyNTEzMy1kZGEyZDFkNS0zZTYwMTMyMi1iMWY0NTBhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.473789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr88c5t54tg0zww31sjpaemf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZkNjM3MGMtYTFhMTNmN2MtZjcwMjJhNDAtMmM0ZjE0ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.543761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr88c5w6fz04cerzkyaxq0b6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlhZTg0NzQtNDRjMjMwMDYtY2NkM2VkMi1lMDUyYTcxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:25.603931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr88c5yb3vyb8r9yfatb858n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThkNTgyOC1lZjFlYmIxZC0xMGI5MWViNC1iOGU0YzI3Zg==, CurrentExecut ... 495701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr88c8r6b3n6tds1v3rb3w4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc0OTFkNGEtY2Y3Y2IwYjUtZmIzZjExODUtMjkyNDE3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.557970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr88c8tpe4evx80dt2bfhnff, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM4YjI0N2EtOWQyN2JlMTItMmI2ZDIxZmUtOGY2ZDgyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.615691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr88c8wjbd9wnjhb6b4g5k57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI5YTdmZDQtZDhiYjliNDAtMmZmYmExYTEtMzI5ODlhNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.673687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr88c8yb991937dw7wt6dgmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdmY2Q0OTMtNzUzN2FkN2QtMzRmMGMzNzUtMzRlNWJhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:28.736543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr88c9065b7apw09z5sye4xb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTczZWJkN2MtNmY4MGQyMC03OGUzYTgwNy01NzcyMmI3NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.803506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88c9251xpawejb1x2p4zny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA1YjY5YzUtMTczZDcxMzItN2E4Y2QwNDYtMWViN2Q4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.878410Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr88c94882nab5rapz65qebe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QzYmU3N2ItMWNmNzg2ZmEtOTU1N2MxNDMtZjJjNTI5MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.937130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr88c96k5y6x04re4gxbewpq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZhYjlmMjQtODg3NzFlZGQtNGVlZDE5Y2ItZWMzOTViMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:28.996625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr88c98cdqymbes31n0e2e0f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA2Mjg1NTQtYWY5MzBkODItNTc3M2I0MmYtMjYzN2YwNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.059086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr88c9a8fazvt6zcsgxx5a9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdlOGJhYy04YTRmZmYyNC01ODk2YjdhLWJlMTU4Y2Mx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.124901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr88c9c7dffq8r7gmetg27k9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQwM2ExY2QtMTMzNzE1NGYtYmQyMTMyYy05ODhjZDE2OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.186395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr88c9e920rv2j2tz1fkgsqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzNDUwY2EtNmE5NWQ5ZjItODE5MWMwMmEtODVlMzY2OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.257491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr88c9g7bjrh8wcf8q06m8t9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM1NGYxZDgtNzY2MzEwMzUtZWE1YWIzMGItOWNmMDNhOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.321612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr88c9jeb77jwsja1mqajpkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMwMjk5ZDgtZTI5Y2MxZTctZWYxMjA5OWItMTdiODNhOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.385294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr88c9me8b46va9506nmc693, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk4M2E4YTItZTAxMGQ4NDgtNjQ3ZjdhYWUtZTFmOWM0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.488392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr88c9pddbaerbmrt9h3f2rq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxZDU4MDgtZGQ0YTNlM2QtMzkzOGFlZTktN2Y1NDYyZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.550626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr88c9sncx7kemebnmtj175v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU2OTI4ZjEtYzU0M2E5NzktY2Y3MDNhZWEtY2IwN2YwNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.618357Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr88c9vk5r4c7drftzw1t53v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk4MDlhYmMtYjE3YzBlZWQtOTE3NTliY2MtZTk1ZmQ3YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.679607Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr88c9xqaktma5226qypscxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ2ODhiNmItNTgzNzllZWItYmZiYjRhOTAtMmIzNWRjOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.740702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr88c9zmehktbqh719hppbww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNzM0M2ItOWI3MjFmZDktZjE5YTVkZGQtOTNjYWNlYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.803249Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr88ca1hej3x629k52x2c32p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFlOWQzYWUtM2NhMzE3NmItOWNhMjIxYmEtNmJiYTQwOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.867650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr88ca3fdvew6s1cwpx5y78v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIwYmJkZDgtYjRkZGE1MDItNTExNzM1MzgtMzg1NzY4YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.930413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr88ca5g4f2jed7kgwr6qanh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZkOTA2NDEtOWQxZDZjYmQtNWE0Y2I1ZTgtY2FlMzYzMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:29.993706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr88ca7f8r2g01p367xq828v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzczMTRkMDctYWMzZjIyZC0xZDVlZjkxMy1kNjM1MDMzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.078281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr88ca9eepqw7n6jfdh07yw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FiMjk5ZTEtMTUxNGZlZDUtNWE0Y2Q3MGEtNWVhZjg4ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.142449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr88cac36ntwf0htze03wxbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI3MzYzYzMtNGQ5NTcwMTctODMyMzA2YWYtYWEzZWYzZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.205509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr88cae20erkmazgt4m5c175, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNmNTVkYmEtMTE2NmU2NjEtNDBkMTI1MjItYmY3Yzc4NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.269829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr88cag2b7pc7fvs31xk4asq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQwMzcwMC1iM2YyODliZi0xZjQyN2U5YS1hNjViN2UxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.333254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr88caj2d4fadq1yejxanh1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RhNDBjMTYtYWU3ODhiZC02MTAzMWQ4ZS0yYWUyY2ExOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.395559Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr88cam1c6hfa6f4va3c246j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q5MjYzOTgtN2UzNzdkYzQtNzNjODliODEtZDg5MzQzODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.453535Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr88canzfq9ay7ac64rd0py7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTczMmYyNTQtODA1ZDA1YzctMzk3MDM5NDEtNzM1MDNkY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.514963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr88caqs72sqsxzzt0gcn8yn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMyYTc5ZTYtZTMzMjdhNjUtMzVmNWRlYjYtNDU2ZDE2ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.572971Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr88casq12rnh3x1zfyg1czn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI2NzhiNS1kNzQ1NTQyNS05NGU3OWMxNS1hOGRiYjBlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.650461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr88cavh7bmj5m74gw21j5tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTczODM2ZjQtZDU3NGE4ZWQtNzQ4OTcwNDktODc5MjhlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.744982Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr88caxz2rncnbw8hgy08y1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA4ZmI1OGMtM2RlZDE5ODctNzlkYTg2NzAtMjY3MzJlMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:30.965006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr88cb4975nh2p4yavkyya8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQyNmQzNDQtYjJhYTg0OTYtNzQxNDk2N2UtZWEwODE4NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |85.9%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::ManageRequestsWrong >> TCmsTest::StateRequestUnknownNode >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::StateStorageTwoRings >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> TCmsTest::TestKeepAvailableMode >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TCmsTest::ActionIssuePartialPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 
72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:141:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:144:2057] recipient: [5:143:2166] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:147:2057] recipient: [5:143:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:146:2167] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:216:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:142:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:145:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:146:2057] recipient: [6:144:2166] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:148:2057] recipient: [6:144:2166] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:147:2167] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:217:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:144:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:146:2168] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:150:2057] recipient: [7:146:2168] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:149:2169] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:219:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:144:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:146:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:147:2168] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:150:2057] recipient: [8:147:2168] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:149:2169] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:219:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:145:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:149:2057] recipient: [9:147:2168] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:151:2057] recipient: [9:147:2168] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:150:2169] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:220:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:150:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:152:2173] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:156:2057] recipient: [10:152:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:155:2174] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:225:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:150:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:152:2173] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:156:2057] recipient: [11:152:2173] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:155:2174] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:225:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:151:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:153:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:155:2057] recipient: [12:154:2173] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:157:2057] recipient: [12:154:2173] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:156:2174] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:227:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageAvailabilityMode |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-04-07T13:52:30.351693Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-07T13:52:30.467356Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-07T13:52:30.484450Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-07T13:52:30.536267Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-07T13:52:34.600140Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-07T13:52:34.600223Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-07T13:52:34.600246Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-07T13:52:34.600270Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-07T13:52:34.600294Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-07T13:52:34.600318Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-07T13:52:34.600341Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-07T13:52:34.600366Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> TCmsTest::WalleRebootDownNode >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::StateRequest >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] 
>> TCmsTest::TestKeepAvailableModeScheduled |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::SamePriorityRequest2 [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::ManagePermissions >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::CollectInfo >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: 
[2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:149:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:151:2172] Leader for TabletID 72057594037927937 is [7:154:2173] sender: [7:155:2057] recipient: [7:151:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:154:2173] Leader for TabletID 72057594037927937 is [7:154:2173] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:159:2057] recipient: [10:157:2177] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:161:2057] recipient: [10:157:2177] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:160:2178] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:230:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:164:2057] recipient: [11:162:2182] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:166:2057] recipient: [11:162:2182] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:165:2183] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:235:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... TabletID 72057594037927937 is [44:178:2194] sender: [44:248:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:106:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:139:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:176:2057] recipient: [45:97:2132] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:178:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:180:2057] recipient: [45:179:2195] Leader for TabletID 72057594037927937 is [45:181:2196] sender: [45:182:2057] recipient: [45:179:2195] !Reboot 72057594037927937 (actor [45:105:2137]) rebooted! !Reboot 72057594037927937 (actor [45:105:2137]) tablet resolver refreshed! new actor is[45:181:2196] Leader for TabletID 72057594037927937 is [45:181:2196] sender: [45:229:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:101:2057] recipient: [46:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:101:2057] recipient: [46:99:2133] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:106:2057] recipient: [46:99:2133] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:139:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:178:2057] recipient: [46:97:2132] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:181:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:182:2057] recipient: [46:180:2197] Leader for TabletID 72057594037927937 is [46:183:2198] sender: [46:184:2057] recipient: [46:180:2197] !Reboot 72057594037927937 (actor [46:105:2137]) rebooted! !Reboot 72057594037927937 (actor [46:105:2137]) tablet resolver refreshed! new actor is[46:183:2198] Leader for TabletID 72057594037927937 is [46:183:2198] sender: [46:253:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:101:2057] recipient: [47:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:101:2057] recipient: [47:99:2133] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:106:2057] recipient: [47:99:2133] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:139:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:178:2057] recipient: [47:97:2132] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:180:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:182:2057] recipient: [47:181:2197] Leader for TabletID 72057594037927937 is [47:183:2198] sender: [47:184:2057] recipient: [47:181:2197] !Reboot 72057594037927937 (actor [47:105:2137]) rebooted! !Reboot 72057594037927937 (actor [47:105:2137]) tablet resolver refreshed! new actor is[47:183:2198] Leader for TabletID 72057594037927937 is [47:183:2198] sender: [47:253:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:139:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:181:2057] recipient: [48:97:2132] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:183:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:185:2057] recipient: [48:184:2199] Leader for TabletID 72057594037927937 is [48:186:2200] sender: [48:187:2057] recipient: [48:184:2199] !Reboot 72057594037927937 (actor [48:105:2137]) rebooted! !Reboot 72057594037927937 (actor [48:105:2137]) tablet resolver refreshed! new actor is[48:186:2200] Leader for TabletID 72057594037927937 is [48:186:2200] sender: [48:234:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:139:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:183:2057] recipient: [49:97:2132] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:186:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:187:2057] recipient: [49:185:2201] Leader for TabletID 72057594037927937 is [49:188:2202] sender: [49:189:2057] recipient: [49:185:2201] !Reboot 72057594037927937 (actor [49:105:2137]) rebooted! !Reboot 72057594037927937 (actor [49:105:2137]) tablet resolver refreshed! new actor is[49:188:2202] Leader for TabletID 72057594037927937 is [49:188:2202] sender: [49:258:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:101:2057] recipient: [50:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:101:2057] recipient: [50:99:2133] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:106:2057] recipient: [50:99:2133] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:139:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:183:2057] recipient: [50:97:2132] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:186:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:187:2057] recipient: [50:185:2201] Leader for TabletID 72057594037927937 is [50:188:2202] sender: [50:189:2057] recipient: [50:185:2201] !Reboot 72057594037927937 (actor [50:105:2137]) rebooted! !Reboot 72057594037927937 (actor [50:105:2137]) tablet resolver refreshed! new actor is[50:188:2202] Leader for TabletID 72057594037927937 is [50:188:2202] sender: [50:258:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:101:2057] recipient: [51:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:101:2057] recipient: [51:99:2133] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:106:2057] recipient: [51:99:2133] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:139:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:186:2057] recipient: [51:97:2132] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:189:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:190:2057] recipient: [51:188:2203] Leader for TabletID 72057594037927937 is [51:191:2204] sender: [51:192:2057] recipient: [51:188:2203] !Reboot 72057594037927937 (actor [51:105:2137]) rebooted! !Reboot 72057594037927937 (actor [51:105:2137]) tablet resolver refreshed! new actor is[51:191:2204] Leader for TabletID 72057594037927937 is [51:191:2204] sender: [51:239:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:101:2057] recipient: [52:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:101:2057] recipient: [52:99:2133] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:106:2057] recipient: [52:99:2133] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:139:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:188:2057] recipient: [52:97:2132] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:191:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:192:2057] recipient: [52:190:2205] Leader for TabletID 72057594037927937 is [52:193:2206] sender: [52:194:2057] recipient: [52:190:2205] !Reboot 72057594037927937 (actor [52:105:2137]) rebooted! !Reboot 72057594037927937 (actor [52:105:2137]) tablet resolver refreshed! new actor is[52:193:2206] Leader for TabletID 72057594037927937 is [52:193:2206] sender: [52:263:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:101:2057] recipient: [53:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:101:2057] recipient: [53:99:2133] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:106:2057] recipient: [53:99:2133] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:139:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:188:2057] recipient: [53:97:2132] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:191:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:192:2057] recipient: [53:190:2205] Leader for TabletID 72057594037927937 is [53:193:2206] sender: [53:194:2057] recipient: [53:190:2205] !Reboot 72057594037927937 (actor [53:105:2137]) rebooted! !Reboot 72057594037927937 (actor [53:105:2137]) tablet resolver refreshed! new actor is[53:193:2206] Leader for TabletID 72057594037927937 is [53:193:2206] sender: [53:263:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:101:2057] recipient: [54:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:101:2057] recipient: [54:99:2133] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:106:2057] recipient: [54:99:2133] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:139:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:191:2057] recipient: [54:97:2132] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:194:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:195:2057] recipient: [54:193:2207] Leader for TabletID 72057594037927937 is [54:196:2208] sender: [54:197:2057] recipient: [54:193:2207] !Reboot 72057594037927937 (actor [54:105:2137]) rebooted! !Reboot 72057594037927937 (actor [54:105:2137]) tablet resolver refreshed! 
new actor is[54:196:2208] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:139:2057] recipient: [55:14:2061] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::TestWriteStat >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> TCmsTest::RequestRestartServicesOk >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] Test command err: Trying to start YDB, gRPC: 5182, MsgBus: 24574 2025-04-07T13:49:08.165347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567911541687135:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:08.165379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f3/r3tmp/tmpKwRA16/pdisk_1.dat 2025-04-07T13:49:08.605745Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5182, node 1 2025-04-07T13:49:08.654695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:08.654793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:08.661210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:49:08.724355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-07T13:49:08.724374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:08.724381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:08.724461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24574 TClient is connected to server localhost:24574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:09.306317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.331303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.499079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.669010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.760492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:12.105445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567924426590743:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.111945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.147764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.199706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.228540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.306015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.383340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.449758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.510900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567928721558541:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.510964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.511273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567928721558546:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.515009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:12.535347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567928721558548:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:13.248527Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567911541687135:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:13.249244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:13.649306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14870, MsgBus: 12235 2025-04-07T13:49:16.422533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490567946282942282:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:16.422693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f3/r3tmp/tmpcCK3FY/pdisk_1.dat 2025-04-07T13:49:16.541636Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:16.553365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:16.553438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:16.554984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14870, node 2 2025-04-07T13:49:16.592560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:16.592587Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:16.592594Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:16.592691Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12235 TClient is connected to server localhost:12235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:16.969831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:49:17.004171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.066482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:17.258565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:49:17.331665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:49:19.217219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490567959167845845:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues ... 3:51:44.614471Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568581839771239:3579], TxId: 281474976715802, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=. TraceId : 01jr88axp572t9b0dynsjsdw22. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568581839771234:2461], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:44.616124Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88axp572t9b0dynsjsdw22, Create QueryResponse for error on request, msg: 2025-04-07T13:51:45.372527Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88aydk2zqdserh39kbz31q, Create QueryResponse for error on request, msg: 2025-04-07T13:51:45.685050Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88ayq935jpgwfvgzsnk2s4, Create QueryResponse for error on request, msg: 2025-04-07T13:51:46.481449Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88azfq6ck1r9hxchv4d5ky, Create QueryResponse for error on request, msg: 2025-04-07T13:51:46.792084Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88azss6v114vjqceh21m67, Create QueryResponse for error on request, msg: 2025-04-07T13:51:47.355167Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b0bf65vfn1dg8czz08k9, Create QueryResponse for error on request, msg: 2025-04-07T13:51:47.716535Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b0pk9q5m06c8xttvkm0z, Create QueryResponse for error on request, msg: 2025-04-07T13:51:48.334997Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b19weg6c9jx4dpxw2yvm, Create QueryResponse for error on request, msg: 2025-04-07T13:51:48.656970Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568599019640725:2461] TxId: 281474976715814. Ctx: { TraceId: 01jr88b1kv25p67yeasnen65tb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 306ms } {
: Error: Cancelling after 308ms during execution } ] 2025-04-07T13:51:48.657267Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568599019640730:3669], TxId: 281474976715814, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=. TraceId : 01jr88b1kv25p67yeasnen65tb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568599019640725:2461], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:48.657895Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568599019640729:3668], TxId: 281474976715814, task: 1. Ctx: { TraceId : 01jr88b1kv25p67yeasnen65tb. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568599019640725:2461], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:48.659676Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b1kv25p67yeasnen65tb, Create QueryResponse for error on request, msg: 2025-04-07T13:51:49.522086Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b2czexm5efj3hpdprk6c, Create QueryResponse for error on request, msg: 2025-04-07T13:51:51.128973Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b40tbh19hys20cxht8ya, Create QueryResponse for error on request, msg: 2025-04-07T13:51:51.646220Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b4gc39yfy4bqzyevxsqb, Create QueryResponse for error on request, msg: 2025-04-07T13:51:52.073183Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b4yb787vdb0bsxew3vmv, Create QueryResponse for error on request, msg: 2025-04-07T13:51:52.546194Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b58jbtwa2qw42mx9j038, Create QueryResponse for error on request, msg: 2025-04-07T13:51:53.116384Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b5yceer4aepdhn6re1qr, Create QueryResponse for error on request, msg: 2025-04-07T13:51:53.653170Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b6fd54herze2h6r3ykxy, Create QueryResponse for error on request, msg: 2025-04-07T13:51:54.710401Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b7ge9td53qcbdnqer54q, Create QueryResponse for error on request, msg: 2025-04-07T13:51:55.226344Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b805fwgkt8j7g227zy0e, Create QueryResponse for error on request, msg: 2025-04-07T13:51:55.978932Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b8ncdde7gp9dxmz536jx, Create QueryResponse for error on request, msg: 2025-04-07T13:51:56.329465Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b92rb1qcq52wc98qxt18, Create QueryResponse for error on request, 
msg: 2025-04-07T13:51:57.056055Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88b9rh9ze6bcyrbycw7d6v, Create QueryResponse for error on request, msg: 2025-04-07T13:51:57.412518Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88ba4hfz9cyjrjakakhaqm, Create QueryResponse for error on request, msg: 2025-04-07T13:51:58.608904Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bb9p5n8fywe16g91kzvt, Create QueryResponse for error on request, msg: 2025-04-07T13:51:58.960656Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bbms7k9js6jdf4cxt1bp, Create QueryResponse for error on request, msg: 2025-04-07T13:51:59.313098Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490568646264281605:2461] TxId: 281474976715838. Ctx: { TraceId: 01jr88bbzv9pg68ewxdnk7npjy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 344ms } {
: Error: Cancelling after 340ms during execution } ] 2025-04-07T13:51:59.313473Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568646264281610:3856], TxId: 281474976715838, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=. TraceId : 01jr88bbzv9pg68ewxdnk7npjy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490568646264281605:2461], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:59.321307Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490568646264281609:3855], TxId: 281474976715838, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88bbzv9pg68ewxdnk7npjy. SessionId : ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7490568646264281605:2461], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-07T13:51:59.323410Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bbzv9pg68ewxdnk7npjy, Create QueryResponse for error on request, msg: 2025-04-07T13:52:01.774755Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bea3ebzwavxmd7hn1nrj, Create QueryResponse for error on request, msg: 2025-04-07T13:52:02.457881Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bf1e0htcsey527dgxfwj, Create QueryResponse for error on request, msg: 2025-04-07T13:52:05.786885Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bj7a78q9byerb30a82ea, Create QueryResponse for error on request, msg: 2025-04-07T13:52:08.745527Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTdiMTUwOGMtNDlkMTIyNjQtZjFhN2JjM2UtZGM4ZjdjNjM=, ActorId: [3:7490568418631010501:2461], ActorState: ExecuteState, TraceId: 01jr88bn5heq5twv1y89jrqgkw, Create QueryResponse for error on request, msg: |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::Mirror3dcPermissions [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-04-07T13:52:43.001581Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:43.001688Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-07T13:52:43.001846Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-07T13:52:43.003856Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 25 
InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: 
"vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-04-07T13:52:43.004814Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-30-30" State: UP 
Timestamp: 120027512 } Timestamp: 120027512 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120027512 } 2025-04-07T13:52:43.005069Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-07T13:52:43.005130Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:43.005345Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-07T13:52:43.005430Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-07T13:52:43.005490Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-04-07T13:52:43.005650Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:43.005877Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:43.005934Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-04-07T13:52:43.006211Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-07T13:52:43.006263Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-04-07T13:52:43.006295Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-04-07T13:52:43.006323Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-07T13:52:43.006352Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-07T13:52:43.006383Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-07T13:52:43.006414Z node 25 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-07T13:52:43.006446Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-07T13:52:43.012537Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-07T13:52:43.222675Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-07T13:52:43.222701Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-07T13:52:43.222728Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-07T13:52:43.222758Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-07T13:52:43.222984Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.223685Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.223805Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.223871Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.223937Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.223994Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.224053Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.224142Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-07T13:52:43.224187Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T13:52:43.224371Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-07T13:52:43.224442Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-07T13:52:43.224517Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-04-07T13:52:43.224767Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-07T13:52:43.224977Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-07T13:52:43.225099Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-04-07T13:52:43.225147Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-04-07T13:52:43.225196Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-04-07T13:52:43.238915Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-07T13:52:43.239027Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-07T13:52:43.255077Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:43.255161Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-07T13:52:43.255229Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-07T13:52:43.256119Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:43.256221Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-04-07T13:52:43.256287Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-04-07T13:52:43.256348Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-04-07T13:52:43.256376Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-04-07T13:52:43.256396Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-04-07T13:52:43.256426Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-07T13:52:43.256585Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-04-07T13:52:43.256641Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-07T13:52:43.256816Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:43.257004Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.127512Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-07T13:52:43.257136Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:43.269709Z node 25 :CMS DEBUG: TTxStorePermissions complete 
2025-04-07T13:52:43.270036Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-04-07T13:52:43.270101Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.127512Z 2025-04-07T13:52:43.287957Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-07T13:52:43.288340Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:43.288430Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-07T13:52:43.288518Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-07T13:52:43.289424Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:43.289539Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-04-07T13:52:43.289611Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-04-07T13:52:43.289661Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-07T13:52:43.289818Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-04-07T13:52:43.289886Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-04-07T13:52:43.289968Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:43.290138Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.229024Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-04-07T13:52:43.290252Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:43.302898Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-07T13:52:43.303200Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780229024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-04-07T13:52:43.303837Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-04-07T13:52:43.303896Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-07T13:52:43.303984Z node 25 :CMS 
DEBUG: TTxRemovePermissions Execute 2025-04-07T13:52:43.304077Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-04-07T13:52:43.304200Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-04-07T13:52:43.304257Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-04-07T13:52:43.317678Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-07T13:52:43.317886Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-04-07T13:52:43.318520Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-04-07T13:52:43.318579Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-07T13:52:43.318647Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-07T13:52:43.318764Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-04-07T13:52:43.318868Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-04-07T13:52:43.318912Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-04-07T13:52:43.331581Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-07T13:52:43.331805Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-04-07T13:52:44.450139Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:44.451621Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:44.454984Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:44.455109Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:44.457325Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:44.457789Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# 
[2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e2a/r3tmp/tmpdrbi4W/pdisk_1.dat 2025-04-07T13:52:44.989379Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1288:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:44.989555Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989605Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989630Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989654Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989679Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989704Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:44.989742Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037932033:2:7:0:0:1288:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:44.989829Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1288:1] Marker# BPG33 2025-04-07T13:52:44.989867Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1288:1] Marker# BPG32 2025-04-07T13:52:44.989898Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1288:2] Marker# BPG33 2025-04-07T13:52:44.989917Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1288:2] Marker# BPG32 2025-04-07T13:52:44.989937Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1288:3] Marker# BPG33 2025-04-07T13:52:44.989953Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1288:3] Marker# BPG32 2025-04-07T13:52:44.990099Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:3] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:44.990147Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:2] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:44.990173Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:1] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} 
DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:44.998349Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:44.998529Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:44.998589Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:44.998640Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1288:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:44.998694Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1288:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:45.034681Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:515:2492] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:45.034835Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.034876Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.034901Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.034926Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.034952Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.034976Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.035008Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:45.035075Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2025-04-07T13:52:45.035119Z node 1 :BS_PROXY_PUT DEBUG: 
[e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2025-04-07T13:52:45.035154Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2025-04-07T13:52:45.035174Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2025-04-07T13:52:45.035199Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2025-04-07T13:52:45.035222Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2025-04-07T13:52:45.035336Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.035384Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.035420Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.036723Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:45.037339Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:45.037483Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:45.037574Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:45.037634Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:45.038514Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:45.038550Z node 1 :BS_PROXY INFO: Group# 2181038082 
SetStateUnconfigured Marker# DSP07 2025-04-07T13:52:45.038641Z nod ... 0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.070705Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:9:0:0:241:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.070731Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:9:0:0:241:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:45.070770Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [72057594037932033:2:9:0:0:241:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:45.070844Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:241:1] Marker# BPG33 2025-04-07T13:52:45.070891Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:241:1] Marker# BPG32 2025-04-07T13:52:45.070932Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:241:2] Marker# BPG33 2025-04-07T13:52:45.070959Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:241:2] Marker# BPG32 2025-04-07T13:52:45.070986Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:241:3] Marker# BPG33 2025-04-07T13:52:45.071010Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:241:3] Marker# BPG32 2025-04-07T13:52:45.071151Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:3] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.071215Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:2] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.071256Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:51:2095] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:1] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:45.073230Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:45.073414Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-07T13:52:45.073519Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 
ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:45.073589Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:45.073660Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:45.074602Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:45.074641Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T13:52:45.075954Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:520:2496] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076037Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:521:2497] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076103Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:522:2498] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076180Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:523:2499] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076268Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:524:2500] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076333Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:525:2501] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076432Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:526:2502] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:45.076457Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-07T13:52:45.078752Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.078919Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079101Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079220Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 
WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079275Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079415Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079494Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:45.079522Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-07T13:52:45.079553Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-07T13:52:45.079729Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [1:529:2503] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-07T13:52:45.079778Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-07T13:52:45.079970Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 116602290196327027 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-07T13:52:45.081103Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-07T13:52:45.081174Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-07T13:52:45.081494Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-04-07T13:52:45.085587Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-07T13:52:45.086202Z node 1 :BS_PROXY_BLOCK DEBUG: [8d27cf9df52bfb78] bootstrap ActorId# [1:531:2505] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-07T13:52:45.086248Z node 1 :BS_PROXY_BLOCK DEBUG: [8d27cf9df52bfb78] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-07T13:52:45.086433Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 
0 Domain: 0 VDisk: 0 } IssuerGuid: 9343957118166102989 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-07T13:52:45.087935Z node 1 :BS_PROXY_BLOCK DEBUG: [8d27cf9df52bfb78] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-07T13:52:45.088012Z node 1 :BS_PROXY_BLOCK DEBUG: [8d27cf9df52bfb78] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-07T13:52:45.088464Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] bootstrap ActorId# [1:532:2506] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-07T13:52:45.088539Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-07T13:52:45.088730Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 17013429885058158871 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-07T13:52:45.089789Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-07T13:52:45.089861Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Result# TEvBlockResult {Status# OK} Marker# DSPB04 >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> TBlobStorageWardenTest::TestSendToInvalidGroupId ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-04-07T13:52:16.336773Z :BasicWriteSession INFO: Random seed for debugging is 1744033936336737 2025-04-07T13:52:17.021433Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568720027595756:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:17.021807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:17.057242Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568725609594787:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:17.057351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:17.583661Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:17.589011Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017da/r3tmp/tmp7dBRDY/pdisk_1.dat 2025-04-07T13:52:18.031724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:18.101425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; 2025-04-07T13:52:18.136302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:18.136421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:18.137609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:18.137664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:18.146688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:18.154225Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:18.161088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:18.161752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17177, node 1 2025-04-07T13:52:18.431738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0017da/r3tmp/yandex0R85y4.tmp 2025-04-07T13:52:18.431761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0017da/r3tmp/yandex0R85y4.tmp 2025-04-07T13:52:18.431899Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0017da/r3tmp/yandex0R85y4.tmp 2025-04-07T13:52:18.431969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:18.504824Z INFO: TTestServer started on Port 61970 GrpcPort 17177 TClient is connected to server localhost:61970 PQClient connected to localhost:17177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:18.847845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:21.651788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568742789464152:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.651933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568742789464137:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.652263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.682402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-07T13:52:21.878807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568742789464166:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-07T13:52:22.056598Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568725609594787:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:22.056658Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:22.132995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568720027595756:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:22.133502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:22.150718Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568742789464209:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:22.152567Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU2MjFkYTMtMTc4ZWVmNDUtZDc1YjE4ZTktNTUyMjg5N2I=, ActorId: [2:7490568742789464126:2281], ActorState: ExecuteState, TraceId: 01jr88c230fhvtbvtg740wvn4k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:22.153820Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568741502433149:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:22.155449Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDAxMzRkNGYtY2E1OGY2ODktNzcyNTU2NTMtYWVlZDc4MmU=, ActorId: [1:7490568741502433103:2304], ActorState: ExecuteState, TraceId: 01jr88c276693ver1ym9r9vcx5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:22.155814Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:22.155888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:52:22.155018Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:22.291362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:22.441438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:17177", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:22.773652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88c323eg112p1cs5vtef1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJkOGQ5OGUtZTQ2OGJjZTctNjM3YzljMDItZDllZDY2YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568745797400869:2961] === CheckClustersList. Ok 2025-04-07T13:52:28.118755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:17177 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:28.309815Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:17177 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 ... ors: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:10173 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-07T13:52:43.232710Z node 3 :PERSQUEUE INFO: proxy answer 2025-04-07T13:52:43.241672Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-07T13:52:43.242218Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-07T13:52:43.242270Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:10173 2025-04-07T13:52:43.254064Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:52:43.254659Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T13:52:43.254698Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-07T13:52:43.256936Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:52:43.257069Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:47502 2025-04-07T13:52:43.257091Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:47502 proto=v1 topic=test-topic durationSec=0 2025-04-07T13:52:43.257102Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:52:43.258393Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T13:52:43.258474Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T13:52:43.258482Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:52:43.258487Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:52:43.258499Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:52:43.260088Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:52:43.408242Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T13:52:43.408884Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568836483106587:2469] connected; active server actors: 1 2025-04-07T13:52:43.409027Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T13:52:43.409045Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T13:52:43.409382Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568836483106587:2469] disconnected; active server actors: 1 
2025-04-07T13:52:43.409398Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568836483106587:2469] disconnected no session 2025-04-07T13:52:43.562991Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:52:43.563029Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T13:52:43.563046Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568836483106542:2469] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T13:52:43.563074Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:52:43.564714Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-07T13:52:43.564532Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:43.564579Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7490568836483106622:2469], now have 1 active actors on pipe 2025-04-07T13:52:43.565779Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:43.565816Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:43.565894Z node 4 :PERSQUEUE INFO: new Cookie src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T13:52:43.565981Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T13:52:43.566026Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:43.566783Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:43.566815Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:43.566879Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:43.567321Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 2025-04-07T13:52:43.569342Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744033963569 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:43.569449Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|dba653e1-6ba815cd-36e345bc-8cf2be31_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T13:52:43.569631Z :INFO: [] MessageGroupId [src] SessionId [src|dba653e1-6ba815cd-36e345bc-8cf2be31_0] Write session: close. 
Timeout = 0 ms 2025-04-07T13:52:43.569676Z :INFO: [] MessageGroupId [src] SessionId [src|dba653e1-6ba815cd-36e345bc-8cf2be31_0] Write session will now close 2025-04-07T13:52:43.569709Z :DEBUG: [] MessageGroupId [src] SessionId [src|dba653e1-6ba815cd-36e345bc-8cf2be31_0] Write session: aborting 2025-04-07T13:52:43.570235Z :INFO: [] MessageGroupId [src] SessionId [src|dba653e1-6ba815cd-36e345bc-8cf2be31_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:52:43.570271Z :DEBUG: [] MessageGroupId [src] SessionId [src|dba653e1-6ba815cd-36e345bc-8cf2be31_0] Write session: destroy 2025-04-07T13:52:43.574743Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 grpc read done: success: 0 data: 2025-04-07T13:52:43.574768Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 grpc read failed 2025-04-07T13:52:43.574783Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 grpc closed 2025-04-07T13:52:43.574796Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dba653e1-6ba815cd-36e345bc-8cf2be31_0 is DEAD 2025-04-07T13:52:43.575285Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:52:43.579287Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:43.579333Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490568836483106622:2469] destroyed 2025-04-07T13:52:43.579449Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created 2025-04-07T13:52:44.220815Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568840778073970:2494], TxId: 281474976710689, task: 1, CA Id [3:7490568840778073968:2494]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:52:44.256997Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568840778073970:2494], TxId: 281474976710689, task: 1, CA Id [3:7490568840778073968:2494]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:52:44.305200Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568840778073970:2494], TxId: 281474976710689, task: 1, CA Id [3:7490568840778073968:2494]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:52:44.377638Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568840778073970:2494], TxId: 281474976710689, task: 1, CA Id [3:7490568840778073968:2494]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:52:44.457746Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568840778073970:2494], TxId: 281474976710689, task: 1, CA Id [3:7490568840778073968:2494]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-04-07T13:52:14.764114Z :FallbackToSingleDb INFO: Random seed for debugging is 1744033934764072 2025-04-07T13:52:15.292250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568714970477385:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.292823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:15.391299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568716395041051:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.391340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001861/r3tmp/tmpVaBVns/pdisk_1.dat 2025-04-07T13:52:15.717198Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:15.738527Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.358281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:16.400739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:16.488628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:16.504761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.504885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.509988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.510061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.514865Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.515097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.515609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9550, node 1 2025-04-07T13:52:16.812011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001861/r3tmp/yandexX8zrAB.tmp 2025-04-07T13:52:16.812032Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001861/r3tmp/yandexX8zrAB.tmp 
2025-04-07T13:52:16.812186Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001861/r3tmp/yandexX8zrAB.tmp 2025-04-07T13:52:16.812284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.895751Z INFO: TTestServer started on Port 7459 GrpcPort 9550 TClient is connected to server localhost:7459 PQClient connected to localhost:9550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:17.448352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:20.288786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568714970477385:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.288852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.391383Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568716395041051:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.391439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.504426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568737869877844:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.504514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568737869877825:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.504688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.510976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-07T13:52:20.599002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568737869877853:2288], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-07T13:52:20.990532Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568737869877889:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.991628Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2UxOTUwNzAtMzA5MzhiYTYtZTkzZTQyYTktMzg1ZWQw, ActorId: [2:7490568737869877823:2283], ActorState: ExecuteState, TraceId: 01jr88c10f0f9vft3d7hnm7tdb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.991574Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568736445314834:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.993013Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZjYmJlLTI5ZTQ0NWFlLTMwMmUxNmViLWYyM2EyNjJh, ActorId: [1:7490568736445314808:2305], ActorState: ExecuteState, TraceId: 01jr88c13p76vsgnmpk3740b7y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.994513Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.993769Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.996458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.282835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.473006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9550", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:21.965209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88c2567qkx79b0watpx1he, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdlZmQ1OTgtY2U3NDQ0YS00YzA4MDBjMi05ZjIyZTc2ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568745035249827:2972] === CheckClustersList. Ok 2025-04-07T13:52:28.148915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:9550 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:28.368785Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:9550 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { ... 25-04-07T13:52:42.775455Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:52:42.775463Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:52:42.775481Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:52:42.777803Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:52:42.982062Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T13:52:42.982570Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568830268022820:2465] connected; active server actors: 1 2025-04-07T13:52:42.982702Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T13:52:42.982722Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T13:52:42.983208Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568830268022820:2465] disconnected; active server actors: 1 2025-04-07T13:52:42.983238Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568830268022820:2465] disconnected no session 2025-04-07T13:52:43.127631Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:52:43.127675Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T13:52:43.127689Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568830268022778:2465] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T13:52:43.127727Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:52:43.136224Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744033963136 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:43.132988Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:43.136334Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a9c9bb18-1936911d-478b8c16-2961533f_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T13:52:43.136504Z :INFO: [] MessageGroupId [src] SessionId [src|a9c9bb18-1936911d-478b8c16-2961533f_0] Write session: close. Timeout = 0 ms 2025-04-07T13:52:43.136534Z :INFO: [] MessageGroupId [src] SessionId [src|a9c9bb18-1936911d-478b8c16-2961533f_0] Write session will now close 2025-04-07T13:52:43.136561Z :DEBUG: [] MessageGroupId [src] SessionId [src|a9c9bb18-1936911d-478b8c16-2961533f_0] Write session: aborting 2025-04-07T13:52:43.133586Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-07T13:52:43.135326Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a9c9bb18-1936911d-478b8c16-2961533f_0 2025-04-07T13:52:43.136918Z :INFO: [] MessageGroupId [src] SessionId [src|a9c9bb18-1936911d-478b8c16-2961533f_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:52:43.136953Z :DEBUG: [] MessageGroupId [src] SessionId [src|a9c9bb18-1936911d-478b8c16-2961533f_0] Write session: destroy 2025-04-07T13:52:43.140802Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a9c9bb18-1936911d-478b8c16-2961533f_0 grpc read done: success: 0 data: 2025-04-07T13:52:43.140828Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a9c9bb18-1936911d-478b8c16-2961533f_0 grpc read failed 2025-04-07T13:52:43.140876Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|a9c9bb18-1936911d-478b8c16-2961533f_0 2025-04-07T13:52:43.140903Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a9c9bb18-1936911d-478b8c16-2961533f_0 is DEAD PORTS 8509 31678 2025-04-07T13:52:43.141193Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:52:43.133073Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7490568834562990135:2465], now have 1 active actors on pipe 2025-04-07T13:52:43.134038Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:43.134079Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:43.134201Z node 4 :PERSQUEUE INFO: new Cookie src|a9c9bb18-1936911d-478b8c16-2961533f_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T13:52:43.134299Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-07T13:52:43.134357Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:43.134918Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:43.134939Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:43.134998Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:43.141590Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:43.141630Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490568834562990135:2465] destroyed 2025-04-07T13:52:43.141674Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-04-07T13:52:44.206740Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-04-07T13:52:44.206848Z :INFO: [/Root] [] [5c2d1d94-21709f25-7277e4bc-11ffc778] Open read subsessions to databases: { name: , endpoint: localhost:31678, path: /Root } 2025-04-07T13:52:44.207083Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Starting read session 2025-04-07T13:52:44.208422Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Starting single session 2025-04-07T13:52:44.208963Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-07T13:52:44.209008Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-07T13:52:44.209044Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] Reconnecting session to cluster in 0.000000s 2025-04-07T13:52:44.209235Z :ERROR: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:31678
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:31678. 2025-04-07T13:52:44.209317Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-07T13:52:44.209354Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-07T13:52:44.209518Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:31678" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:31678
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:31678. " } 2025-04-07T13:52:44.210033Z :NOTICE: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:52:44.210097Z :DEBUG: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:31678" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:31678
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:31678. " } 2025-04-07T13:52:44.210252Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Closing read session. Close timeout: 0.010000s 2025-04-07T13:52:44.210299Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:52:44.210358Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:44.210401Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Closing read session. Close timeout: 0.000000s 2025-04-07T13:52:44.210428Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:52:44.210463Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:44.210501Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Closing read session. Close timeout: 0.000000s 2025-04-07T13:52:44.210527Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:52:44.210558Z :INFO: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:44.210637Z :NOTICE: [/Root] [/Root] [b66ce27f-a66179e4-85ad7fd0-33ebc5e5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> BindQueue::Basic >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-04-07T13:52:46.929640Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.933479Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.935762Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.935880Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.938528Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.938981Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e15/r3tmp/tmpbpjD9a/pdisk_1.dat 2025-04-07T13:52:47.447333Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1288:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.447498Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447542Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447568Z node 1 :BS_PROXY_PUT DEBUG: 
[6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447597Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447622Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447648Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037932033:2:7:0:0:1288:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447686Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037932033:2:7:0:0:1288:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.447761Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1288:1] Marker# BPG33 2025-04-07T13:52:47.447805Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1288:1] Marker# BPG32 2025-04-07T13:52:47.447847Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1288:2] Marker# BPG33 2025-04-07T13:52:47.447871Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1288:2] Marker# BPG32 2025-04-07T13:52:47.447905Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1288:3] Marker# BPG33 2025-04-07T13:52:47.447930Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1288:3] Marker# BPG32 2025-04-07T13:52:47.448119Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:3] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.448196Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:2] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.448246Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1288:1] FDS# 1288 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.452020Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.452274Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 
2025-04-07T13:52:47.452374Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1288:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90141 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:47.452450Z node 1 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1288:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:47.452560Z node 1 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1288:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.466018Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-04-07T13:52:47.468014Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-04-07T13:52:47.468264Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-04-07T13:52:47.468400Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-04-07T13:52:46.884291Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.885947Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.887025Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.887131Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.889596Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.891515Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:46.891617Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e16/r3tmp/tmpOU9MpQ/pdisk_1.dat 2025-04-07T13:52:47.447153Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:537:2459] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1306:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.447313Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447343Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447362Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447395Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447416Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447434Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.447466Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:7:0:0:1306:0] 
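The two source-id bookkeeping statements that the PQ_PARTITION_CHOOSER records at 13:52:42.775 earlier in this log (TTableHelper UpdateQuery and UpdateAccessTimeQuery) are easier to read reflowed as standalone YQL. The text below is only a reflowed copy of what the log already shows; the table path, parameter names, and types are taken verbatim from those records, and nothing new is introduced.

    --!syntax_v1
    -- UpdateQuery: insert or refresh the partition binding for a source id
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `/Root/PQ/SourceIdMeta2`
        (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES
        ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    -- UpdateAccessTimeQuery: refresh only the access time of an existing binding
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    UPDATE `/Root/PQ/SourceIdMeta2`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition;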
optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.447523Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1306:1] Marker# BPG33 2025-04-07T13:52:47.447557Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1306:1] Marker# BPG32 2025-04-07T13:52:47.447587Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1306:2] Marker# BPG33 2025-04-07T13:52:47.447605Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1306:2] Marker# BPG32 2025-04-07T13:52:47.447630Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1306:3] Marker# BPG33 2025-04-07T13:52:47.447654Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1306:3] Marker# BPG32 2025-04-07T13:52:47.447840Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:100:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:3] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.447907Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:93:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:2] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.447939Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:114:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:1] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.450262Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.450492Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:47.450578Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:47.450652Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1306:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:47.450735Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1306:0] Status# OK StatusFlags# { 
Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.486725Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] bootstrap ActorId# [1:581:2496] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.486888Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.486927Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.486952Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.486980Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.487010Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.487038Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.487072Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.487141Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2025-04-07T13:52:47.487188Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2025-04-07T13:52:47.487231Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2025-04-07T13:52:47.487258Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2025-04-07T13:52:47.487290Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2025-04-07T13:52:47.487313Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2025-04-07T13:52:47.487466Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:100:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.487531Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:93:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.487582Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:114:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.489748Z node 1 :BS_PROXY_PUT DEBUG: 
[abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.490142Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:47.490235Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:47.490309Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:47.490373Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2025-04-07T13:52:47.491446Z node 1 :BS_PROXY INF ... lse Marker# DSP02 2025-04-07T13:52:47.527577Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T13:52:47.529208Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:586:2500] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529317Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:587:2501] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529416Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:588:2502] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529484Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:589:2503] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529549Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:590:2504] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529645Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:591:2505] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529747Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:582:2497] Create Queue# [1:592:2506] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.529775Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-07T13:52:47.530472Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530548Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] 
QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530637Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530690Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530813Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530851Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530904Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.530921Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-07T13:52:47.530945Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-07T13:52:47.530976Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-07T13:52:47.531660Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:595:2507] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.531773Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.531812Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.531859Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-07T13:52:47.531892Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-07T13:52:47.532024Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:586:2500] NKikimr::TEvBlobStorage::TEvVPut# {ID# 
[1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.534811Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.534905Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-07T13:52:47.534968Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.535388Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:47.535447Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-07T13:52:47.535596Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2025-04-07T13:52:47.536029Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/u6pr/001e16/r3tmp/tmpOU9MpQ//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-07T13:52:47.536737Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-07T13:52:47.536779Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-07T13:52:47.538561Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:599:2104] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.538698Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:600:2105] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.538799Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:601:2106] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.538895Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:602:2107] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.538994Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:603:2108] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.539085Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:604:2109] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.539175Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:605:2110] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.539194Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-07T13:52:47.540210Z node 2 
:BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.540423Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.540613Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.540768Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.540825Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.540862Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.541003Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.541030Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-07T13:52:47.541064Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-07T13:52:47.541380Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TCmsTest::VDisksEviction [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2025-04-07T13:52:47.208330Z node 1 
:BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.209580Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.210990Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.211112Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.214039Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.216108Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.216220Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dfe/r3tmp/tmpUGci9v/pdisk_1.dat 2025-04-07T13:52:47.741866Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:537:2459] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1306:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.741986Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742016Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742034Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742065Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742086Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742103Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:7:0:0:1306:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.742128Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:7:0:0:1306:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.742176Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1306:1] Marker# BPG33 2025-04-07T13:52:47.742221Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1306:1] Marker# BPG32 
2025-04-07T13:52:47.742248Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1306:2] Marker# BPG33 2025-04-07T13:52:47.742269Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1306:2] Marker# BPG32 2025-04-07T13:52:47.742298Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1306:3] Marker# BPG33 2025-04-07T13:52:47.742315Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1306:3] Marker# BPG32 2025-04-07T13:52:47.742463Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:100:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:3] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.742503Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:93:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:2] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.742530Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:114:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1306:1] FDS# 1306 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.744222Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.744348Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:47.744400Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1306:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90283 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:47.744457Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1306:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:47.744512Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1306:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.778658Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] bootstrap ActorId# [1:581:2496] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.778814Z node 1 :BS_PROXY_PUT DEBUG: 
[abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778852Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778878Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778900Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778922Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778944Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.778975Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.779038Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2025-04-07T13:52:47.779078Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2025-04-07T13:52:47.779111Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2025-04-07T13:52:47.779131Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2025-04-07T13:52:47.779160Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2025-04-07T13:52:47.779183Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2025-04-07T13:52:47.779316Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:100:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.779372Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:93:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.779409Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:114:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.782137Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.782455Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] 
received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:47.782548Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:47.782616Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:47.782671Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.783594Z node 1 :BS_PROXY INFO: Group# 218103 ... 52:47.826154Z node 1 :BS_PROXY_PUT NOTICE: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2025-04-07T13:52:47.826534Z node 1 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] bootstrap ActorId# [1:597:2509] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.826628Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.826674Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.826722Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-07T13:52:47.826763Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-07T13:52:47.826864Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:586:2500] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.829640Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.829742Z node 1 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-07T13:52:47.829799Z node 1 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.830347Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-07T13:52:47.830391Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-07T13:52:47.830515Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-07T13:52:47.831049Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/u6pr/001dfe/r3tmp/tmpUGci9v//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-07T13:52:47.831871Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-07T13:52:47.831912Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-07T13:52:47.835794Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:601:2104] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.835918Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:602:2105] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836008Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:603:2106] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836103Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:604:2107] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836199Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:605:2108] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836288Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:606:2109] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836378Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:599:2103] Create Queue# [2:607:2110] targetNodeId# 1 Marker# DSP01 2025-04-07T13:52:47.836402Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-07T13:52:47.837709Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.837812Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.837907Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.838030Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle 
TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.838094Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.838128Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.838228Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-07T13:52:47.838247Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-07T13:52:47.838266Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-07T13:52:47.838368Z node 2 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] bootstrap ActorId# [2:610:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-07T13:52:47.838418Z node 2 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-07T13:52:47.838585Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:601:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 2153675614693384131 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-07T13:52:47.846123Z node 2 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-07T13:52:47.846199Z node 2 :BS_PROXY_BLOCK DEBUG: [1a43693427d0a82b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-07T13:52:47.846518Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-07T13:52:47.846683Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-07T13:52:47.846996Z node 1 :BS_PROXY_PUT INFO: [bba3bffd2e286f4b] bootstrap ActorId# [1:611:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:47.847118Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:47.847159Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:47.847216Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-07T13:52:47.847249Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-07T13:52:47.847374Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:586:2500] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:47.847610Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-07T13:52:47.847861Z node 1 :BS_PROXY_PUT INFO: [bba3bffd2e286f4b] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-07T13:52:47.847951Z node 1 :BS_PROXY_PUT ERROR: [bba3bffd2e286f4b] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-07T13:52:47.848005Z node 1 :BS_PROXY_PUT NOTICE: [bba3bffd2e286f4b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T13:52:47.848347Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:601:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |86.0%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::EmergencyDuringRollingRestart [GOOD] |86.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-04-07T13:52:46.744148Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:46.744250Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-07T13:52:46.744401Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-07T13:52:46.751614Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } 
StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-04-07T13:52:46.752566Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 20 InterconnectPort: 
12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } 2025-04-07T13:52:46.752941Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-04-07T13:52:46.753001Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:46.753208Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-07T13:52:46.753283Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 
2025-04-07T13:52:46.753352Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-07T13:52:46.753533Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:46.753820Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:46.753885Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-07T13:52:46.754233Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-07T13:52:46.754300Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-07T13:52:46.754351Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-07T13:52:46.754398Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-07T13:52:46.754451Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-0 ... ices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541048 } } 2025-04-07T13:52:47.119111Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120541048 } Devices { 
Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541048 } 2025-04-07T13:52:47.119434Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-07T13:52:47.119493Z 
node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-07T13:52:47.119552Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-07T13:52:47.119688Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:47.119844Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:47.119889Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-07T13:52:47.120065Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-04-07T13:52:47.120109Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:47.120187Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-07T13:52:47.120222Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-07T13:52:47.120244Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-07T13:52:47.120268Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-07T13:52:47.120290Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-07T13:52:47.120325Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-04-07T13:52:47.120351Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-04-07T13:52:47.120371Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-07T13:52:47.120813Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121282Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121345Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121387Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121436Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121483Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121515Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121552Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-04-07T13:52:47.121579Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T13:52:47.137285Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-04-07T13:52:47.137595Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-04-07T13:52:47.138219Z node 18 :CMS INFO: User user removes request user-r-3 2025-04-07T13:52:47.138274Z node 18 :CMS DEBUG: Resulting status: OK 2025-04-07T13:52:47.138340Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2025-04-07T13:52:47.138383Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-04-07T13:52:47.138518Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-04-07T13:52:47.153784Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2025-04-07T13:52:47.154048Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> YdbOlapStore::LogPagingAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-04-07T13:52:44.585425Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 
DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-04-07T13:52:44.586052Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-07T13:52:44.622283Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:44.622401Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-07T13:52:44.623582Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: 
"vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2025-04-07T13:52:44.624269Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: 
"vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120028512 } 2025-04-07T13:52:44.624417Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-07T13:52:44.624461Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:44.624532Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks 
state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-07T13:52:44.624564Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-07T13:52:44.624589Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-07T13:52:44.624604Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-07T13:52:44.624619Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-07T13:52:44.624650Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-07T13:52:44.624693Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-07T13:52:44.624716Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... 05Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:44.882982Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-07T13:52:44.883196Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-07T13:52:44.883247Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-07T13:52:44.883290Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-07T13:52:44.883319Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-07T13:52:44.883351Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-07T13:52:44.883385Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-07T13:52:44.883410Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-07T13:52:44.883436Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-07T13:52:44.883708Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884322Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884422Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884623Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 
0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884708Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884769Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884874Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884939Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240028 2025-04-07T13:52:44.884982Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T13:52:44.885213Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-04-07T13:52:44.885279Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-07T13:52:44.885454Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-07T13:52:44.885678Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-04-07T13:52:44.885735Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-04-07T13:52:44.898349Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-04-07T13:52:44.914898Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-07T13:52:44.914992Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-07T13:52:44.915067Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-04-07T13:52:44.916000Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:44.916116Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-04-07T13:52:44.916164Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-04-07T13:52:44.916293Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-04-07T13:52:44.916441Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT 
AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-07T13:52:44.937281Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-04-07T13:52:44.937499Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-04-07T13:52:44.937878Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-07T13:52:44.950755Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-04-07T13:52:44.951031Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-04-07T13:52:45.014421Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-07T13:52:45.014492Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T13:52:45.014580Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-07T13:52:45.014800Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-07T13:52:45.014848Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-07T13:52:45.014890Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-07T13:52:45.014927Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-07T13:52:45.014960Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-07T13:52:45.014990Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-07T13:52:45.015010Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-07T13:52:45.015031Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-07T13:52:45.015239Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015701Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015808Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 
ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015854Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015895Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015928Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015957Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.015985Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300028 2025-04-07T13:52:45.016014Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T13:52:45.016160Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-07T13:52:45.016202Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-07T13:52:45.016342Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-07T13:52:45.016511Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-04-07T13:52:45.016539Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [FAIL] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-04-07T13:52:47.916905Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.921998Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# 
[2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.924066Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.924204Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.928268Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:47.928770Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ddc/r3tmp/tmpvCLyu8/pdisk_1.dat 2025-04-07T13:52:49.100643Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:49.106087Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:49.107934Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:49.108016Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:49.108398Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:49.109061Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ddc/r3tmp/tmpcAs0qM/pdisk_1.dat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-04-07T13:52:48.081081Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.082017Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.083605Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.083746Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): 
FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.086342Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.088287Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:52:48.088450Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dda/r3tmp/tmpdFVhWg/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-04-07T13:52:48.712691Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001dda/r3tmp/tmpYqK4UF//new_pdisk.dat": no such file. PDiskId# 1001 2025-04-07T13:52:48.713416Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001dda/r3tmp/tmpYqK4UF//new_pdisk.dat": no such file. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001dda/r3tmp/tmpYqK4UF//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10083356734329128526 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1001 2025-04-07T13:52:48.734580Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:536:2458] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:370:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T13:52:48.734698Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:48.734731Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# 
BPG51 2025-04-07T13:52:48.734750Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:48.734770Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:48.734787Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:48.734811Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-07T13:52:48.734845Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:7:0:0:370:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T13:52:48.734900Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:370:1] Marker# BPG33 2025-04-07T13:52:48.734948Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:370:1] Marker# BPG32 2025-04-07T13:52:48.734977Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:370:2] Marker# BPG33 2025-04-07T13:52:48.734995Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:370:2] Marker# BPG32 2025-04-07T13:52:48.735016Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:370:3] Marker# BPG33 2025-04-07T13:52:48.735032Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:370:3] Marker# BPG32 2025-04-07T13:52:48.735148Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:100:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:3] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:48.735190Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:93:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:2] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:48.735221Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:114:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:1] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T13:52:48.742664Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-07T13:52:48.742874Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# 
[2000000:1:0:1:0] Marker# BPP01 2025-04-07T13:52:48.742957Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-07T13:52:48.743043Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:370:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-07T13:52:48.743103Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:370:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-04-07T13:52:15.754420Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1744033935754390 2025-04-07T13:52:16.165567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568719393180744:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:16.166176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.379496Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.419645Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001826/r3tmp/tmpbaHgNr/pdisk_1.dat 2025-04-07T13:52:16.458406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:16.635202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.635270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.644203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:16.646477Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.648754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.649619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.649668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.679619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13116, node 1 2025-04-07T13:52:16.798446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001826/r3tmp/yandexShuIiV.tmp 2025-04-07T13:52:16.798466Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001826/r3tmp/yandexShuIiV.tmp 2025-04-07T13:52:16.798602Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001826/r3tmp/yandexShuIiV.tmp 2025-04-07T13:52:16.798732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.841999Z INFO: TTestServer started on Port 24643 GrpcPort 13116 TClient is connected to server localhost:24643 PQClient connected to localhost:13116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:17.168735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:20.048228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568732278083461:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.048502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.053286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568736573050793:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.067346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:20.141232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568736573050795:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:20.484841Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568736745052211:2287], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.485139Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjU1MWIzZi1kZGM3Y2IzYS05OTM1ODEyYy1kNGIxNzI4, ActorId: [2:7490568736745052171:2281], ActorState: ExecuteState, TraceId: 01jr88c0qkdaxgr7ykhewhb5x8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.487249Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.489810Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568736573050890:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.491610Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDU0NjlmZjktNDc4NDJhMjktNzI2ZDU1Zi05NjUzMTQzZQ==, ActorId: [1:7490568732278083458:2302], ActorState: ExecuteState, TraceId: 01jr88c0gb8aey6e38bz08xpjg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.491959Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.494100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.629222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.818803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13116", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:21.160429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568719393180744:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.160486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:21.198140Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88c1fae93766bpasem77qj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRlNDBhMGUtZmEzYzdiYzEtZjIxOTViNDMtNTI0NzZlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568740868018633:3012] === CheckClustersList. Ok 2025-04-07T13:52:26.325751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:13116 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:26.532801Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:13116 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710678 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with to ... ecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:20872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:43.905461Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:20872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:44.419025Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-07T13:52:44.431371Z :DEBUG: [] MessageGroupId [src] SessionId [] 
Write session: try to update token 2025-04-07T13:52:44.431736Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-07T13:52:44.431769Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:20872 2025-04-07T13:52:44.445593Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:52:44.446090Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T13:52:44.446135Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-07T13:52:44.446435Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:52:44.446533Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:39050 2025-04-07T13:52:44.446552Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:39050 proto=v1 topic=test-topic durationSec=0 2025-04-07T13:52:44.446562Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:52:44.447909Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T13:52:44.448007Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T13:52:44.448016Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:52:44.448024Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:52:44.448039Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:52:44.450164Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:52:44.681809Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T13:52:44.682669Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T13:52:44.682697Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T13:52:44.682323Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe 
[3:7490568838493491402:2482] connected; active server actors: 1 2025-04-07T13:52:44.683142Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568838493491402:2482] disconnected; active server actors: 1 2025-04-07T13:52:44.683164Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568838493491402:2482] disconnected no session 2025-04-07T13:52:44.842562Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:52:44.842604Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T13:52:44.842629Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568838493491344:2482] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T13:52:44.842659Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:52:44.843355Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-04-07T13:52:44.843380Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:44.843411Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7490568838493491428:2482], now have 1 active actors on pipe 2025-04-07T13:52:44.843438Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:44.843464Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:44.843542Z node 3 :PERSQUEUE INFO: new Cookie src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T13:52:44.843646Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T13:52:44.843716Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:44.843814Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:52:44.843829Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:52:44.843904Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:52:44.844030Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 2025-04-07T13:52:44.849782Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744033964849 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:52:44.849899Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T13:52:44.850108Z :INFO: [] MessageGroupId [src] SessionId [src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0] Write session: close. Timeout = 0 ms 2025-04-07T13:52:44.850153Z :INFO: [] MessageGroupId [src] SessionId [src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0] Write session will now close 2025-04-07T13:52:44.850192Z :DEBUG: [] MessageGroupId [src] SessionId [src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0] Write session: aborting 2025-04-07T13:52:44.850600Z :INFO: [] MessageGroupId [src] SessionId [src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:52:44.850678Z :DEBUG: [] MessageGroupId [src] SessionId [src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0] Write session: destroy 2025-04-07T13:52:44.856183Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 grpc read done: success: 0 data: 2025-04-07T13:52:44.856218Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 grpc read failed 2025-04-07T13:52:44.856248Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 grpc closed 2025-04-07T13:52:44.856266Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9218f691-8c99cd84-f7ed8cbe-ebd5dc43_0 is DEAD 2025-04-07T13:52:44.857015Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:52:44.858899Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:44.858945Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490568838493491428:2482] destroyed 2025-04-07T13:52:44.859016Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-04-07T13:52:44.918226Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. 
Description: 2025-04-07T13:52:46.762265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:52:46.762296Z node 3 :IMPORT WARN: Table profiles were not loaded >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TTopicYqlTest::DropTopicYql >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> TCmsTest::RequestRestartServicesNoUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Pick Delete nodeId# 22 Disable nodeId# 12 Add nodeId# 101 Delete nodeId# 78 Pick Delete nodeId# 11 Enable nodeId# 12 Disable nodeId# 52 Pick Pick Pick Pick Enable nodeId# 52 Pick Pick Add nodeId# 102 Delete nodeId# 16 Delete nodeId# 34 Disable nodeId# 47 Add nodeId# 103 Add nodeId# 104 Add nodeId# 105 Delete nodeId# 49 Pick Pick Add nodeId# 106 Enable nodeId# 47 Disable nodeId# 102 Disable nodeId# 93 Enable nodeId# 93 Pick Enable nodeId# 102 Delete nodeId# 2 Pick Pick Add nodeId# 107 Disable nodeId# 4 Pick Enable nodeId# 4 Disable nodeId# 46 Enable nodeId# 46 Delete nodeId# 26 Delete nodeId# 8 Delete nodeId# 31 Delete nodeId# 10 Disable nodeId# 21 Add nodeId# 108 Pick Delete nodeId# 3 Pick Pick Disable nodeId# 40 Pick Enable nodeId# 21 Delete nodeId# 33 Add nodeId# 109 Pick Disable nodeId# 72 Pick Delete nodeId# 101 Pick Delete nodeId# 56 Disable nodeId# 68 Pick Disable nodeId# 32 Delete nodeId# 35 Add nodeId# 110 Delete nodeId# 73 Enable nodeId# 40 Disable nodeId# 103 Disable nodeId# 107 Delete nodeId# 18 Add nodeId# 111 Enable nodeId# 32 Disable nodeId# 71 Disable nodeId# 102 Pick Enable nodeId# 72 Add nodeId# 112 Pick Disable nodeId# 81 Enable nodeId# 103 Pick Delete nodeId# 65 Pick Add nodeId# 113 Delete nodeId# 106 Add nodeId# 114 Delete nodeId# 90 Delete nodeId# 17 Pick Pick Delete nodeId# 111 Disable nodeId# 23 Pick Disable nodeId# 82 Pick Pick Add nodeId# 115 Add nodeId# 116 Add nodeId# 117 Disable nodeId# 7 Add nodeId# 118 Add nodeId# 119 Add nodeId# 120 Pick Pick Disable nodeId# 20 Enable nodeId# 81 Disable nodeId# 75 Add nodeId# 121 Enable nodeId# 7 Pick Disable nodeId# 55 Delete nodeId# 96 Delete nodeId# 32 Disable nodeId# 113 Add nodeId# 122 Enable nodeId# 107 Disable nodeId# 60 Add nodeId# 123 Delete nodeId# 102 Enable nodeId# 113 Enable nodeId# 55 Add nodeId# 124 Pick Disable nodeId# 37 Add nodeId# 125 Add nodeId# 126 Delete nodeId# 72 Delete nodeId# 67 Pick Add nodeId# 127 Add nodeId# 128 Disable nodeId# 127 Pick Pick Pick Enable nodeId# 71 Enable nodeId# 68 Enable nodeId# 127 Enable nodeId# 75 Disable nodeId# 120 Disable nodeId# 36 Delete nodeId# 57 Disable nodeId# 47 Add nodeId# 129 Disable nodeId# 83 Enable nodeId# 20 Enable nodeId# 83 Enable nodeId# 60 Delete nodeId# 103 Disable nodeId# 70 Disable nodeId# 9 Disable nodeId# 40 Enable nodeId# 47 Add nodeId# 130 Enable nodeId# 37 Disable nodeId# 95 Disable nodeId# 100 Pick Pick Pick Disable nodeId# 126 Delete nodeId# 76 Enable nodeId# 82 Pick Delete nodeId# 80 Enable nodeId# 120 Add nodeId# 131 Enable nodeId# 23 Delete nodeId# 63 Pick Delete nodeId# 112 Enable nodeId# 40 Pick Disable nodeId# 108 Add nodeId# 132 Enable nodeId# 9 Pick Disable nodeId# 44 Pick Add nodeId# 133 Delete nodeId# 20 Enable nodeId# 95 Disable nodeId# 12 
Disable nodeId# 23 Enable nodeId# 12 Enable nodeId# 70 Pick Enable nodeId# 100 Enable nodeId# 36 Enable nodeId# 23 Delete nodeId# 71 Delete nodeId# 108 Delete nodeId# 94 Add nodeId# 134 Disable nodeId# 50 Enable nodeId# 44 Disable nodeId# 114 Enable nodeId# 126 Pick Add nodeId# 135 Delete nodeId# 74 Add nodeId# 136 Disable nodeId# 66 Enable nodeId# 50 Add nodeId# 137 Delete nodeId# 87 Add nodeId# 138 Enable nodeId# 114 Disable nodeId# 45 Enable nodeId# 45 Add nodeId# 139 Disable nodeId# 109 Enable nodeId# 109 Disable nodeId# 42 Pick Pick Enable nodeId# 42 Delete nodeId# 55 Enable nodeId# 66 Disable nodeId# 120 Delete nodeId# 25 Pick Delete nodeId# 47 Pick Pick Disable nodeId# 139 Disable nodeId# 81 Delete nodeId# 19 Enable nodeId# 120 Pick Delete nodeId# 107 Add nodeId# 140 Enable nodeId# 81 Delete nodeId# 93 Add nodeId# 141 Disable nodeId# 114 Add nodeId# 142 Disable nodeId# 82 Enable nodeId# 114 Enable nodeId# 82 Add nodeId# 143 Add nodeId# 144 Delete nodeId# 36 Delete nodeId# 23 Add nodeId# 145 Enable nodeId# 139 Add nodeId# 146 Disable nodeId# 9 Disable nodeId# 50 Delete nodeId# 97 Delete nodeId# 44 Pick Disable nodeId# 86 Enable nodeId# 50 Add nodeId# 147 Enable nodeId# 9 Pick Enable nodeId# 86 Pick Pick Add nodeId# 148 Add nodeId# 149 Delete nodeId# 51 Pick Pick Delete nodeId# 46 Add nodeId# 150 Delete nodeId# 62 Delete nodeId# 79 Add nodeId# 151 Add nodeId# 152 Add nodeId# 153 Disable nodeId# 129 Enable nodeId# 129 Delete nodeId# 40 Add nodeId# 154 Add nodeId# 155 Delete nodeId# 118 Delete nodeId# 84 Add nodeId# 156 Disable nodeId# 38 Disable nodeId# 82 Disable nodeId# 83 Enable nodeId# 83 Add nodeId# 157 Pick Disable nodeId# 1 Enable nodeId# 38 Disable nodeId# 14 Delete nodeId# 9 Pick Pick Enable nodeId# 1 Delete nodeId# 4 Enable nodeId# 14 Add nodeId# 158 Pick Enable nodeId# 82 Delete nodeId# 5 Disable nodeId# 116 Add nodeId# 159 Add nodeId# 160 Disable nodeId# 159 Enable nodeId# 159 Disable nodeId# 126 Enable nodeId# 126 Pick Disable nodeId# 85 Delete nodeId# 12 Enable nodeId# 116 Enable nodeId# 85 Pick Delete nodeId# 64 Delete nodeId# 7 Pick Delete nodeId# 146 Delete nodeId# 124 Delete nodeId# 91 Pick Add nodeId# 161 Pick Pick Pick Delete nodeId# 75 Delete nodeId# 92 Delete nodeId# 109 Disable nodeId# 123 Add nodeId# 162 Pick Delete nodeId# 144 Delete nodeId# 156 Pick Pick Pick Add nodeId# 163 Add nodeId# 164 Disable nodeId# 98 Add nodeId# 165 Delete nodeId# 60 Delete nodeId# 77 Pick Delete nodeId# 117 Disable nodeId# 43 Disable nodeId# 152 Delete nodeId# 45 Add nodeId# 166 Add nodeId# 167 Enable nodeId# 43 Pick Enable nodeId# 123 Delete nodeId# 128 Add nodeId# 168 Enable nodeId# 98 Add nodeId# 169 Delete nodeId# 142 Pick Enable nodeId# 152 Delete nodeId# 125 Disable nodeId# 89 Pick Delete nodeId# 68 Disable nodeId# 28 Pick Pick Enable nodeId# 89 Enable nodeId# 28 Pick Disable nodeId# 138 Disable nodeId# 99 Enable nodeId# 99 Pick Delete nodeId# 150 Delete nodeId# 98 Add nodeId# 170 Delete nodeId# 126 Delete nodeId# 58 Delete nodeId# 88 Enable nodeId# 138 Disable nodeId# 27 Add nodeId# 171 Disable nodeId# 39 Add nodeId# 172 Delete nodeId# 14 Pick Add nodeId# 173 Enable nodeId# 39 Pick Pick Add nodeId# 174 Disable nodeId# 82 Delete nodeId# 1 Disable nodeId# 86 Delete nodeId# 86 Pick Pick Pick Disable nodeId# 70 Enable nodeId# 70 Enable nodeId# 82 Disable nodeId# 161 Pick Disable nodeId# 53 Pick Add nodeId# 175 Disable nodeId# 24 Delete nodeId# 167 Pick Delete nodeId# 70 Delete nodeId# 152 Enable nodeId# 161 Add nodeId# 176 Add nodeId# 177 Disable nodeId# 145 Pick Enable nodeId# 
145 Delete nodeId# 29 Pick Enable nodeId# 24 Disable nodeId# 169 Enable nodeId# 169 Pick Enable nodeId# 27 Pick Disable nodeId# 82 Pick Enable nodeId# 53 Add nodeId# 178 Enable nodeId# 82 Add nodeId# 179 Disable nodeId# 163 Disable nodeId# 53 Delete nodeId# 163 Enable nodeId# 53 Disable nodeId# 89 Pick Delete nodeId# 151 Pick Pick Enable nodeId# 89 Disable nodeId# 27 Enable nodeId# 27 Add nodeId# 180 Disable nodeId# 59 Pick Add nodeId# 181 Pick Add nodeId# 182 Enable nodeId# 59 Pick Delete nodeId# 82 Delete nodeId# 164 Pick Add nodeId# 183 Disable nodeId# 54 Disable nodeId# 114 Enable nodeId# 114 Disable nodeId# 13 Disable nodeId# 48 Add nodeId# 184 Enable nodeId# 13 Add nodeId# 185 Pick Pick Enable nodeId# 54 Pick Disable nodeId# 66 Enable nodeId# 48 Add nodeId# 186 Delete nodeId# 114 Delete nodeId# 38 Enable nodeId# 66 Pick Pick Pick Pick Add nodeId# 187 Pick Pick Disable nodeId# 41 Disable nodeId# 30 Add nodeId# 188 Pick Enable nodeId# 30 Enable nodeId# 41 Add nodeId# 189 Disable nodeId# 149 Disable nodeId# 139 Enable nodeId# 139 Delete nodeId# 81 Pick Pick Enable nodeId# 149 Disable nodeId# 50 Disable nodeId# 131 Disable nodeId# 61 Add nodeId# 190 Disable nodeId# 85 Add nodeId# 191 Add nodeId# 192 Pick Enable nodeId# 61 Pick Delete nodeId# 155 Pick Delete nodeId# 129 Disable nodeId# 28 Pick Enable nodeId# 131 Delete nodeId# 191 Delete nodeId# 66 Disable nodeId# 143 Delete nodeId# 174 Delete nodeId# 185 Pick Add nodeId# 193 Disable nodeId# 110 Disable nodeId# 30 Pick Enable nodeId# 110 Pick Add nodeId# 194 Add nodeId# 195 Disable nodeId# 127 Disable nodeId# 192 Disable nodeId# 188 Enable nodeId# 28 Delete nodeId# 153 Add nodeId# 196 Pick Add nodeId# 197 Add nodeId# 198 Delete nodeId# 158 Pick Pick Enable nodeId# 85 Enable nodeId# 188 Disable nodeId# 120 Delete nodeId# 168 Add nodeId# 199 Pick Pick Disable nodeId# 100 Add nodeId# 200 Pick Enable nodeId# 192 Enable nodeId# 50 Disable nodeId# 141 Pick Pick Disable nodeId# 123 Pick Pick Add nodeId# 201 Enable nodeId# 30 Pick Disable nodeId# 115 Delete nodeId# 182 Pick Disable nodeId# 157 Delete nodeId# 188 Pick Pick Delete nodeId# 6 Add nodeId# 202 Pick Add nodeId# 203 Add nodeId# 204 Disable nodeId# 175 Add nodeId# 205 Add nodeId# 206 Add nodeId# 207 Pick Pick Add nodeId# 208 Delete nodeId# 48 Enable nodeId# 123 Pick Delete nodeId# 202 Enable nodeId# 127 Delete nodeId# 192 Pick Delete nodeId# 165 Disable nodeId# 39 Add nodeId# 209 Delete nodeId# 193 Disable nodeId# 161 Pick Pick Add nodeId# 210 Disable nodeId# 160 Disable nodeId# 123 Disable nodeId# 105 Add nodeId# 211 Pick Delete nodeId# 83 Enable nodeId# 120 Disable nodeId# 119 Enable nodeId# 100 Disable nodeId# 130 Add nodeId# 212 Disable nodeId# 149 Disable nodeId# 206 Delete nodeId# 201 Delete nodeId# 50 Add nodeId# 213 Delete nodeId# 169 Add nodeId# 214 Enable nodeId# 130 Disable nodeId# 172 Delete nodeId# 159 Enable nodeId# 143 Add nodeId# 215 Enable nodeId# 172 Add nodeId# 216 Disable nodeId# 122 Add nodeId# 217 Enable nodeId# 149 Delete nodeId# 30 Add nodeId# 218 Pick Add nodeId# 219 Delete nodeId# 184 Enable nodeId# 39 Enable nodeId# 206 Pick Pick Enable nodeId# 161 Enable nodeId# 175 Disable nodeId# 190 Enable nodeId# 119 Enable nodeId# 122 Enable nodeId# 123 Disable nodeId# 130 Disable nodeId# 53 Delete nodeId# 160 Disable nodeId# 214 Delete nodeId# 215 Delete nodeId# 205 Add nodeId# 220 Add nodeId# 221 Enable nodeId# 190 Pick Delete nodeId# 53 Delete nodeId# 221 Delete nodeId# 123 Add nodeId# 222 Delete nodeId# 204 Disable nodeId# 187 Pick Pick Pick Pick Enable nodeId# 130 
Enable nodeId# 115 Pick Add nodeId# 223 Enable nodeId# 214 Delete nodeId# 147 Delete nodeId# 143 Disable nodeId# 110 Disable nodeId# 122 Delete nodeId# 189 Pick Delete nodeId# 99 Delete nodeId# 113 Disable nodeId# 138 Enable nodeId# 110 Delete nodeId# 161 Disable nodeId# 209 Add nodeId# 224 Pick Enable nodeId# 105 Disable nodeId# 222 Pick Disable nodeId# 176 Disable nodeId# 208 Disable nodeId# 13 Delete nodeId# 43 Enable nodeId# 122 Delete nodeId# 195 Disable nodeId# 140 Pick Pick Add nodeId# 225 Disable nodeId# 127 Enable nodeId# 222 Enable nodeId# 138 Delete nodeId# 223 Pick Add nodeId# 226 Pick Enable nodeId# 141 Enable nodeId# 187 Disable nodeId# 222 Pick Pick Disable nodeId# 100 Delete nodeId# 139 Enable nodeId# 208 Delete nodeId# 154 Add nodeId# 227 Enable nodeId# 13 Delete nodeId# 116 Enable nodeId# 157 Pick Pick Pick Enable nodeId# 100 Disable nodeId# 13 Delete nodeId# 141 Pick Pick Pick Pick Delete nodeId# 140 Disable nodeId# 138 Pick Disable nodeId# 213 Add nodeId# 228 Pick Delete nodeId# 197 Disable nodeId# 59 Pick Enable nodeId# 209 Pick Enable nodeId# 13 Disable nodeId# 179 Pick Add nodeId# 229 Enable nodeId# 59 Delete nodeId# 186 Pick Delete nodeId# 226 Delete nodeId# 209 Disable nodeId# 61 Delete nodeId# 120 Add nodeId# 230 Delete nodeId# 61 Pick Enable nodeId# 138 Pick Enable nodeId# 127 Add nodeId# 231 Delete nodeId# 220 Disable nodeId# 21 Disable nodeId# 218 Disable nodeId# 231 Add nodeId# 232 Add nodeId# 233 Disable nodeId# 183 Disable nodeId# 210 Pick Pick Pick Disable nodeId# 42 Delete nodeId# 207 Delete nodeId# 59 Pick Add nodeId# 234 Disable nodeId# 171 Enable nodeId# 231 Pick Enable nodeId# 222 Delete nodeId# 219 Add nodeId# 235 Pick Pick Delete nodeId# 213 Pick Pick Pick Add nodeId# 236 Disable nodeId# 37 Delete nodeId# 212 Add nodeId# 237 Enable nodeId# 42 Add nodeId# 238 Enable nodeId# 218 Disable nodeId# 238 Pick Disable nodeId# 132 Delete nodeId# 37 Pick Delete nodeId# 180 Add nodeId# 239 Disable nodeId# 237 Delete nodeId# 227 Pick Enable nodeId# 179 Add nodeId# 240 Disable nodeId# 224 Delete nodeId# 210 Enable nodeId# 132 Disable nodeId# 172 Delete nodeId# 173 Pick Enable nodeId# 21 Pick ... 
Enable nodeId# 20378 Enable nodeId# 20333 Pick Add nodeId# 20406 Add nodeId# 20407 Disable nodeId# 20291 Pick Delete nodeId# 20380 Pick Add nodeId# 20408 Enable nodeId# 20332 Add nodeId# 20409 Add nodeId# 20410 Pick Add nodeId# 20411 Add nodeId# 20412 Pick Disable nodeId# 20322 Delete nodeId# 20349 Disable nodeId# 20411 Disable nodeId# 20310 Pick Add nodeId# 20413 Delete nodeId# 20201 Enable nodeId# 20322 Enable nodeId# 20411 Pick Disable nodeId# 20267 Delete nodeId# 20375 Delete nodeId# 20320 Disable nodeId# 20214 Pick Add nodeId# 20414 Pick Pick Add nodeId# 20415 Enable nodeId# 20310 Add nodeId# 20416 Delete nodeId# 20337 Enable nodeId# 20267 Enable nodeId# 20291 Delete nodeId# 20397 Delete nodeId# 20332 Enable nodeId# 20214 Disable nodeId# 20298 Enable nodeId# 20298 Enable nodeId# 20344 Add nodeId# 20417 Pick Add nodeId# 20418 Add nodeId# 20419 Disable nodeId# 20398 Delete nodeId# 20336 Delete nodeId# 20323 Disable nodeId# 20342 Delete nodeId# 20342 Pick Add nodeId# 20420 Add nodeId# 20421 Add nodeId# 20422 Disable nodeId# 20401 Delete nodeId# 20413 Delete nodeId# 20252 Delete nodeId# 20278 Disable nodeId# 20254 Pick Delete nodeId# 20385 Enable nodeId# 20254 Delete nodeId# 20238 Enable nodeId# 20398 Disable nodeId# 20360 Delete nodeId# 20389 Enable nodeId# 20360 Delete nodeId# 20366 Enable nodeId# 20401 Disable nodeId# 20219 Delete nodeId# 20388 Add nodeId# 20423 Pick Enable nodeId# 20219 Add nodeId# 20424 Enable nodeId# 20305 Pick Pick Disable nodeId# 20395 Delete nodeId# 20305 Disable nodeId# 20404 Disable nodeId# 20408 Add nodeId# 20425 Pick Delete nodeId# 20411 Disable nodeId# 20299 Enable nodeId# 20408 Disable nodeId# 20214 Disable nodeId# 20409 Delete nodeId# 20387 Pick Delete nodeId# 20327 Delete nodeId# 20267 Delete nodeId# 20286 Add nodeId# 20426 Delete nodeId# 20421 Pick Disable nodeId# 20374 Add nodeId# 20427 Pick Enable nodeId# 20395 Add nodeId# 20428 Add nodeId# 20429 Pick Pick Enable nodeId# 20409 Pick Enable nodeId# 20214 Add nodeId# 20430 Add nodeId# 20431 Disable nodeId# 20376 Add nodeId# 20432 Enable nodeId# 20299 Disable nodeId# 20401 Pick Delete nodeId# 20371 Disable nodeId# 20378 Delete nodeId# 20335 Disable nodeId# 20396 Disable nodeId# 20310 Disable nodeId# 20299 Add nodeId# 20433 Delete nodeId# 20275 Delete nodeId# 20430 Disable nodeId# 20214 Pick Enable nodeId# 20404 Delete nodeId# 20377 Delete nodeId# 20355 Disable nodeId# 20412 Pick Add nodeId# 20434 Enable nodeId# 20378 Delete nodeId# 20373 Pick Delete nodeId# 20396 Pick Add nodeId# 20435 Add nodeId# 20436 Disable nodeId# 20288 Pick Disable nodeId# 20382 Enable nodeId# 20412 Delete nodeId# 20326 Add nodeId# 20437 Disable nodeId# 20314 Delete nodeId# 20219 Delete nodeId# 20426 Enable nodeId# 20314 Add nodeId# 20438 Delete nodeId# 20434 Delete nodeId# 20408 Pick Disable nodeId# 20333 Disable nodeId# 20428 Pick Disable nodeId# 20265 Pick Disable nodeId# 20346 Disable nodeId# 20393 Enable nodeId# 20333 Disable nodeId# 20431 Enable nodeId# 20428 Pick Pick Add nodeId# 20439 Disable nodeId# 20378 Delete nodeId# 20438 Disable nodeId# 20427 Add nodeId# 20440 Pick Disable nodeId# 20428 Delete nodeId# 20423 Delete nodeId# 20344 Delete nodeId# 20404 Enable nodeId# 20265 Pick Delete nodeId# 20437 Pick Disable nodeId# 20359 Disable nodeId# 20439 Disable nodeId# 20347 Delete nodeId# 20284 Delete nodeId# 20418 Add nodeId# 20441 Enable nodeId# 20299 Pick Enable nodeId# 20439 Disable nodeId# 20360 Pick Pick Pick Enable nodeId# 20346 Disable nodeId# 20420 Disable nodeId# 20250 Pick Enable nodeId# 20360 Delete nodeId# 20333 
Enable nodeId# 20347 Disable nodeId# 20406 Enable nodeId# 20310 Delete nodeId# 20322 Pick Delete nodeId# 20431 Pick Delete nodeId# 20435 Add nodeId# 20442 Pick Pick Pick Pick Delete nodeId# 20360 Disable nodeId# 20383 Delete nodeId# 20401 Pick Enable nodeId# 20250 Pick Pick Pick Disable nodeId# 20291 Enable nodeId# 20378 Enable nodeId# 20374 Disable nodeId# 20367 Enable nodeId# 20406 Disable nodeId# 20398 Add nodeId# 20443 Enable nodeId# 20427 Delete nodeId# 20361 Disable nodeId# 20441 Pick Add nodeId# 20444 Add nodeId# 20445 Add nodeId# 20446 Disable nodeId# 20444 Add nodeId# 20447 Disable nodeId# 20343 Add nodeId# 20448 Pick Delete nodeId# 20314 Pick Add nodeId# 20449 Disable nodeId# 20449 Disable nodeId# 20432 Enable nodeId# 20393 Pick Enable nodeId# 20291 Add nodeId# 20450 Pick Enable nodeId# 20383 Disable nodeId# 20299 Add nodeId# 20451 Disable nodeId# 20424 Enable nodeId# 20288 Enable nodeId# 20432 Disable nodeId# 20451 Add nodeId# 20452 Disable nodeId# 20254 Enable nodeId# 20449 Enable nodeId# 20444 Disable nodeId# 20409 Add nodeId# 20453 Add nodeId# 20454 Disable nodeId# 20403 Pick Delete nodeId# 20415 Add nodeId# 20455 Disable nodeId# 20378 Add nodeId# 20456 Disable nodeId# 20432 Pick Enable nodeId# 20420 Delete nodeId# 20439 Add nodeId# 20457 Enable nodeId# 20343 Pick Enable nodeId# 20451 Disable nodeId# 20447 Disable nodeId# 20417 Pick Delete nodeId# 20422 Disable nodeId# 20427 Add nodeId# 20458 Delete nodeId# 20442 Disable nodeId# 20399 Add nodeId# 20459 Add nodeId# 20460 Add nodeId# 20461 Add nodeId# 20462 Pick Pick Disable nodeId# 20414 Add nodeId# 20463 Disable nodeId# 20444 Add nodeId# 20464 Delete nodeId# 20428 Disable nodeId# 20464 Add nodeId# 20465 Pick Enable nodeId# 20359 Delete nodeId# 20393 Disable nodeId# 20343 Add nodeId# 20466 Disable nodeId# 20406 Add nodeId# 20467 Pick Add nodeId# 20468 Pick Delete nodeId# 20250 Delete nodeId# 20465 Disable nodeId# 20436 Add nodeId# 20469 Delete nodeId# 20416 Pick Disable nodeId# 20420 Enable nodeId# 20367 Add nodeId# 20470 Pick Delete nodeId# 20452 Pick Pick Add nodeId# 20471 Delete nodeId# 20450 Delete nodeId# 20447 Add nodeId# 20472 Disable nodeId# 20453 Pick Delete nodeId# 20254 Pick Enable nodeId# 20464 Disable nodeId# 20341 Delete nodeId# 20449 Disable nodeId# 20383 Disable nodeId# 20291 Disable nodeId# 20448 Disable nodeId# 20367 Delete nodeId# 20399 Pick Pick Disable nodeId# 20469 Pick Pick Enable nodeId# 20444 Disable nodeId# 20467 Add nodeId# 20473 Delete nodeId# 20291 Enable nodeId# 20403 Pick Pick Enable nodeId# 20376 Enable nodeId# 20299 Add nodeId# 20474 Disable nodeId# 20394 Pick Delete nodeId# 20383 Pick Add nodeId# 20475 Add nodeId# 20476 Disable nodeId# 20346 Add nodeId# 20477 Enable nodeId# 20394 Enable nodeId# 20420 Delete nodeId# 20445 Add nodeId# 20478 Enable nodeId# 20341 Enable nodeId# 20378 Disable nodeId# 20440 Enable nodeId# 20414 Delete nodeId# 20382 Pick Pick Enable nodeId# 20469 Add nodeId# 20479 Add nodeId# 20480 Add nodeId# 20481 Enable nodeId# 20467 Disable nodeId# 20476 Add nodeId# 20482 Pick Delete nodeId# 20475 Enable nodeId# 20436 Disable nodeId# 20460 Disable nodeId# 20482 Pick Delete nodeId# 20432 Pick Disable nodeId# 20473 Pick Add nodeId# 20483 Enable nodeId# 20398 Delete nodeId# 20407 Delete nodeId# 20443 Add nodeId# 20484 Enable nodeId# 20440 Pick Pick Delete nodeId# 20419 Pick Enable nodeId# 20453 Delete nodeId# 20459 Disable nodeId# 20403 Enable nodeId# 20448 Disable nodeId# 20347 Pick Pick Enable nodeId# 20417 Disable nodeId# 20472 Delete nodeId# 20265 Add nodeId# 20485 Delete 
nodeId# 20478 Add nodeId# 20486 Enable nodeId# 20403 Delete nodeId# 20427 Add nodeId# 20487 Add nodeId# 20488 Enable nodeId# 20346 Enable nodeId# 20473 Enable nodeId# 20214 Disable nodeId# 20394 Disable nodeId# 20468 Enable nodeId# 20367 Add nodeId# 20489 Pick Add nodeId# 20490 Delete nodeId# 20480 Pick Add nodeId# 20491 Disable nodeId# 20398 Enable nodeId# 20409 Pick Enable nodeId# 20472 Pick Enable nodeId# 20394 Pick Delete nodeId# 20310 Enable nodeId# 20398 Pick Pick Disable nodeId# 20486 Enable nodeId# 20424 Delete nodeId# 20482 Add nodeId# 20492 Pick Delete nodeId# 20409 Delete nodeId# 20367 Delete nodeId# 20370 Disable nodeId# 20473 Add nodeId# 20493 Disable nodeId# 20487 Disable nodeId# 20467 Enable nodeId# 20406 Disable nodeId# 20403 Pick Add nodeId# 20494 Delete nodeId# 20298 Delete nodeId# 20446 Delete nodeId# 20458 Pick Disable nodeId# 20457 Pick Enable nodeId# 20473 Disable nodeId# 20398 Delete nodeId# 20469 Add nodeId# 20495 Add nodeId# 20496 Pick Add nodeId# 20497 Delete nodeId# 20457 Disable nodeId# 20471 Pick Add nodeId# 20498 Pick Disable nodeId# 20417 Enable nodeId# 20487 Disable nodeId# 20454 Add nodeId# 20499 Add nodeId# 20500 Delete nodeId# 20444 Enable nodeId# 20471 Add nodeId# 20501 Delete nodeId# 20494 Disable nodeId# 20424 Pick Add nodeId# 20502 Enable nodeId# 20398 Delete nodeId# 20456 Disable nodeId# 20363 Enable nodeId# 20476 Disable nodeId# 20429 Add nodeId# 20503 Enable nodeId# 20403 Enable nodeId# 20363 Disable nodeId# 20448 Pick Enable nodeId# 20486 Disable nodeId# 20496 Disable nodeId# 20406 Delete nodeId# 20288 Enable nodeId# 20406 Enable nodeId# 20496 Enable nodeId# 20347 Delete nodeId# 20496 Pick Add nodeId# 20504 Delete nodeId# 20485 Add nodeId# 20505 Disable nodeId# 20481 Pick Delete nodeId# 20499 Pick Enable nodeId# 20468 Enable nodeId# 20417 Disable nodeId# 20410 Disable nodeId# 20504 Add nodeId# 20506 Delete nodeId# 20489 Disable nodeId# 20433 Add nodeId# 20507 Pick Pick Add nodeId# 20508 Add nodeId# 20509 Disable nodeId# 20507 Enable nodeId# 20441 Enable nodeId# 20460 Add nodeId# 20510 Disable nodeId# 20403 Add nodeId# 20511 Disable nodeId# 20341 Pick Pick Delete nodeId# 20346 Pick Add nodeId# 20512 Pick Disable nodeId# 20491 Pick Disable nodeId# 20441 Pick Pick Add nodeId# 20513 Disable nodeId# 20484 Delete nodeId# 20433 Disable nodeId# 20420 Enable nodeId# 20448 Pick Disable nodeId# 20417 Enable nodeId# 20454 Disable nodeId# 20374 Enable nodeId# 20343 Add nodeId# 20514 Add nodeId# 20515 Pick Delete nodeId# 20436 Add nodeId# 20516 Enable nodeId# 20420 Disable nodeId# 20501 Add nodeId# 20517 Pick Add nodeId# 20518 Pick Add nodeId# 20519 Delete nodeId# 20299 Enable nodeId# 20467 Enable nodeId# 20429 Enable nodeId# 20403 Add nodeId# 20520 Pick Delete nodeId# 20471 Delete nodeId# 20455 Pick Pick Disable nodeId# 20347 Enable nodeId# 20441 Delete nodeId# 20468 Disable nodeId# 20403 Disable nodeId# 20460 Delete nodeId# 20412 Enable nodeId# 20403 Delete nodeId# 20483 Delete nodeId# 20464 Delete nodeId# 20440 Add nodeId# 20521 Disable nodeId# 20506 Delete nodeId# 20476 Enable nodeId# 20506 Add nodeId# 20522 Delete nodeId# 20479 Disable nodeId# 20467 Pick Pick Delete nodeId# 20512 Pick Disable nodeId# 20492 Delete nodeId# 20451 Disable nodeId# 20420 Enable nodeId# 20492 Add nodeId# 20523 Pick Disable nodeId# 20395 Delete nodeId# 20341 Delete nodeId# 20503 Add nodeId# 20524 Pick Enable nodeId# 20507 Delete nodeId# 20521 Delete nodeId# 20214 Add nodeId# 20525 Add nodeId# 20526 Pick Disable nodeId# 20520 Enable nodeId# 20424 Disable nodeId# 20477 Disable 
nodeId# 20376 Enable nodeId# 20347 Pick Disable nodeId# 20524 Pick Pick Delete nodeId# 20488 Pick Pick Add nodeId# 20527 Delete nodeId# 20509 Delete nodeId# 20513 Pick Disable nodeId# 20424 Add nodeId# 20528 Delete nodeId# 20506 Enable nodeId# 20420 Delete nodeId# 20498 Pick Disable nodeId# 20510 Enable nodeId# 20460 Pick Enable nodeId# 20481 Add nodeId# 20529 Disable nodeId# 20472 Delete nodeId# 20529 Delete nodeId# 20522 Enable nodeId# 20484 Disable nodeId# 20525 Disable nodeId# 20454 Enable nodeId# 20410 Disable nodeId# 20414 Pick Add nodeId# 20530 Enable nodeId# 20472 Enable nodeId# 20525 Disable nodeId# 20394 Disable nodeId# 20515 Add nodeId# 20531 Disable nodeId# 20495 Disable nodeId# 20453 Pick Disable nodeId# 20531 Enable nodeId# 20424 Pick Delete nodeId# 20516 Add nodeId# 20532 Delete nodeId# 20518 Add nodeId# 20533 Enable nodeId# 20417 Enable nodeId# 20376 Add nodeId# 20534 Add nodeId# 20535 Disable nodeId# 20343 Pick Disable nodeId# 20406 Pick Add nodeId# 20536 Pick Pick Add nodeId# 20537 Add nodeId# 20538 Add nodeId# 20539 Delete nodeId# 20519 Pick Pick Enable nodeId# 20501 Delete nodeId# 20424 Add nodeId# 20540 Enable nodeId# 20467 Add nodeId# 20541 Delete nodeId# 20504 Disable nodeId# 20448 Pick Delete nodeId# 20441 Enable nodeId# 20414 Add nodeId# 20542 Enable nodeId# 20524 Delete nodeId# 20453 Enable nodeId# 20395 >> YdbProxy::CreateTopic >> TCmsTest::SamePriorityRequest [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks+StreamLookup >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2025-04-07T13:52:05.253193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:52:05.253299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:05.253359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:52:05.253396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:52:05.253435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:52:05.253466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:52:05.253521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:05.253843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:05.402963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T13:52:05.403036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:05.428862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:05.429104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:52:05.429263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:52:05.462260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:52:05.463014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:52:05.463621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:05.463953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:52:05.481103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:05.482338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:05.482456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:05.482617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:52:05.482672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:05.482710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:52:05.482836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.509102Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:341:2060] recipient: [1:17:2064] 2025-04-07T13:52:05.773507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:52:05.773760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.773985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:52:05.774193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:52:05.774268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.785696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:05.785841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T13:52:05.786045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.786111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:52:05.786158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:52:05.786189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:52:05.797189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.797274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:52:05.797309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:52:05.805567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.805631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.805698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:05.805752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:52:05.809659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:52:05.814173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:52:05.814409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:52:05.815384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:05.815523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:52:05.815568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:05.815826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:52:05.815885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:05.816049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:52:05.816132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:52:05.829970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:05.830034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:05.830223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:05.830271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:308:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:52:05.830618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:05.830665Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:52:05.830758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:52:05.830791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:05.830877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:52:05.830938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:05.830994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:52:05.831024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:52:05.831096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:52:05.831133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:52:05.831166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:52:05.833071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:05.833231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:05.833315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:52:05.833372Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:52:05.833407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:52:05.833518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ablet# 72057594046678944 2025-04-07T13:52:51.238362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:52:51.238395Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:52:51.238503Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:52:51.238532Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:52:51.238562Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-04-07T13:52:51.238648Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:964:2740] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-07T13:52:51.238730Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:964:2740]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-07T13:52:51.238755Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T13:52:51.238782Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-04-07T13:52:51.238834Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-04-07T13:52:51.239045Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T13:52:51.239070Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T13:52:51.239104Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:52:51.239140Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T13:52:51.239236Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:52:51.239261Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 2/3 2025-04-07T13:52:51.239287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-07T13:52:51.239318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-07T13:52:51.239514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:52:51.239557Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:52:51.239580Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2025-04-07T13:52:51.239632Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:954:2732] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-07T13:52:51.239718Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:954:2732]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-07T13:52:51.239750Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T13:52:51.239778Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-04-07T13:52:51.239820Z node 7 :TX_DATASHARD DEBUG: 
72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-04-07T13:52:51.239974Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T13:52:51.240005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T13:52:51.240041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:52:51.240067Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-04-07T13:52:51.240127Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:52:51.240151Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-07T13:52:51.240171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:52:51.240201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-07T13:52:51.240258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:572:2403] message: TxId: 104 2025-04-07T13:52:51.240307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:52:51.240347Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T13:52:51.240381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T13:52:51.240560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-07T13:52:51.240600Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-07T13:52:51.240619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-07T13:52:51.240645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-04-07T13:52:51.240679Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-07T13:52:51.240696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-07T13:52:51.240730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-07T13:52:51.243128Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:52:51.243197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:52:51.243254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:572:2403] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-04-07T13:52:51.243346Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T13:52:51.243378Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1015:2779] 2025-04-07T13:52:51.243528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1017:2781], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:51.243551Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:51.243569Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T13:52:51.244163Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:546:2100], Recipient [7:229:2151] 2025-04-07T13:52:51.244197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:51.245998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:52:51.246437Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T13:52:51.246502Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T13:52:51.252389Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:52:51.255790Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:52:51.256020Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-04-07T13:52:51.256091Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T13:52:51.256468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T13:52:51.256499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T13:52:51.256862Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1079:2843], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:51.256914Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:51.256950Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 
2025-04-07T13:52:51.257126Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:572:2403], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-04-07T13:52:51.257165Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T13:52:51.257229Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:52:51.257330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:52:51.257365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1077:2841] 2025-04-07T13:52:51.257522Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1079:2843], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:51.257572Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:51.257612Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds |86.1%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> QueryStats::Ranges [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable |86.1%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession >> YdbProxy::AlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-04-07T13:50:05.282905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568155608851755:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:05.283037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019ce/r3tmp/tmp60P0Y1/pdisk_1.dat 2025-04-07T13:50:05.946021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:05.980312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:05.980407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:06.023324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14520, node 1 2025-04-07T13:50:06.271175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:06.271197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:06.271208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:06.271300Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4026 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:06.676775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:06.682380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:06.693702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:06.701974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:06.702181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:06.702196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:50:06.705790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:06.705819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:06.707653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:06.708426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:06.712154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033806753, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:06.712181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:06.712416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:06.716935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:06.717105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:06.717158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:06.717251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:06.717308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:06.717368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:06.719872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:06.719932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:06.719946Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:06.720029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:4026 2025-04-07T13:50:07.066200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:50:07.069226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:50:07.069493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:07.069514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:50:07.077648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-07T13:50:07.077911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:07.078203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:07.078288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-07T13:50:07.080154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:07.080194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:07.080217Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:07.080443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:50:07.080478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:50:07.080488Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:07.084308Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:50:07.112522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:07.112826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:07.129397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:07.129659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:07.129705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:50:07.142719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:07.313646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:50:07.313892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:50:07.314168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:50:07.314284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:50:07.314410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:50:07.314540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:50:07.314641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:50:07.314781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490568164198787259:2291];tablet_id=72075186224037888;process=TTxIn ... P_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452649:3660], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452657:3666], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.951577Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452670:3675], task: 64, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1350 DurationUs: 89000 Tasks { TaskId: 64 CpuTimeUs: 553 FinishTimeMs: 1744033967934 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 519 WaitInputTimeUs: 65295 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967845 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.951613Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452649:3660], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452657:3666], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.951685Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452649:3660], task: 49, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1214 DurationUs: 105000 Tasks { TaskId: 49 CpuTimeUs: 567 FinishTimeMs: 1744033967934 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 34 BuildCpuTimeUs: 533 WaitInputTimeUs: 16176 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967829 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.951700Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7490568851509452649:3660] 2025-04-07T13:52:47.951733Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452657:3666], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.951802Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452657:3666], task: 55, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1645 DurationUs: 75000 Tasks { TaskId: 55 CpuTimeUs: 738 FinishTimeMs: 1744033967934 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 35 BuildCpuTimeUs: 703 WaitInputTimeUs: 51028 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967859 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.951814Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [28:7490568851509452657:3666] 2025-04-07T13:52:47.951841Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.951895Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452649:3660], task: 49, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1214 DurationUs: 105000 Tasks { TaskId: 49 CpuTimeUs: 567 FinishTimeMs: 1744033967934 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 533 WaitInputTimeUs: 16176 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967829 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.951918Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.951974Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452657:3666], task: 55, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1645 DurationUs: 75000 Tasks { TaskId: 55 CpuTimeUs: 738 FinishTimeMs: 1744033967934 ComputeCpuTimeUs: 35 BuildCpuTimeUs: 703 WaitInputTimeUs: 51028 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967859 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.951995Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7490568851509452650:3661], CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.952059Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452650:3661], task: 50, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1624 DurationUs: 105000 Tasks { TaskId: 50 CpuTimeUs: 580 FinishTimeMs: 1744033967935 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 52 BuildCpuTimeUs: 528 WaitInputTimeUs: 80872 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967830 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.952075Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7490568851509452650:3661] 2025-04-07T13:52:47.952112Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452629:3645], 2025-04-07T13:52:47.952174Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568851509452650:3661], task: 50, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1624 DurationUs: 105000 Tasks { TaskId: 50 CpuTimeUs: 580 FinishTimeMs: 1744033967935 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 528 WaitInputTimeUs: 80872 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033967830 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:47.952197Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568851509452584:3601] TxId: 281474976710770. Ctx: { TraceId: 01jr88ctkd81k82wev7afarqv4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZWU3MTZkZTItNjQzODMzODctMzBiOWU3YWQtZDI3ZTI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7490568851509452673:3677], CA [28:7490568851509452658:3667], CA [28:7490568851509452664:3671], CA [28:7490568851509452647:3659], CA [28:7490568851509452651:3662], CA [28:7490568851509452672:3676], CA [28:7490568851509452653:3663], CA [28:7490568851509452629:3645], >> KqpProxy::InvalidSessionID >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TableCreation::ConcurrentTableCreation >> ScriptExecutionsTest::RunCheckLeaseStatus >> TableCreation::MultipleTablesCreation >> TableCreation::SimpleTableCreation >> KqpProxy::PingNotExistedSession >> KqpProxy::NoLocalSessionExecution >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> KqpProxy::PassErrroViaSessionActor >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2025-04-07T13:52:53.030234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568880548948935:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:53.037655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001362/r3tmp/tmpIutCEV/pdisk_1.dat 2025-04-07T13:52:53.409567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:53.417851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:53.417977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:53.421544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28043 TServer::EnableGrpc on GrpcPort 11388, node 1 2025-04-07T13:52:53.618041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:53.618067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:53.618077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:53.618168Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28043 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:53.934536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:55.786294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:52:55.924626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks+StreamLookup [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |86.1%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbProxy::MakeDirectory |86.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> YdbProxy::DropTable >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 |86.1%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> YdbProxy::ListDirectory |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |86.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> YdbOlapStore::LogTsRangeDescending [GOOD] >> YdbProxy::DescribePath >> YdbProxy::RemoveDirectory |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> YdbProxy::DescribeConsumer [GOOD] |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |86.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> YdbProxy::CreateTable >> BasicUsage::ReadMirrored [GOOD] >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |86.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |86.2%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-04-07T13:52:52.473499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568872982537711:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:52.473712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001370/r3tmp/tmpDflezv/pdisk_1.dat 2025-04-07T13:52:52.857874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:52.858030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:52.859382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:52.883119Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9193 TServer::EnableGrpc on GrpcPort 24371, node 1 2025-04-07T13:52:53.254601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:53.254629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:53.254642Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:53.254731Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:9193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:53.743929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:56.186357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568890655674112:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001370/r3tmp/tmpWl49AO/pdisk_1.dat 2025-04-07T13:52:56.308778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:56.429836Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:56.467236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:56.467319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:56.481520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63494 TServer::EnableGrpc on GrpcPort 5625, node 2 2025-04-07T13:52:56.913443Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:56.913468Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:56.913479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:56.913573Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:57.399812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |86.2%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-04-07T13:52:15.242657Z :PropagateSessionClosed INFO: Random seed for debugging is 1744033935242631 2025-04-07T13:52:15.755399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568716166104252:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.853928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:15.968780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568717213391853:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001828/r3tmp/tmpwfZycA/pdisk_1.dat 2025-04-07T13:52:16.141309Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.142926Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.161873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.362219Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:16.385550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.385714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.388243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.388329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:52:16.392941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.394703Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.396559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18830, node 1 2025-04-07T13:52:16.529511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001828/r3tmp/yandexhlfV0Y.tmp 2025-04-07T13:52:16.529534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001828/r3tmp/yandexhlfV0Y.tmp 2025-04-07T13:52:16.529663Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001828/r3tmp/yandexhlfV0Y.tmp 2025-04-07T13:52:16.529755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.579291Z INFO: TTestServer started on Port 20750 GrpcPort 18830 TClient is connected to server localhost:20750 PQClient connected to localhost:18830 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:16.866475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:52:16.952939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:19.515712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734393261208:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.515821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734393261197:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.526505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.534000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:52:19.624872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568734393261211:2286], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:52:19.946640Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568733345974362:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:19.947840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:52:19.957944Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568734393261246:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:19.959628Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzQ5MGZjNWMtNzcxOWM5NjItNWQ5NjAzN2MtMzhhZTVhMjg=, ActorId: [2:7490568734393261195:2281], ActorState: ExecuteState, TraceId: 01jr88c01d4nj0yhjyhf7jg5gy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:19.966448Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:19.965114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzg2NDk3MTktNDMwYWViN2QtZDY4MmU0OWQtY2U1MmQ2NTE=, ActorId: [1:7490568733345974291:2302], ActorState: ExecuteState, TraceId: 01jr88c03eaz6mcvbvtq1gsddy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:19.967129Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.241398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.443059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18830", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:20.778167Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568716166104252:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.778228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.927722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr88c15p9zn3m3bp3q5ek2hf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZiNzM5ODEtYzUxYmM0NTItMjMxNGU5MTYtNTlkM2ZmMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:52:20.955690Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568717213391853:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.955746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7490568741935909365:2971] === CheckClustersList. Ok 2025-04-07T13:52:27.053507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:18830 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:27.243134Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18830 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true R ... fd-8a131018-2ebd8123-68b81f5d_0] Write session: acknoledged message 5 2025-04-07T13:53:00.133917Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:53:00.138007Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] Got ReadResponse, serverBytesSize = 1562, now ReadSizeBudget = 0, ReadSizeServerDelta = 8387046 2025-04-07T13:53:00.138111Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 8387046 2025-04-07T13:53:00.140901Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0] Write session will now close 2025-04-07T13:53:00.140888Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 2 (1-4) 2025-04-07T13:53:00.140954Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0] Write session: aborting 2025-04-07T13:53:00.140976Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] Returning serverBytesSize = 1562 to budget 2025-04-07T13:53:00.141001Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] In ContinueReadingDataImpl, ReadSizeBudget = 1562, ReadSizeServerDelta = 8387046 2025-04-07T13:53:00.141135Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2025-04-07T13:53:00.141191Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2025-04-07T13:53:00.141224Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (2-2) 2025-04-07T13:53:00.141246Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (3-3) 2025-04-07T13:53:00.141266Z :DEBUG: [/Root] Take Data. Partition 0. 
Read: {1, 2} (4-4) 2025-04-07T13:53:00.141325Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-04-07T13:53:00.141369Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0] Write session: destroy >>> event from dataHandler: DataReceived { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2025-04-07T13:53:00.021000Z WriteTime: 2025-04-07T13:53:00.031000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:48118", "_ip": "ipv6:[::1]:48118" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-04-07T13:53:00.021000Z WriteTime: 2025-04-07T13:53:00.103000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:48118", "_ip": "ipv6:[::1]:48118" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-04-07T13:53:00.021000Z WriteTime: 2025-04-07T13:53:00.103000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:48118", "_ip": "ipv6:[::1]:48118" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-04-07T13:53:00.021000Z WriteTime: 2025-04-07T13:53:00.103000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:48118", "_ip": "ipv6:[::1]:48118" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-04-07T13:53:00.141681Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] The application data is transferred to the client. Number of messages 4, size 1180 bytes 2025-04-07T13:53:00.141710Z :DEBUG: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:53:00.148375Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Closing read session. 
Close timeout: 18446744073709.551615s 2025-04-07T13:53:00.148453Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic:0:2:4:0 2025-04-07T13:53:00.148485Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 835 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:00.146839Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0 grpc read done: success: 0 data: 2025-04-07T13:53:00.146865Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0 grpc read failed 2025-04-07T13:53:00.146888Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0 grpc closed 2025-04-07T13:53:00.148144Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:00.148185Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7490568909621152146:2575] destroyed 2025-04-07T13:53:00.146902Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|db43c9fd-8a131018-2ebd8123-68b81f5d_0 is DEAD 2025-04-07T13:53:00.147547Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:53:00.148228Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:53:00.147692Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1562 } } 2025-04-07T13:53:00.147761Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 got read request: guid# e9d84b25-3977e338-ad1bc5f1-e5a46afb 2025-04-07T13:53:00.149968Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:00.150016Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic:0:2:4:0 2025-04-07T13:53:00.150045Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 837 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:00.150223Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Closing read session. 
Close timeout: 0.000000s 2025-04-07T13:53:00.150260Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic:0:2:4:0 2025-04-07T13:53:00.150288Z :INFO: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 837 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:00.150375Z :NOTICE: [/Root] [/Root] [d2fb0eb0-c5357a0d-f8c11024-37c89f44] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:53:00.152867Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 grpc read done: success# 0, data# { } 2025-04-07T13:53:00.152894Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 grpc read failed 2025-04-07T13:53:00.152915Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 grpc closed 2025-04-07T13:53:00.152965Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11275983461112166552_v1 is DEAD 2025-04-07T13:53:00.154652Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:00.154679Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.154703Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7490568905326184695:2559] destroyed 2025-04-07T13:53:00.154724Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:00.154751Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.154768Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7490568905326184692:2558] destroyed 2025-04-07T13:53:00.154804Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.154818Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.154838Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:00.154849Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.154862Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490568905326184696:2560] destroyed 2025-04-07T13:53:00.154879Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.155503Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7490568905326184685:2553] disconnected; active server actors: 1 2025-04-07T13:53:00.155538Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7490568905326184685:2553] client user disconnected session shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.155611Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568905326184686:2553] disconnected; active server actors: 1 2025-04-07T13:53:00.155626Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568905326184686:2553] client user disconnected session shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.155672Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7490568905326184687:2553] disconnected; active server actors: 1 2025-04-07T13:53:00.155684Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7490568905326184687:2553] client user disconnected session 
shared/user_3_1_11275983461112166552_v1 2025-04-07T13:53:00.675225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:53:00.675271Z node 3 :IMPORT WARN: Table profiles were not loaded >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |86.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable |86.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |86.2%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup [GOOD] Test command err: 2025-04-07T13:52:51.496762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:51.497016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:51.497294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fad/r3tmp/tmpF9RTC9/pdisk_1.dat 2025-04-07T13:52:51.803403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:51.843832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:51.894133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:51.894260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:51.905860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:52.034442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:52.081667Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:653:2553] 2025-04-07T13:52:52.081981Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:52.124402Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:52.124818Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2025-04-07T13:52:52.125026Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:52.131917Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:52.133706Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:52.133819Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:52.133863Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:52.134160Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:52.171654Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:52.171898Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:52.172072Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2573] 2025-04-07T13:52:52.172156Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:52.172196Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:52.172243Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:52.173457Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:52.173574Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T13:52:52.174108Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:644:2548], serverId# [1:676:2566], sessionId# [0:0:0] 2025-04-07T13:52:52.174387Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:52.174431Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:52.174466Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:52.174510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:52.174606Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:52.174821Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:52.175106Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:52.175216Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:52.175691Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:52.177231Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:52:52.177336Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:52:52.177398Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:52:52.177742Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:52.177785Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:52:52.177843Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:52.177905Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:694:2576] 2025-04-07T13:52:52.177933Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:52:52.177959Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:52:52.178028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:52:52.178881Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:52:52.178947Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:52:52.180423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:52.180479Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:52.180513Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:52:52.180548Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:52.181200Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:645:2549], serverId# [1:681:2570], sessionId# [0:0:0] 2025-04-07T13:52:52.181599Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:661:2557] 2025-04-07T13:52:52.181792Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:52:52.190611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:52.190826Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:52:52.190905Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T13:52:52.192971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:52.193087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:52.194525Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T13:52:52.194603Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T13:52:52.194656Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-07T13:52:52.194926Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:52.194971Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T13:52:52.195072Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:52.195146Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:713:2585] 2025-04-07T13:52:52.195176Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T13:52:52.195206Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T13:52:52.195245Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:52:52.195740Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:52.195813Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:52.195860Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T13:52:52.195926Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T13:52:52.196061Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:52:52.196106Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:52.196139Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T13:52:52.196178Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:52:52.207433Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:52.207564Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:52.207664Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:52:52.207711Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:52.252651Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:646:2550], serverId# [1:722:2593], sessionId# [0:0:0] 2025-04-07T13:52:52.252850Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:52.253078Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-07T13:52:52.253196Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-07T13:52:52.254009Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:52.264680Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:52.264771Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:52.417265Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2607], serverId# [1:740:2611], sessionId# [0:0:0] 2025-04-07T13:52:52.417458Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:737:2608], serverId# [1:741:2612], sessionId# [0:0:0] 2025-04-07T13:52:52.417777Z node 1 :TX_DATA ... nd 0 read sets to remove in 72075186224037888 2025-04-07T13:53:01.925818Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:53:01.926234Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:01.926282Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:01.926325Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:01.926366Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:01.926491Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:01.926701Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:01.926783Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:53:01.928494Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:01.939297Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:01.939426Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:02.134982Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:53:02.135625Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:02.135686Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.137061Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.137117Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 
planned 1 2025-04-07T13:53:02.137163Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:53:02.137426Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:53:02.137561Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:02.137752Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.137809Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:02.138257Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:02.138663Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:02.140169Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:53:02.140221Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.142447Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:53:02.142537Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:53:02.142607Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.143818Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.143862Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:02.143910Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:02.143971Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:53:02.144024Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:53:02.144112Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.144584Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:02.146608Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:53:02.146680Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:02.147538Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:53:02.156341Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.156480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.156574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.162673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:53:02.170905Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:02.399452Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:02.402226Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:53:02.540515Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88d9pa20kfps63z1cfawwe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFmNzI3MWYtODYyNmE1ZGEtZjBlYzJjOWItMzNmNDlmMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:53:02.541327Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:53:02.541555Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:02.557333Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:02.557486Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.690683Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88da37330bvbbhcnjnrejb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkMDA2YTUtYjlkY2VjOTQtZjE1OTMzZWUtNmVmNmMwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:53:02.692547Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-04-07T13:53:02.712729Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:855:2685], serverId# [3:856:2686], sessionId# [0:0:0] 2025-04-07T13:53:02.714062Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:53:02.725645Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:53:02.725742Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.725809Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-07T13:53:02.726492Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-07T13:53:02.726570Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.726741Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:53:02.726777Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-04-07T13:53:02.727002Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.727047Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:02.727095Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:02.727144Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.727258Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:855:2685], serverId# [3:856:2686], sessionId# [0:0:0] 2025-04-07T13:53:02.831467Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88da87f43wtq6kgj0hz5aq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkMDA2YTUtYjlkY2VjOTQtZjE1OTMzZWUtNmVmNmMwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:53:02.831959Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:02.845191Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:02.845336Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.846033Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWNkMDA2YTUtYjlkY2VjOTQtZjE1OTMzZWUtNmVmNmMwZGE=, ActorId: [3:823:2660], ActorState: ExecuteState, TraceId: 01jr88da87f43wtq6kgj0hz5aq, Create QueryResponse for error on request, msg: 2025-04-07T13:53:02.846930Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88da87f43wtq6kgj0hz5aq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkMDA2YTUtYjlkY2VjOTQtZjE1OTMzZWUtNmVmNmMwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:53:02.847223Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:02.847634Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:02.847693Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> YdbProxy::ReadTopic >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |86.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] Test command err: 2025-04-07T13:52:51.527042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:51.527326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:51.527558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb8/r3tmp/tmp9iCkVL/pdisk_1.dat 2025-04-07T13:52:51.836529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:51.881005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:51.932684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:51.932831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:51.944281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:52.071409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:52.112105Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:52:52.112425Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:52.161131Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:52.161261Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:52.162902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:52.163006Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:52.163062Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:52.163471Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:52.200242Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:52.200398Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:52.200502Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:52:52.200531Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:52.200561Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:52.200587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:52.204435Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:52.204583Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:52:52.205034Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:52:52.205109Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:52.205165Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:52.205209Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:52.205249Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:52.205418Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:52.205711Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:52.205799Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:52.207600Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:52.221212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:52.221316Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:52.426899Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:52:52.455277Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:52:52.455381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:52.456384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:52.456494Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:52:52.456557Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:52:52.465155Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:52:52.465393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:52:52.466333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:52.466411Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:52:52.468638Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:52:52.469177Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:52.478416Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:52:52.478492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:52.479162Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:52:52.479239Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:52:52.479307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:52.484502Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:52.484551Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:52.484599Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:52:52.484651Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:52:52.484709Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:52:52.484784Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:52.488321Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:52.490444Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:52:52.490561Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:52:52.490626Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:52:52.501261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:52.501409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:52.501492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:52.507845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:52:52.514528Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:52.733604Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:52.736519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:52:53.156953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88d08j8xc98ae4h21nhyc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBjYjU5ZTktN2M1NmU5MDAtZTk1ZTQ1ZTUtNGY2NjI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:52:53.162698Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T13:52:53.162971Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:53.177236Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:53.177387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:53.181347Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2025-04-07T13:52:53.182555Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:52:53.194246Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:52:53 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:53:02.546195Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T13:53:02.546594Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.546645Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:02.546691Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:02.546737Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.546898Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:02.547141Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:02.547235Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:53:02.549217Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:02.561062Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:02.561204Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:02.766922Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T13:53:02.767735Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:02.767804Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.775557Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.775644Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:53:02.775703Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:53:02.776010Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:53:02.776186Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:02.776519Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:02.776638Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:02.777186Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:02.777681Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:02.779880Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:53:02.779965Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.780960Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:53:02.781035Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:53:02.781107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.782474Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:02.782528Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:02.782603Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:02.782673Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:53:02.782727Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:53:02.782855Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:02.783476Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:02.800117Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:53:02.800231Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:02.805155Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:53:02.842318Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.842442Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.842574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.850102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T13:53:02.860720Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:03.091388Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:03.097644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:53:03.253380Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88dabb9cgkqn4ekybbydsz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2RjODNlNGMtZjYxYTJiYTYtM2I1YTllMTYtMWVjZjVhMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:53:03.253998Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T13:53:03.254218Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:03.269365Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:03.269546Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:03.273965Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:53:03.275252Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-07T13:53:03.289491Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-07T13:53:03.289588Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:03.289862Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:53:03.289903Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-07T13:53:03.290245Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:03.290305Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:03.290352Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:03.290418Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:03.290501Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:824:2661], serverId# [3:825:2662], sessionId# [0:0:0] 2025-04-07T13:53:03.291487Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:03.291900Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:03.292101Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:03.292160Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:03.292231Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:53:03.292512Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:03.292572Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:03.297540Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-07T13:53:03.297938Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 42, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:53:03.298099Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-07T13:53:03.298165Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-07T13:53:03.420726Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:53:03.420812Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-07T13:53:03.421352Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:03.421397Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:03.421440Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-07T13:53:03.421582Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:03.421652Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:03.421699Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> TSchemeShardViewTest::CreateView >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:228:2060] recipient: [1:212:2140] 2025-04-07T13:52:06.217612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:52:06.217713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:06.217752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:52:06.217794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:52:06.217832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:52:06.217858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:52:06.217931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:06.218274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:52:06.296866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:52:06.296924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:06.308061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:06.311982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:52:06.312205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:52:06.319023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:52:06.319593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:52:06.320150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.320355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:52:06.322814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.323921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:06.323973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.324054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:52:06.324094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:06.324134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:52:06.324317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.330405Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:342:2060] recipient: [1:17:2064] 2025-04-07T13:52:06.485677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:52:06.485907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.486117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:52:06.486387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:52:06.486455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.493899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.494039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:52:06.494232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.494299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:52:06.494334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:52:06.494366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:52:06.505717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.505800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:52:06.505835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:52:06.512551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.512613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.512655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.512751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.526382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:52:06.528750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:52:06.528939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:52:06.529913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.530045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 239 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:52:06.530090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.530320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:52:06.530384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.530535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-07T13:52:06.530636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:52:06.533028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:06.533082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:06.533322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.533359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:309:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:52:06.533779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.533828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:52:06.533918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:52:06.533952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.534007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:52:06.534049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.534079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:52:06.534107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:52:06.534182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:52:06.534230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:52:06.534283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:52:06.536024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:06.536122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:06.536157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:52:06.536201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:52:06.536250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:52:06.536365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T13:53:05.340536Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-07T13:53:05.340561Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-07T13:53:05.340585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T13:53:05.340650Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-07T13:53:05.345011Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:53:05.347959Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:53:05.349259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T13:53:05.349302Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T13:53:05.349426Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T13:53:05.349446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T13:53:05.349653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T13:53:05.349687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T13:53:05.349942Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:668:2499], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.349991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.350023Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T13:53:05.350161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:572:2403], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-07T13:53:05.350195Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T13:53:05.350265Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T13:53:05.350372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T13:53:05.350410Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:666:2497] 2025-04-07T13:53:05.350594Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:668:2499], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.350638Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.350677Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-04-07T13:53:05.351025Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:546:2100], Recipient [7:229:2151] 2025-04-07T13:53:05.351067Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:53:05.352445Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 546 RawX2: 34359740468 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:05.352637Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:53:05.352757Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T13:53:05.352884Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:53:05.354369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:05.354510Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-07T13:53:05.354559Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-07T13:53:05.354796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-07T13:53:05.354825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-07T13:53:05.355051Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:674:2505], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.355081Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.355108Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T13:53:05.355207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:572:2403], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-04-07T13:53:05.355228Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T13:53:05.355270Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
2025-04-07T13:53:05.355340Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T13:53:05.355368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:672:2503] 2025-04-07T13:53:05.355486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:674:2505], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.355507Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.355534Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-07T13:53:05.355781Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:546:2100], Recipient [7:229:2151] 2025-04-07T13:53:05.355808Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:53:05.357340Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 546 RawX2: 34359740468 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:05.357593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T13:53:05.357653Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-04-07T13:53:05.357836Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T13:53:05.361020Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:05.361236Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-07T13:53:05.361296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-07T13:53:05.361647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-07T13:53:05.361691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-07T13:53:05.362025Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:680:2511], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.362071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:05.362107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T13:53:05.362223Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:572:2403], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-04-07T13:53:05.362250Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T13:53:05.362304Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-07T13:53:05.362403Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T13:53:05.362447Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:678:2509] 2025-04-07T13:53:05.362611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:680:2511], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.362643Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:05.362674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2025-04-07T13:48:49.741176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567831232637649:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:49.742405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019ff/r3tmp/tmp5eNY8M/pdisk_1.dat 2025-04-07T13:48:50.762535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:48:50.816830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:50.825518Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:50.825613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:50.838638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24494, node 1 2025-04-07T13:48:51.161442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:51.161462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:51.161468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:51.161556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:51.944102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.949878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:51.976758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.989660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:51.989845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:51.989860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:48:51.992165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:51.992188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:51.993517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:51.996681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033732042, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:51.996710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:51.996962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 waiting... 
2025-04-07T13:48:52.005454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:52.005598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:52.005640Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:52.005713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:52.005755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:52.005808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:48:52.008799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:52.016786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:52.016820Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:52.016921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T13:48:52.017165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:15168 2025-04-07T13:48:52.661987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.664426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:52.664649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:52.672789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:52.675212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-07T13:48:52.675402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:48:52.675757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:52.675817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-07T13:48:52.677843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:52.677873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:52.677889Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:48:52.678102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:48:52.678124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:48:52.678133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:48:52.692203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:48:52.712030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.712260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.712386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.719488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:48:52.719575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:48:52.726683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:48:52.901896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:48:52.902095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:48:52.902339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:48:52.902442Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:48:52.902538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:48:52.902624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:48:52.902701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490567844117540383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normaliz ... ivery in input channelId: 35, seqNo: [1] 2025-04-07T13:52:57.921011Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 36, seqNo: [1] 2025-04-07T13:52:57.921026Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 37, seqNo: [1] 2025-04-07T13:52:57.921040Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2025-04-07T13:52:57.921055Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2025-04-07T13:52:57.921071Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2025-04-07T13:52:57.921087Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2025-04-07T13:52:57.921105Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2025-04-07T13:52:57.921121Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2025-04-07T13:52:57.921136Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2025-04-07T13:52:57.921151Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2025-04-07T13:52:57.921167Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2025-04-07T13:52:57.921183Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2025-04-07T13:52:57.921199Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2025-04-07T13:52:57.921215Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2025-04-07T13:52:57.921233Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2025-04-07T13:52:57.921250Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2025-04-07T13:52:57.921268Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2025-04-07T13:52:57.921285Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2025-04-07T13:52:57.921303Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2025-04-07T13:52:57.921321Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2025-04-07T13:52:57.921337Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2025-04-07T13:52:57.921353Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2025-04-07T13:52:57.921372Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2025-04-07T13:52:57.921388Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2025-04-07T13:52:57.921406Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2025-04-07T13:52:57.921423Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2025-04-07T13:52:57.921438Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2025-04-07T13:52:57.921454Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2025-04-07T13:52:57.921471Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-04-07T13:52:57.921499Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished 2025-04-07T13:52:57.921533Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490568898257835786:3661], TxId: 281474976715770, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=. CustomerSuppliedId : . TraceId : 01jr88d4b9c6n83v7vqnnmd7e0. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-07T13:52:57.921733Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
pass away 2025-04-07T13:52:57.921941Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T13:52:57.922155Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490568898257835786:3661], TxId: 281474976715770, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=. CustomerSuppliedId : . TraceId : 01jr88d4b9c6n83v7vqnnmd7e0. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [28:7490568898257835687:3585] TaskId: 65 Stats: CpuTimeUs: 10118 DurationUs: 172000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 1512 FinishTimeMs: 1744033977920 ComputeCpuTimeUs: 104 BuildCpuTimeUs: 1408 WaitInputTimeUs: 79023 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033977748 } MaxMemoryUsage: 1048576 2025-04-07T13:52:57.922933Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568898257835687:3585] TxId: 281474976715770. Ctx: { TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490568898257835786:3661], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 10118 DurationUs: 172000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 1512 FinishTimeMs: 1744033977920 ComputeCpuTimeUs: 104 BuildCpuTimeUs: 1408 WaitInputTimeUs: 79023 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744033977748 } MaxMemoryUsage: 1048576 } 2025-04-07T13:52:57.922980Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7490568898257835786:3661] 2025-04-07T13:52:57.923112Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568898257835687:3585] TxId: 281474976715770. Ctx: { TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T13:52:57.923192Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490568898257835687:3585] TxId: 281474976715770. Ctx: { TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.025139s ReadRows: 0 ReadBytes: 0 ru: 16 rate limiter was not found force flag: 1 2025-04-07T13:52:57.923297Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:52:57.923678Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 333.488 QueriesCount: 1 2025-04-07T13:52:57.923753Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:52:57.923890Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:52:57.923953Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, EndCleanup, isFinal: 1 2025-04-07T13:52:57.924036Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: ExecuteState, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7490568842423256231:2256] 2025-04-07T13:52:57.924092Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: unknown state, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Cleanup temp tables: 0 RESULT: [] --------------------- STATS: total CPU: 2785 duration: 1377 usec cpu: 1377 usec duration: 320110 usec cpu: 329267 usec { name: "/Root/OlapStore/log1" } 2025-04-07T13:52:57.953079Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744033977000, txId: 18446744073709551615] shutting down 2025-04-07T13:52:57.953289Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDQ4YjI4Y2QtOWNmZDIyMzUtNGEzNjk1OGEtY2I4M2Q3Mjg=, ActorId: [28:7490568893962868349:3585], ActorState: unknown state, TraceId: 01jr88d4b9c6n83v7vqnnmd7e0, Session actor destroyed 2025-04-07T13:52:58.155039Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[28:7490568846718224360:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:52:58.155122Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7490568846718224326:2290];fline=actor.cpp:33;event=skip_flush_writing; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] 
recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:06.605869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:06.605987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:06.606028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:06.606059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:06.606100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:06.606138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:06.606196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:06.606497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:06.667011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:06.667075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:06.677303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:06.677450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:06.677592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:06.697065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:06.697254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:06.697845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:06.698047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:06.700285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:06.701450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:06.701513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:06.701619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:06.701673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:06.701714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:06.701868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.708157Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:06.850590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:06.850810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.850999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:06.851186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:06.851235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.853455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:06.853633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:06.853754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.854013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:06.854043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:06.854065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:06.856195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.856252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:06.856285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:06.857852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.857892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.857942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:06.857981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:06.861129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:06.862421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:06.862566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:06.863354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:06.863483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:06.863546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:06.863776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:06.863818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:06.863961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:06.864020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:06.865593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:06.865643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:06.865816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:06.865853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:06.866045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.866080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:06.866166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:06.866191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:06.866237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:06.866279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:06.866308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:06.866331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:06.866378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:06.866429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:06.866463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:06.867991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:06.868068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:06.868093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:06.868116Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:06.868148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:06.868218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... cute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "Some query" } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:06.886446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-04-07T13:53:06.886495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-04-07T13:53:06.886616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:53:06.886698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T13:53:06.886761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:06.888372Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:06.890240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T13:53:06.890385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-07T13:53:06.891043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.891093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-04-07T13:53:06.891140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-07T13:53:06.891276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:06.892275Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:53:06.893211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T13:53:06.893387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T13:53:06.893763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:06.893899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:06.893948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-07T13:53:06.894078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T13:53:06.894241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:06.894316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T13:53:06.896022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:06.896064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:06.896196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:06.896280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:06.896324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T13:53:06.896390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T13:53:06.896627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:53:06.896686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T13:53:06.896772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T13:53:06.896816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:53:06.896869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T13:53:06.896905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:53:06.896941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T13:53:06.897015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T13:53:06.897073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:53:06.897098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T13:53:06.897122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:53:06.897140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:53:06.897760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:53:06.897850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:53:06.897929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:53:06.897968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:53:06.898001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:06.898830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:53:06.898902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:53:06.898925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:53:06.898950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:53:06.898972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:53:06.899037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T13:53:06.902829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T13:53:06.904845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 
100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-04-07T13:53:06.905124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:53:06.905165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T13:53:06.905532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:53:06.905616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:53:06.905648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:294:2286] TestWaitNotification: OK eventTxId 101 2025-04-07T13:53:06.906102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:06.906265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 203us result status StatusSuccess 2025-04-07T13:53:06.906588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::ReadOnlyMode >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> YdbProxy::OAuthToken [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:07.917710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:07.917815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:07.917873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:07.917904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:07.917943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:07.917980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:07.918033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:07.918323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:07.993345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:07.993400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:08.020601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:08.020760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:08.020909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:08.025950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:08.026112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:08.026930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:08.027122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:08.032501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.033753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:08.033819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.033963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:08.034011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:08.034049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:08.034205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.040841Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:08.179760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-04-07T13:53:08.180005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.180190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:08.180417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:08.180471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.184008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:08.184157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:08.184337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.184414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:08.184451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:08.184498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:08.186560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.186616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:08.186648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:08.193473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.193542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.193597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:08.193642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:08.201418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:08.203392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:08.203584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-07T13:53:08.204538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:08.204697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:08.204757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:08.204996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:08.205047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:08.205208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:08.205293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:08.209569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:08.209631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:08.209821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.209861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:08.210071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.210110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:08.210205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:08.210236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:08.210286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:08.210334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:08.210365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:08.210402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:08.210465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:08.210511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:08.210544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:08.212309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-07T13:53:08.212426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:08.212463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:08.212496Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:08.212544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:08.212643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... Propose Complete, txId: 103, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-07T13:53:08.285628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-07T13:53:08.286321Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:53:08.293532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T13:53:08.293677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T13:53:08.294046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:08.294161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:08.294216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-07T13:53:08.294347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T13:53:08.294524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:08.294593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:53:08.301923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T13:53:08.301982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:08.302132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:08.302220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.302255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:53:08.302309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:53:08.302547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.302591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:53:08.302690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:53:08.302728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:08.302774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:53:08.302808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:08.302840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:53:08.302869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:53:08.302939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:53:08.302972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T13:53:08.302999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:53:08.303025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:53:08.303829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:08.303916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:08.303952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:08.303987Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:53:08.304023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:08.305055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:08.305140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:08.305169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:08.305195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:53:08.305219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:53:08.305281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:53:08.314096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:08.315104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-07T13:53:08.315396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:53:08.315436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T13:53:08.315534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:53:08.315555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-07T13:53:08.315612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:53:08.315631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:53:08.316123Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:53:08.316255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:53:08.316298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:300:2292] 2025-04-07T13:53:08.316531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:53:08.316586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:53:08.316615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:300:2292] 2025-04-07T13:53:08.316728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:53:08.316799Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:53:08.316820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:300:2292] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-07T13:53:08.317244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:08.317435Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 186us result status StatusSuccess 2025-04-07T13:53:08.317753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::EmptyName >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> TableCreation::SimpleUpdateTable [GOOD] >> YdbProxy::DescribeTable [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-04-07T13:52:59.546982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568907140858059:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:59.547024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001354/r3tmp/tmphyICl4/pdisk_1.dat 2025-04-07T13:53:00.356721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:00.356858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:00.358206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-07T13:53:00.392181Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15670 TServer::EnableGrpc on GrpcPort 26420, node 1 2025-04-07T13:53:00.885199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:00.885224Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:00.885230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:00.885302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:01.724502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001354/r3tmp/tmpUcSKDK/pdisk_1.dat 2025-04-07T13:53:05.081100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:05.086362Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.125782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:05.125858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:05.127583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10093 TServer::EnableGrpc on GrpcPort 62668, node 2 2025-04-07T13:53:05.381399Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:05.381426Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:05.381433Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:05.381527Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10093 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:05.744568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] >> YdbProxy::DescribeTopic [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-04-07T13:52:57.806024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568896531681830:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.806084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a5d/r3tmp/tmpoHHoFi/pdisk_1.dat 2025-04-07T13:52:58.647150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.647246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.679685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.763818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.836819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:13425 TServer::EnableGrpc on GrpcPort 23090, node 1 2025-04-07T13:52:59.335682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.335708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.335715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.335798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:52:59.614719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:01.378850Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.379762Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.383498Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.383517Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:01.383552Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:01.383662Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.383666Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:01.383687Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:01.385164Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.385206Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.386067Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.386079Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:01.386105Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:01.397267Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:01.397297Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T13:53:01.397318Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.397379Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.406966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-07T13:53:01.408559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:01.410187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:01.416276Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-07T13:53:01.416278Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-07T13:53:01.416317Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-07T13:53:01.416317Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-04-07T13:53:01.416728Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-07T13:53:01.416751Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-07T13:53:01.548585Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-07T13:53:01.589665Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-07T13:53:01.598239Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-07T13:53:01.599206Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-07T13:53:01.668475Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-07T13:53:01.689016Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-07T13:53:01.696894Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 245407f3-5eaf769a-78651b2b-a4605060, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:01.704775Z node 1 :KQP_PROXY DEBUG: Request has 18445000039727.846881s seconds to be completed 2025-04-07T13:53:01.740949Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NzlhMDUzMmItYTZmYzRkNzgtNDI2M2YzMjgtNWM5MzEzOTY=, workerId: [1:7490568913711551883:2305], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T13:53:01.741082Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:01.762076Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 245407f3-5eaf769a-78651b2b-a4605060, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-07T13:53:01.770917Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NzlhMDUzMmItYTZmYzRkNzgtNDI2M2YzMjgtNWM5MzEzOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7490568913711551883:2305] 2025-04-07T13:53:01.770964Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490568913711551887:2462] 2025-04-07T13:53:01.778908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568913711551888:2307], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.784842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.792524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568913711551900:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.800425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-07T13:53:01.808060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568913711551902:2311], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:53:02.425835Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.809194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568896531681830:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:02.809236 ... ROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944681Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944694Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944706Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944721Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944734Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944746Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944757Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944768Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944780Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944792Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944806Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944828Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944936Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944949Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.944969Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715702. Doublechecking... 2025-04-07T13:53:07.949973Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-07T13:53:07.950026Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-07T13:53:07.950046Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. 
Column diff is empty, finishing 2025-04-07T13:53:07.951467Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7490568938005615802:2335], selfId: [2:7490568925120712633:2255], source: [2:7490568938005615780:2333] 2025-04-07T13:53:07.952031Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTYzNzUyODgtMmI1YzZlYjQtMzBiZGYxMDQtYTI2ZDUyYzA=, TxId: 2025-04-07T13:53:07.952052Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTYzNzUyODgtMmI1YzZlYjQtMzBiZGYxMDQtYTI2ZDUyYzA=, TxId: 2025-04-07T13:53:07.952299Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ef082cba-8666aefc-f4662009-49022e6d, start saving rows range [0; 1) 2025-04-07T13:53:07.952346Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, Bootstrap. Database: /dc-1 2025-04-07T13:53:07.952976Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTYzNzUyODgtMmI1YzZlYjQtMzBiZGYxMDQtYTI2ZDUyYzA=, workerId: [2:7490568938005615780:2333], local sessions count: 2 2025-04-07T13:53:07.953049Z node 2 :KQP_PROXY DEBUG: Request has 18445000039721.598575s seconds to be completed 2025-04-07T13:53:07.954340Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWU3YzlkM2YtMmExOTkyMzAtM2M0MDlhNC01NmJmZTdkZQ==, workerId: [2:7490568938005615930:2348], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:07.954436Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:07.954827Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-07T13:53:07.955169Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWU3YzlkM2YtMmExOTkyMzAtM2M0MDlhNC01NmJmZTdkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7490568938005615930:2348] 2025-04-07T13:53:07.955197Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7490568938005615932:3047] 2025-04-07T13:53:08.000927Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.000983Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.001097Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.009480Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. 
Column diff is empty, finishing 2025-04-07T13:53:08.009565Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.017171Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.017250Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.017290Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.017332Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.018252Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.022946Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.023012Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.023047Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.032958Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.033088Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.033108Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.033126Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.036990Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.037039Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.043196Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-07T13:53:08.127834Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7490568938005615931:2349], selfId: [2:7490568925120712633:2255], source: [2:7490568938005615930:2348] 2025-04-07T13:53:08.128118Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWU3YzlkM2YtMmExOTkyMzAtM2M0MDlhNC01NmJmZTdkZQ==, TxId: 2025-04-07T13:53:08.128139Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ef082cba-8666aefc-f4662009-49022e6d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWU3YzlkM2YtMmExOTkyMzAtM2M0MDlhNC01NmJmZTdkZQ==, TxId: 2025-04-07T13:53:08.128252Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ef082cba-8666aefc-f4662009-49022e6d, result part successfully saved 2025-04-07T13:53:08.128263Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ef082cba-8666aefc-f4662009-49022e6d, reply SUCCESS, issues: 2025-04-07T13:53:08.128519Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: ef082cba-8666aefc-f4662009-49022e6d, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:08.128588Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MWU3YzlkM2YtMmExOTkyMzAtM2M0MDlhNC01NmJmZTdkZQ==, workerId: [2:7490568938005615930:2348], local sessions count: 2 2025-04-07T13:53:08.128655Z node 2 :KQP_PROXY DEBUG: Request has 18445000039721.422974s seconds to be completed 2025-04-07T13:53:08.130232Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=Njc0MmZmZWEtZGY0NDAyZWQtMzIyZmYzNWUtODMwMWFiYjI=, workerId: [2:7490568942300583281:2360], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:08.130344Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:08.130565Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: ef082cba-8666aefc-f4662009-49022e6d, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:08.130814Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Njc0MmZmZWEtZGY0NDAyZWQtMzIyZmYzNWUtODMwMWFiYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7490568942300583281:2360] 2025-04-07T13:53:08.130836Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7490568942300583283:3087] 2025-04-07T13:53:08.153522Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTM1N2Y4ZWItYmJiZjQxNTQtOGUyYTU2MWItYmU0OGY0NDM=, workerId: [2:7490568938005615301:2331], local sessions count: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-04-07T13:52:57.752753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568898037810751:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.768509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a63/r3tmp/tmpXaX5oh/pdisk_1.dat 2025-04-07T13:52:58.651116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.651195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.679739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.759853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.812961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:26399 TServer::EnableGrpc on GrpcPort 9333, node 1 2025-04-07T13:52:59.337540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.337589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.337597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.337696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:52:59.651671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:59.681898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:01.365868Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.373512Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.386087Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.386114Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:01.386178Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:01.386209Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.386216Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:01.386336Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:01.386502Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.386507Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:01.386639Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:01.386880Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:01.388556Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T13:53:01.388585Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.388704Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.388849Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.388906Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.390139Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.393054Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.405192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:53:01.413531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:01.415224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:01.425954Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-07T13:53:01.426011Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-07T13:53:01.427761Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-07T13:53:01.427787Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-07T13:53:01.437015Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-07T13:53:01.437054Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-07T13:53:01.694759Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-07T13:53:01.776812Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-07T13:53:01.782431Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-07T13:53:01.784183Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-07T13:53:01.852957Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-07T13:53:01.876970Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-07T13:53:01.877623Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 38cb3d3a-29ddcaee-a2d81a66-5d0eb0f0, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:01.877948Z node 1 :KQP_PROXY DEBUG: Request has 18445000039727.673690s seconds to be completed 2025-04-07T13:53:01.917129Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=OTVmMDk3ZDYtMjM5ZjQyZjYtNDA1ZDg5MzEtNTU4MTM1ODY=, workerId: [1:7490568915217680803:2306], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T13:53:01.917288Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:01.937201Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 38cb3d3a-29ddcaee-a2d81a66-5d0eb0f0, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-07T13:53:01.937867Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=OTVmMDk3ZDYtMjM5ZjQyZjYtNDA1ZDg5MzEtNTU4MTM1ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7490568915217680803:2306] 2025-04-07T13:53:01.937903Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490568915217680806:2461] 2025-04-07T13:53:01.940617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568915217680807:2308], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.940765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.954091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568915217680821:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.965501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-07T13:53:01.994343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568915217680824:2312], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976 ... 00.000000s actor id: [2:7490568942004868111:2521] 2025-04-07T13:53:08.322489Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88dfq12aqwpzhj90dzbkts", Request has 18445000039721.229151s seconds to be completed 2025-04-07T13:53:08.324222Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88dfq12aqwpzhj90dzbkts", Created new session, sessionId: ydb://session/3?node_id=2&id=NTVhZDM0OWItMTRkNDUyNGYtZThhMWMxY2UtMTZmNTE2Mw==, workerId: [2:7490568942004868125:2331], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:08.324335Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr88dfq12aqwpzhj90dzbkts 2025-04-07T13:53:08.327548Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-07T13:53:08.327558Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-07T13:53:08.327582Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-07T13:53:08.330023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-07T13:53:08.331616Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-07T13:53:08.331638Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-07T13:53:08.403295Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, Bootstrap. Database: /dc-1 2025-04-07T13:53:08.413880Z node 2 :KQP_PROXY DEBUG: Request has 18445000039721.137761s seconds to be completed 2025-04-07T13:53:08.415441Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTRmMmFjODAtMTA5YTgzNzUtMmY0Yjk5NTQtYWEyNGIzODM=, workerId: [2:7490568942004868207:2335], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:08.415536Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:08.416004Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7490568937709900700:2453], selfId: [2:7490568924824998171:2256], source: [2:7490568942004868109:2321] 2025-04-07T13:53:08.416342Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:08.417804Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTRmMmFjODAtMTA5YTgzNzUtMmY0Yjk5NTQtYWEyNGIzODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7490568942004868207:2335] 2025-04-07T13:53:08.417839Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7490568942004868214:2580] 2025-04-07T13:53:08.434785Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. 
Transaction completed: 281474976715664. Doublechecking... 2025-04-07T13:53:08.521006Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:08.528976Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-04-07T13:53:08.529030Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-07T13:53:08.530306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:08.531318Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:08.531341Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715666 2025-04-07T13:53:08.590662Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-04-07T13:53:08.609028Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7490568942004868209:2336], selfId: [2:7490568924824998171:2256], source: [2:7490568942004868207:2335] 2025-04-07T13:53:08.609840Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTRmMmFjODAtMTA5YTgzNzUtMmY0Yjk5NTQtYWEyNGIzODM=, TxId: 2025-04-07T13:53:08.609871Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTRmMmFjODAtMTA5YTgzNzUtMmY0Yjk5NTQtYWEyNGIzODM=, TxId: 2025-04-07T13:53:08.610001Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 250da531-2aa30f03-8d891b75-fc1723ec, start saving rows range [0; 1) 2025-04-07T13:53:08.610066Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:08.613076Z node 2 :KQP_PROXY DEBUG: Request has 18445000039720.938556s seconds to be completed 2025-04-07T13:53:08.614794Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZGU0MjcwMjItNTZjMGE2NjYtNjM5MDgwYzMtOTQxNTlmMw==, workerId: [2:7490568942004868280:2347], database: /dc-1, longSession: 1, local sessions count: 4 2025-04-07T13:53:08.614902Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:08.615259Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTRmMmFjODAtMTA5YTgzNzUtMmY0Yjk5NTQtYWEyNGIzODM=, workerId: [2:7490568942004868207:2335], local sessions count: 3 2025-04-07T13:53:08.615482Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-07T13:53:08.615787Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZGU0MjcwMjItNTZjMGE2NjYtNjM5MDgwYzMtOTQxNTlmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7490568942004868280:2347] 2025-04-07T13:53:08.615812Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7490568942004868283:2628] 2025-04-07T13:53:08.659299Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-07T13:53:08.696608Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NTVhZDM0OWItMTRkNDUyNGYtZThhMWMxY2UtMTZmNTE2Mw==, workerId: [2:7490568942004868125:2331], local sessions count: 2 2025-04-07T13:53:08.725064Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:08.782347Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7490568942004868282:2348], selfId: [2:7490568924824998171:2256], source: [2:7490568942004868280:2347] 2025-04-07T13:53:08.782927Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGU0MjcwMjItNTZjMGE2NjYtNjM5MDgwYzMtOTQxNTlmMw==, TxId: 2025-04-07T13:53:08.782949Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGU0MjcwMjItNTZjMGE2NjYtNjM5MDgwYzMtOTQxNTlmMw==, TxId: 2025-04-07T13:53:08.783057Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 250da531-2aa30f03-8d891b75-fc1723ec, result part successfully saved 2025-04-07T13:53:08.783069Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 250da531-2aa30f03-8d891b75-fc1723ec, reply SUCCESS, issues: 2025-04-07T13:53:08.783524Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, Bootstrap. Database: /dc-1 2025-04-07T13:53:08.783827Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZGU0MjcwMjItNTZjMGE2NjYtNjM5MDgwYzMtOTQxNTlmMw==, workerId: [2:7490568942004868280:2347], local sessions count: 1 2025-04-07T13:53:08.783888Z node 2 :KQP_PROXY DEBUG: Request has 18445000039720.767737s seconds to be completed 2025-04-07T13:53:08.785771Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWFhODQ1OWUtODkyOWQzODEtMzAwYWE3MTUtZDkzYmYxMDM=, workerId: [2:7490568942004868316:2360], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:08.785881Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:08.786091Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 250da531-2aa30f03-8d891b75-fc1723ec, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:08.786401Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWFhODQ1OWUtODkyOWQzODEtMzAwYWE3MTUtZDkzYmYxMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7490568942004868316:2360] 2025-04-07T13:53:08.786433Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7490568942004868318:2646] >> YdbProxy::DropTopic [GOOD] >> KqpScripting::ScriptExplainCreatedTable >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:09.785726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:09.785836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:09.785875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:09.785937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:09.785977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:09.786005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:09.786051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:09.786386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:09.868984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:09.869042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:09.882461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:09.882619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:09.882766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:09.888820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:09.889007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:09.889678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:09.889880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:09.891830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:09.893151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:09.893217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:09.893370Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:09.893426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:09.893473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:09.893627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.900248Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:10.032199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:10.032465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.032679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:10.032900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:10.032956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.037804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:10.037957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:10.038139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.038200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:10.038242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:10.038274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:10.040966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.041039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:10.041079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:10.045716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.045778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.045844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:10.045893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:10.057325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:10.061625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:10.061862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:10.062864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:10.063039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:10.063102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:10.063385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:10.063441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:10.063621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:10.063696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:10.066680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:10.066749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:10.066932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:10.066974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:10.067182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.067224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:10.067338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:10.067378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:10.067420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:10.067464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:10.067497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:10.067524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:10.067594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:10.067653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:10.067689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:10.069567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:10.069830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:10.069872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:10.069911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:10.069959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:10.070066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:53:10.073258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:53:10.073767Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:53:10.074313Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:53:10.090650Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:53:10.092182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:10.092292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-04-07T13:53:10.092329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-04-07T13:53:10.092414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-07T13:53:10.094013Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:10.101112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:10.101335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-04-07T13:53:10.102562Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> YdbProxy::StaticCreds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:08.760310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:08.760456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:08.760490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:08.760521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:08.760561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:08.760586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:08.760632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:08.760972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:08.846013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:08.846075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:08.859548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:08.859711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:08.859874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:08.867887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:08.868115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:08.868938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:08.869186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:08.871518Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.872805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:08.872866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:08.872992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:08.873047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:08.873087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:08.873243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:08.880429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:09.125266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:09.125511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.125727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:09.125954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:09.126006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.131009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:09.131173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:09.131379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.131441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:09.131486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:09.131524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:09.133784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.133847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:09.133880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:09.135719Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.135766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.135827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:09.135871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:09.139389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:09.141668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:09.141896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:09.142917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:09.143076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:09.143139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:09.143429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:09.143476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:09.143663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:09.143736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:09.146375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:09.146438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:09.146638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:09.146683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:09.146932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:09.146976Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:09.147105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:09.147140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:09.147180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:09.147222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:09.147253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:09.147277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:09.147348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:09.147406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:09.147436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:09.149379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:09.149480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:09.149518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:09.149553Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:09.149599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:09.149700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ead records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.227782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.232967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.233702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.269722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:10.269812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:10.270435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:10.270495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:10.270534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:10.270852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:374:2345] sender: [1:430:2058] recipient: [1:15:2062] 2025-04-07T13:53:10.311121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:10.311369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-04-07T13:53:10.311443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-04-07T13:53:10.311563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one 
child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T13:53:10.311628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T13:53:10.311674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:10.318560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-07T13:53:10.318717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-04-07T13:53:10.318891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.318940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-04-07T13:53:10.318992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-07T13:53:10.319112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:10.322344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:53:10.322635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-04-07T13:53:10.323333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:10.323448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:10.323532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-04-07T13:53:10.323747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-07T13:53:10.323905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:10.323959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:53:10.329930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:10.329989Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:10.330167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:53:10.330268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:10.330301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:424:2385], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T13:53:10.330364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:424:2385], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T13:53:10.330448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:53:10.330488Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:53:10.330574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:53:10.330620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:53:10.330667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-07T13:53:10.330719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:53:10.330769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:53:10.330809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:53:10.330886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:53:10.330921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-07T13:53:10.330949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T13:53:10.330980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T13:53:10.332073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:53:10.332198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:53:10.332235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:53:10.332276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:53:10.332314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:53:10.334242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:53:10.334329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:53:10.334380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:53:10.334430Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T13:53:10.334467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:53:10.334540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T13:53:10.341659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:53:10.343133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-04-07T13:53:01.189311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568912053414476:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:01.190106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001332/r3tmp/tmpDqqcQ6/pdisk_1.dat 2025-04-07T13:53:01.690677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.690790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.696715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:01.697321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10314 TServer::EnableGrpc on GrpcPort 14661, node 1 2025-04-07T13:53:02.045317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:02.045344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:02.045351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:02.045459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10314 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:02.462073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:02.487200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:02.582678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:53:05.072335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568931915904711:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:05.072395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001332/r3tmp/tmpjnfB5V/pdisk_1.dat 2025-04-07T13:53:05.407275Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.420155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:05.420231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:05.425149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24486 TServer::EnableGrpc on GrpcPort 10433, node 2 2025-04-07T13:53:05.873249Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:05.873278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:05.873286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:05.873693Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24486 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:06.682847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:06.690577Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:08.863585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-04-07T13:52:59.634208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568903190371049:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:59.641148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00135e/r3tmp/tmp3HOjwI/pdisk_1.dat 2025-04-07T13:53:00.403455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:00.453103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:00.453216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:00.461504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25571 TServer::EnableGrpc on GrpcPort 9061, node 1 2025-04-07T13:53:01.025185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:01.025204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:01.025210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:01.025282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25571 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:01.609986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:01.627054Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:04.259156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:04.500279Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T13:53:04.628824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568903190371049:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:04.628889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:05.513781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568929953896577:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:05.513819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00135e/r3tmp/tmplyoEvG/pdisk_1.dat 2025-04-07T13:53:05.696702Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.712444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:05.712525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:05.717886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6164 TServer::EnableGrpc on GrpcPort 11213, node 2 2025-04-07T13:53:06.356580Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:06.356600Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:06.356606Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:06.356692Z 
node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:06.897886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:06.905796Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> KqpScripting::QueryStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-04-07T13:52:57.771084Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568895736581990:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.771207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a73/r3tmp/tmpt19BXg/pdisk_1.dat 2025-04-07T13:52:58.648987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.649075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.680465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.732948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.814295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:16193 TServer::EnableGrpc on GrpcPort 61171, node 1 2025-04-07T13:52:59.332648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.335041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.335064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.335169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:52:59.600127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:59.614005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:02.189844Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.191178Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.211899Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.211948Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.211974Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:02.212002Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:02.212017Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.222853Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.224170Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.224190Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:02.224224Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:02.224341Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.224357Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:02.224395Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:02.226313Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.226336Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:02.226365Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:02.237108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:53:02.239840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.266755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.305725Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-07T13:53:02.305799Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-07T13:53:02.305931Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-07T13:53:02.305992Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-07T13:53:02.312825Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-07T13:53:02.312881Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-07T13:53:02.447534Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-07T13:53:02.501045Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-07T13:53:02.511136Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-07T13:53:02.527374Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-07T13:53:02.572554Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-07T13:53:02.602483Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-07T13:53:02.609322Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 78d244db-f63a8996-ee3ad451-24acb606, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:02.611011Z node 1 :KQP_PROXY DEBUG: Request has 18445000039726.940632s seconds to be completed 2025-04-07T13:53:02.614207Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NDJhYzkwNmYtYjU2N2I5OTktODM0YmY3MjQtYWViMGUxMWQ=, workerId: [1:7490568917211419211:2306], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T13:53:02.614324Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:02.649150Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 78d244db-f63a8996-ee3ad451-24acb606, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-07T13:53:02.649689Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDJhYzkwNmYtYjU2N2I5OTktODM0YmY3MjQtYWViMGUxMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7490568917211419211:2306] 2025-04-07T13:53:02.649719Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490568917211419213:2461] 2025-04-07T13:53:02.651887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568917211419214:2308], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.651994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.652790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568917211419226:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.656233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-07T13:53:02.671572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:53:02.672010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568917211419228:2312], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:53:02.784084Z node 1 :METADATA_PROVIDER ERROR: ... :53:09.142121Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710667 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:09.142131Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-07T13:53:09.142250Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:09.142258Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-07T13:53:09.142301Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710670 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:09.142310Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-07T13:53:09.142348Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710671 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:09.142360Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-07T13:53:09.142508Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-07T13:53:09.142514Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-07T13:53:09.161064Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976710666. Doublechecking... 2025-04-07T13:53:09.198657Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.205462Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 7, sender: [2:7490568943842040297:2332], selfId: [2:7490568930957137642:2256], source: [2:7490568943842040295:2331] 2025-04-07T13:53:09.205763Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Njk1MzA2ODctYjgxMzk4ZGQtMmU3MmQ3NWMtNjU3NTM3YTU=, TxId: 2025-04-07T13:53:09.205795Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njk1MzA2ODctYjgxMzk4ZGQtMmU3MmQ3NWMtNjU3NTM3YTU=, TxId: 2025-04-07T13:53:09.206040Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Njk1MzA2ODctYjgxMzk4ZGQtMmU3MmQ3NWMtNjU3NTM3YTU=, workerId: [2:7490568943842040295:2331], local sessions count: 2 2025-04-07T13:53:09.206062Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: da03d987-26b2cbd6-ac9f041d-d3acdff6, start saving rows range [0; 1) 2025-04-07T13:53:09.206120Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.206220Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, Bootstrap. Database: /dc-1 2025-04-07T13:53:09.206291Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-07T13:53:09.206474Z node 2 :KQP_PROXY DEBUG: Request has 18445000039720.345154s seconds to be completed 2025-04-07T13:53:09.208325Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTE5MjVkYjYtYWUyNmZjNDQtZGJhYThkZmMtMzE5NjI0ZWU=, workerId: [2:7490568948137007812:2347], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:09.208456Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:09.208906Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-07T13:53:09.209506Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.209851Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTE5MjVkYjYtYWUyNmZjNDQtZGJhYThkZmMtMzE5NjI0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7490568948137007812:2347] 2025-04-07T13:53:09.209883Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7490568948137007815:2691] 2025-04-07T13:53:09.211194Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.217160Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.225115Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.228986Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.244780Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:09.244780Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-07T13:53:09.278046Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzU5MjhmZjMtODU2MmI2M2MtZTliNmYxZWItNDdlOGNkMDc=, workerId: [2:7490568943842040301:2335], local sessions count: 2 2025-04-07T13:53:09.398440Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7490568948137007813:2348], selfId: [2:7490568930957137642:2256], source: [2:7490568948137007812:2347] 2025-04-07T13:53:09.398846Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTE5MjVkYjYtYWUyNmZjNDQtZGJhYThkZmMtMzE5NjI0ZWU=, TxId: 2025-04-07T13:53:09.398873Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTE5MjVkYjYtYWUyNmZjNDQtZGJhYThkZmMtMzE5NjI0ZWU=, TxId: 2025-04-07T13:53:09.398974Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: da03d987-26b2cbd6-ac9f041d-d3acdff6, result part successfully saved 2025-04-07T13:53:09.398986Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: da03d987-26b2cbd6-ac9f041d-d3acdff6, reply SUCCESS, issues: 2025-04-07T13:53:09.399955Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTE5MjVkYjYtYWUyNmZjNDQtZGJhYThkZmMtMzE5NjI0ZWU=, workerId: [2:7490568948137007812:2347], local sessions count: 1 2025-04-07T13:53:09.399958Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, Bootstrap. Database: /dc-1 2025-04-07T13:53:09.400058Z node 2 :KQP_PROXY DEBUG: Request has 18445000039720.151568s seconds to be completed 2025-04-07T13:53:09.409882Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZTAzMGI3NjQtYWY2OTU0YjQtZDE2NDY1ZDEtNDU5YzRmNzk=, workerId: [2:7490568948137007853:2360], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:09.410008Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:09.410230Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: da03d987-26b2cbd6-ac9f041d-d3acdff6, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:09.410471Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZTAzMGI3NjQtYWY2OTU0YjQtZDE2NDY1ZDEtNDU5YzRmNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7490568948137007853:2360] 2025-04-07T13:53:09.410497Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7490568948137007855:2714] >> TableCreation::CreateOldTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-04-07T13:53:00.358364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568908809540038:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:00.363354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001346/r3tmp/tmpPQleJ2/pdisk_1.dat 2025-04-07T13:53:01.210197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.210281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.225316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.339685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:01.372864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:8571 TServer::EnableGrpc on GrpcPort 29548, node 1 2025-04-07T13:53:01.901250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:01.901278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:01.901285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:01.901377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:02.475726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001346/r3tmp/tmpb6d6li/pdisk_1.dat 2025-04-07T13:53:05.976787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:06.027615Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:06.066944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:06.067019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:06.071009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4750 TServer::EnableGrpc on GrpcPort 29257, node 2 2025-04-07T13:53:06.584511Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:06.584534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:06.584540Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:06.584609Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:07.138365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:07.145362Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:07.430468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:07.452871Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-07T13:53:07.452900Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-04-07T13:53:01.307939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568914814713874:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:01.308132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001343/r3tmp/tmpHiEB5A/pdisk_1.dat 2025-04-07T13:53:02.189267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:02.189364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:02.197834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:02.265290Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7333 TServer::EnableGrpc on GrpcPort 15654, node 1 2025-04-07T13:53:02.784848Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:02.784886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:02.784894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:02.796910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:53:03.331934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:03.413459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:53:03.432252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:06.356717Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568933587815758:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:06.360851Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001343/r3tmp/tmpdVj3Gv/pdisk_1.dat 2025-04-07T13:53:06.704498Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:06.726695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:06.740841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:06.750526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2978 TServer::EnableGrpc on GrpcPort 17053, node 2 2025-04-07T13:53:07.305208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:07.305235Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:07.305240Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:07.305313Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:07.765123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:07.773803Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:07.848082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033987815 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744033987815 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... 
(TRUNCATED) 2025-04-07T13:53:08.224269Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpPragma::ResetPerQuery >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> YdbProxy::CreateCdcStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-04-07T13:52:57.807306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568896318768809:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.807454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a68/r3tmp/tmp81SUce/pdisk_1.dat 2025-04-07T13:52:58.646642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.650002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.680290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.724392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.843299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:3004 TServer::EnableGrpc on GrpcPort 22738, node 1 2025-04-07T13:52:59.677336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.677372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.677379Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.677481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:52:59.959062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:02.379144Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.398784Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.409418Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:02.409456Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:02.409472Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.409506Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.409703Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.409733Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.410398Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.410437Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.413013Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.413033Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:02.413065Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:02.413172Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.413177Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:02.413192Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:02.413217Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:02.413220Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:02.413229Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:02.416310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-07T13:53:02.426905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.428353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.434948Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-07T13:53:02.435000Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
Subscribe on create table tx: 281474976710659 2025-04-07T13:53:02.435425Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-07T13:53:02.435440Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-07T13:53:02.435508Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-07T13:53:02.435522Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-07T13:53:02.674893Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-07T13:53:02.741749Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-07T13:53:02.760932Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-07T13:53:02.795770Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-07T13:53:02.807561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568896318768809:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:02.807635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:02.837548Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-07T13:53:02.881083Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-07T13:53:02.881818Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 149eafee-5e1d2c44-775595a0-d5119bd5, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:02.882083Z node 1 :KQP_PROXY DEBUG: Request has 18445000039726.669553s seconds to be completed 2025-04-07T13:53:02.884894Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZGZiZDJiNy1kOWU0MDVmNy1hMzUyNTk5ZC1kNTAwMDY4Mw==, workerId: [1:7490568917793606023:2307], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T13:53:02.885036Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:02.917420Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 149eafee-5e1d2c44-775595a0-d5119bd5, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-07T13:53:02.920825Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZGZiZDJiNy1kOWU0MDVmNy1hMzUyNTk5ZC1kNTAwMDY4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7490568917793606023:2307] 2025-04-07T13:53:02.920866Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490568917793606027:2465] 2025-04-07T13:53:02.922972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568917793606028:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.923086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.923485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568917793606040:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.926921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 20 ... ZGIxN2ZhMTMtNzhlYTQwYmU=, workerId: [2:7490568957211719230:2323], database: dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:11.092697Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.092998Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MzIyZmFiZjctOGI4MjUwOGQtYWRiYWQzNjAtNmUzOTQ3NDA=, workerId: [2:7490568952916751826:2307], local sessions count: 1 2025-04-07T13:53:11.094652Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjYyOWRlNTAtZmNhNGE4Y2QtZGIxN2ZhMTMtNzhlYTQwYmU=, CurrentExecutionId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7490568957211719230:2323] 2025-04-07T13:53:11.094693Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7490568957211719232:2525] 2025-04-07T13:53:11.194380Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, Bootstrap. Database: /dc-1 2025-04-07T13:53:11.195159Z node 2 :KQP_PROXY DEBUG: Request has 18445000039718.356471s seconds to be completed 2025-04-07T13:53:11.196888Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWNmNTYxZDYtODJkMDkxOGQtMjYzOTAwMWUtNDAzNjZjYmM=, workerId: [2:7490568957211719259:2333], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:11.197002Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.197069Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7490568952916751823:2459], selfId: [2:7490568931441914483:2060], source: [2:7490568957211719230:2323] 2025-04-07T13:53:11.197427Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:11.197762Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWNmNTYxZDYtODJkMDkxOGQtMjYzOTAwMWUtNDAzNjZjYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 7, targetId: [2:7490568957211719259:2333] 2025-04-07T13:53:11.197800Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 300.000000s actor id: [2:7490568957211719261:2539] 2025-04-07T13:53:11.213525Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88djhdf5231sxxw73sdr8c", Request has 18445000039718.338124s seconds to be completed 2025-04-07T13:53:11.214863Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88djhdf5231sxxw73sdr8c", Created new session, sessionId: ydb://session/3?node_id=2&id=MjIwOTkyMjMtMWRiZmMyZTYtZWU5YTRmZDMtZTY0YWJhMzQ=, workerId: [2:7490568957211719265:2337], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:11.214976Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr88djhdf5231sxxw73sdr8c 2025-04-07T13:53:11.218050Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-07T13:53:11.218067Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-07T13:53:11.218090Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-07T13:53:11.220545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480 2025-04-07T13:53:11.226140Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710665 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-07T13:53:11.226180Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976710665 2025-04-07T13:53:11.277764Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976710665. Doublechecking... 2025-04-07T13:53:11.361626Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:11.362095Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-07T13:53:11.405534Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MjIwOTkyMjMtMWRiZmMyZTYtZWU5YTRmZDMtZTY0YWJhMzQ=, workerId: [2:7490568957211719265:2337], local sessions count: 2 2025-04-07T13:53:11.448700Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 7, sender: [2:7490568957211719260:2334], selfId: [2:7490568931441914483:2060], source: [2:7490568957211719259:2333] 2025-04-07T13:53:11.449256Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWNmNTYxZDYtODJkMDkxOGQtMjYzOTAwMWUtNDAzNjZjYmM=, TxId: 2025-04-07T13:53:11.449276Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWNmNTYxZDYtODJkMDkxOGQtMjYzOTAwMWUtNDAzNjZjYmM=, TxId: 2025-04-07T13:53:11.449414Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, start saving rows range [0; 1) 2025-04-07T13:53:11.449477Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:11.449863Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MWNmNTYxZDYtODJkMDkxOGQtMjYzOTAwMWUtNDAzNjZjYmM=, workerId: [2:7490568957211719259:2333], local sessions count: 1 2025-04-07T13:53:11.449930Z node 2 :KQP_PROXY DEBUG: Request has 18445000039718.101696s seconds to be completed 2025-04-07T13:53:11.451669Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YmM4NDNlZTgtNzljZjg5NGItODc3OWU5ZTYtZGYxN2M=, workerId: [2:7490568957211719384:2350], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:11.451771Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.452095Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-07T13:53:11.452428Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YmM4NDNlZTgtNzljZjg5NGItODc3OWU5ZTYtZGYxN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7490568957211719384:2350] 2025-04-07T13:53:11.452452Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7490568957211719386:2615] 2025-04-07T13:53:11.481490Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:11.691462Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7490568957211719385:2351], selfId: [2:7490568931441914483:2060], source: [2:7490568957211719384:2350] 2025-04-07T13:53:11.692138Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmM4NDNlZTgtNzljZjg5NGItODc3OWU5ZTYtZGYxN2M=, TxId: 2025-04-07T13:53:11.692161Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmM4NDNlZTgtNzljZjg5NGItODc3OWU5ZTYtZGYxN2M=, TxId: 2025-04-07T13:53:11.692309Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, result part successfully saved 2025-04-07T13:53:11.692322Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, reply SUCCESS, issues: 2025-04-07T13:53:11.693006Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:11.693539Z node 2 :KQP_PROXY DEBUG: Request has 18445000039717.858104s seconds to be completed 2025-04-07T13:53:11.695387Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ODY3YTZkMy1hMjMxYmU4NS1iNTQ0MzkzYy05ZDllZTVlMA==, workerId: [2:7490568957211719415:2362], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T13:53:11.695497Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.695560Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmM4NDNlZTgtNzljZjg5NGItODc3OWU5ZTYtZGYxN2M=, workerId: [2:7490568957211719384:2350], local sessions count: 2 2025-04-07T13:53:11.695774Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 1c9e716e-72faacf9-d01b3974-d67f2d6c, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:11.696003Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ODY3YTZkMy1hMjMxYmU4NS1iNTQ0MzkzYy05ZDllZTVlMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7490568957211719415:2362] 2025-04-07T13:53:11.696026Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7490568957211719417:2631] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> Initializer::Simple >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::UsesOnRetryStateDuringRetries >> 
ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-04-07T13:52:58.077349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568901340918222:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:58.077627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a71/r3tmp/tmpctNeBK/pdisk_1.dat 2025-04-07T13:52:58.762341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:59.119383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:59.365010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:59.365123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:59.368962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:59.369047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:59.371882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:59.382881Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:59.383905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:59.424271Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8622 2025-04-07T13:53:02.837364Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.849682Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.870989Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjZhNzcyYTgtMzEzMzg3MS04M2YyNWEzZi04N2RiOWZlMw==, workerId: [2:7490568916913734531:2282], database: , longSession: 1, local sessions count: 1 2025-04-07T13:53:02.871031Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.871136Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:02.871289Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:02.871314Z node 2 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T13:53:02.871335Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.871444Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.876775Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.876881Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.882592Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.882677Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.882700Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.032809Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568901340918222:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:03.032885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:03.107974Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:03.109732Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:03.113846Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjZhNzcyYTgtMzEzMzg3MS04M2YyNWEzZi04N2RiOWZlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-04-07T13:53:03.113883Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7490568922815755351:2473] 2025-04-07T13:53:03.113932Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:03.113997Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.114030Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.114053Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:03.114066Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:03.117179Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjZhNzcyYTgtMzEzMzg3MS04M2YyNWEzZi04N2RiOWZlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7490568916913734531:2282] 2025-04-07T13:53:03.117223Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7490568921208701843:2118] 2025-04-07T13:53:03.119757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568921208701844:2283], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.119902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.124749Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.124941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568922815755353:2283], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.125091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.464284Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88damd4r2wm4j3y5zt83ze", Created new session, sessionId: ydb://session/3?node_id=2&id=MmQ2MzM2YmUtNzQyMDk3NWEtMjExNmRiOGUtMjY0NjlkNzA=, workerId: [2:7490568921208701855:2285], database: , longSession: 0, local sessions count: 2 2025-04-07T13:53:03.464484Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr88damd4r2wm4j3y5zt83ze, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MmQ2MzM2YmUtNzQyMDk3NWEtMjExNmRiOGUtMjY0NjlkNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7490568921208701855:2285] 2025-04-07T13:53:03.464506Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7490568921208701856:2121] 2025-04-07T13:53:03.465888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568921208701857:2286], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.465976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.466223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568921208701862:2289], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:03.484061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:53:03.543794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568921208701864:2290], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:53:03.831827Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88damd4r2wm4j3y5zt83ze", Forwarded response to sender actor, requestId: 4, sender: [2:7490568921208701854:2284], selfId: [2:7490568899733865053:2060], source: [2:7490568921208701855:2285] 2025-04-07T13:53:03.833573Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MmQ2MzM2YmUtNzQyMDk3NWEtMjExNmRiOGUtMjY0NjlkNzA=, workerId: [2:7490568921208701855:2285], local sessions count: 1 2025-04-07T13:53:03.846373Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7490568901340918299:2256], selfId: [2:7490568899733865053:2060], source: [2:7490568916913734531:2282] 2025-04-07T13:53:03.849220Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7490568905635886108:2450], selfId: [1:7490568901340918299:2256], source: [2:7490568899733865053:2060] 2025-04-07T13:53:04.117284Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:05.655105Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568930350305150:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:05.655569Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a71/r3tmp/tmpno6Oyj/pdisk_1.dat 2025-04-07T13:53:05.910472Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.953334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:05.953410Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:05.955856Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30043, node 3 2025-04-07T13:53:06.157437Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:06.157474Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:06.157496Z ... 
ress is 2/3 2025-04-07T13:53:10.313879Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710661:0 ProgressState 2025-04-07T13:53:10.313915Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710661:0 progress is 3/3 2025-04-07T13:53:10.313940Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2025-04-07T13:53:10.313983Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1 2025-04-07T13:53:10.313996Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:2 2025-04-07T13:53:10.314009Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710661, publications: 4, subscribers: 1 2025-04-07T13:53:10.316507Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:53:10.316544Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:53:10.316559Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 11 2025-04-07T13:53:10.316753Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:53:10.316770Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:53:10.316781Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T13:53:10.316868Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:53:10.316882Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:53:10.316891Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 5 2025-04-07T13:53:10.316977Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-07T13:53:10.316991Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-07T13:53:10.316998Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 2 2025-04-07T13:53:10.317044Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2025-04-07T13:53:10.320411Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490568951825143054:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for 
error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:53:10.418395Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T13:53:10.418571Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:53:10.429644Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T13:53:10.666759Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568930350305150:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:10.666832Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:10.919348Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [3:7490568951825143038:2313], selfId: [3:7490568930350305356:2256], source: [3:7490568951825143037:2312] 2025-04-07T13:53:10.919875Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 2c63fa54-98d46aeb-add3489-48de503d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MzBiNmNjMTQtNzE4NDRkMC1mZDNhNDJkLTE0NDVlMjIy, TxId: 2025-04-07T13:53:10.919909Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 2c63fa54-98d46aeb-add3489-48de503d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MzBiNmNjMTQtNzE4NDRkMC1mZDNhNDJkLTE0NDVlMjIy, TxId: 2025-04-07T13:53:10.919923Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 2c63fa54-98d46aeb-add3489-48de503d. Result: SUCCESS. Issues: 2025-04-07T13:53:10.937382Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YTFhZjQ3YzQtODQzZmE2YWMtZGMwYjhlZWItZmJhYzkwZDc=, workerId: [3:7490568951825143199:2330], database: /Root, longSession: 1, local sessions count: 2 2025-04-07T13:53:10.937534Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:10.937935Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr88de9xfrm0ez4r40tr0ngj, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YTFhZjQ3YzQtODQzZmE2YWMtZGMwYjhlZWItZmJhYzkwZDc=, CurrentExecutionId: 2c63fa54-98d46aeb-add3489-48de503d, CustomerSuppliedId: 01jr88de9xfrm0ez4r40tr0ngj, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [3:7490568951825143199:2330] 2025-04-07T13:53:10.937965Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [3:7490568951825143200:3032] 2025-04-07T13:53:10.938353Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=MzBiNmNjMTQtNzE4NDRkMC1mZDNhNDJkLTE0NDVlMjIy, workerId: [3:7490568951825143037:2312], local sessions count: 1 2025-04-07T13:53:10.952007Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr88dj971p5k9vdrd6y02e0b", Request has 18445000039718.599637s seconds to be completed 2025-04-07T13:53:10.953940Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr88dj971p5k9vdrd6y02e0b", Created new session, sessionId: ydb://session/3?node_id=3&id=OWVmZTBiNGQtZDg5NzljNmMtMTYzMzg1YjEtN2E1NTE3OTk=, workerId: [3:7490568951825143211:2336], database: /Root, longSession: 1, local sessions count: 2 2025-04-07T13:53:10.954056Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr88dj971p5k9vdrd6y02e0b 2025-04-07T13:53:10.964043Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr88dj9k6kyk85yqmwsz7xx6, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=OWVmZTBiNGQtZDg5NzljNmMtMTYzMzg1YjEtN2E1NTE3OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7490568951825143211:2336] 2025-04-07T13:53:10.964087Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 300.000000s actor id: [3:7490568951825143214:3036] 2025-04-07T13:53:10.973609Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7490568951825143220:3040], for# user@builtin, access# DescribeSchema 2025-04-07T13:53:10.973643Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7490568951825143220:3040], for# user@builtin, access# DescribeSchema 2025-04-07T13:53:11.005668Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490568951825143215:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:11.039383Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWVmZTBiNGQtZDg5NzljNmMtMTYzMzg1YjEtN2E1NTE3OTk=, ActorId: [3:7490568951825143211:2336], ActorState: ExecuteState, TraceId: 01jr88dj9k6kyk85yqmwsz7xx6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:11.039655Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr88dj9k6kyk85yqmwsz7xx6", Forwarded response to sender actor, requestId: 7, sender: [3:7490568951825143213:2337], selfId: [3:7490568930350305356:2256], source: [3:7490568951825143211:2336] 2025-04-07T13:53:11.052745Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 2c63fa54-98d46aeb-add3489-48de503d, Bootstrap. Database: /Root 2025-04-07T13:53:11.054377Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr88de9xfrm0ez4r40tr0ngj", Forwarded response to sender actor, requestId: 5, sender: [3:7490568951825143034:2917], selfId: [3:7490568930350305356:2256], source: [3:7490568951825143199:2330] 2025-04-07T13:53:11.054415Z node 3 :KQP_PROXY DEBUG: Request has 18445000039718.497212s seconds to be completed 2025-04-07T13:53:11.056479Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZmZjYmViYzQtZTExYWYxNGMtNzY5NWYyYTYtZjdmOTQyNWE=, workerId: [3:7490568956120110529:2341], database: /Root, longSession: 1, local sessions count: 3 2025-04-07T13:53:11.056601Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.057056Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 2c63fa54-98d46aeb-add3489-48de503d, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:11.057412Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZmZjYmViYzQtZTExYWYxNGMtNzY5NWYyYTYtZjdmOTQyNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7490568956120110529:2341] 2025-04-07T13:53:11.057442Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7490568956120110531:3048] 2025-04-07T13:53:11.081553Z node 3 :TX_PROXY ERROR: Access denied for user@builtin with access DescribeSchema to path Root/.metadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-04-07T13:52:58.329102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568899863372877:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:58.329283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019e0/r3tmp/tmpuHwt7G/pdisk_1.dat 2025-04-07T13:52:59.516972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:59.522252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:59.599970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:59.600081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:59.611313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12384, node 1 2025-04-07T13:52:59.969246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.969281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.969288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.969387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
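For readability, the TSaveScriptExecutionResultMetaQuery statement logged a few entries above is reproduced here as a standalone YQL sketch; the table, column, and parameter names are copied from the log output itself and are not an independent schema reference:

    -- Same DECLARE + UPDATE pattern the KQP proxy logs for script execution metadata.
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_metas AS JsonDocument;

    UPDATE `.metadata/script_executions`
    SET result_set_metas = $result_set_metas
    WHERE database = $database AND execution_id = $execution_id;

Every internal bookkeeping query in this log follows the same shape: values are bound through declared parameters rather than inlined into the statement text.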
2025-04-07T13:53:00.791035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:00.797948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:53:00.799587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:00.808131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:53:00.808406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:53:00.808428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:53:00.811960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:53:00.813781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:53:00.813807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:53:00.825661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:00.836296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033980878, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:53:00.836329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:53:00.836622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:53:00.838847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:53:00.839025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:53:00.839074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:53:00.839151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:53:00.839185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:53:00.839224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:53:00.842085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:53:00.842129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:53:00.842144Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:53:00.842274Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:53:03.196866Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:03.197753Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-07T13:53:03.201480Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7490568912748275612:2286], trace_id: 01jr88d8m82xkb470ysz77sqn6 2025-04-07T13:53:03.201840Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-04-07T13:53:03.201898Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:03.201925Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.201959Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.202035Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:03.202052Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:03.202343Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:03.202589Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-04-07T13:53:03.205048Z node 1 :KQP_PROXY DEBUG: TraceId: "01jr88d8m82xkb470ysz77sqn6", Forwarded response to sender actor, requestId: 2, sender: [1:7490568912748275612:2286], selfId: [1:7490568899863372964:2256], source: [1:7490568899863372964:2256] 2025-04-07T13:53:03.325177Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568899863372877:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:03.325249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019e0/r3tmp/tmpez9D8U/pdisk_1.dat 2025-04-07T13:53:04.854562Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:04.860059Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:04.893532Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:04.893620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:04.895783Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64548 TServer::EnableGrpc on GrpcPort 7718, node 4 2025-04-07T13:53:05.137339Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:05.137365Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:05.137376Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:05.137482Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:53:05.189964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:07.925949Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:07.926908Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:07.934829Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:07.934858Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:07.934883Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:07.934914Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:07.934943Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:07.934979Z node 4 ... XY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:11.722526Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:11.722817Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7490568955332467558:2413] 2025-04-07T13:53:11.722841Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7490568955332467560:2665] 2025-04-07T13:53:11.735384Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7490568955332467559:2414], selfId: [4:7490568925267695225:2060], source: [4:7490568955332467558:2413] 2025-04-07T13:53:11.735726Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, TxId: 01jr88dk1bazyg5h4bjx2h1sds 2025-04-07T13:53:11.736216Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:11.737059Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 21, targetId: [4:7490568955332467558:2413] 2025-04-07T13:53:11.737101Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7490568955332467581:2670] 2025-04-07T13:53:11.767565Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7490568955332467580:2420], selfId: [4:7490568925267695225:2060], source: [4:7490568955332467558:2413] 2025-04-07T13:53:11.768190Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, TxId: 2025-04-07T13:53:11.768261Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, TxId: 2025-04-07T13:53:11.768317Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: cfeea72e-b32af876-9c76d942-4d0a0646. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-04-07T13:53:11.769001Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=OGM1NTczZGQtZmI5NjkxMDctNWU1ZWMxYzYtOWZiYTRmZjg=, workerId: [4:7490568955332467558:2413], local sessions count: 1 2025-04-07T13:53:11.769082Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: cfeea72e-b32af876-9c76d942-4d0a0646, successfully finalized script execution operation 2025-04-07T13:53:11.769105Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: cfeea72e-b32af876-9c76d942-4d0a0646, reply success 2025-04-07T13:53:11.785640Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr88dk3918ap5fy7g8pb7b1s, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MWMzZmQ4MTYtNTNhOGQyMzctOGJlZDk0MmYtMTUyYzJjZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7490568946742532700:2333] 2025-04-07T13:53:11.785684Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7490568955332467604:2678] 2025-04-07T13:53:12.014577Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:12.443625Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr88dk3918ap5fy7g8pb7b1s", Forwarded response to sender actor, requestId: 22, sender: [4:7490568955332467603:2425], selfId: [4:7490568925267695225:2060], source: [4:7490568946742532700:2333] 2025-04-07T13:53:12.453971Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, Bootstrap. Database: /dc-1 2025-04-07T13:53:12.454267Z node 4 :KQP_PROXY DEBUG: Request has 18445000039717.097362s seconds to be completed 2025-04-07T13:53:12.455885Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, workerId: [4:7490568959627434956:2439], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:12.456006Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:12.456479Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:12.456821Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [4:7490568959627434956:2439] 2025-04-07T13:53:12.456846Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7490568959627434958:2708] 2025-04-07T13:53:12.972573Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7490568959627434957:2440], selfId: [4:7490568925267695225:2060], source: [4:7490568959627434956:2439] 2025-04-07T13:53:12.973073Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, TxId: 01jr88dm800rpb9svk9yp4bwkv 2025-04-07T13:53:12.973184Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, TxId: 01jr88dm800rpb9svk9yp4bwkv 2025-04-07T13:53:12.973215Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get lease info, Rollback transaction: 01jr88dm800rpb9svk9yp4bwkv 2025-04-07T13:53:12.980846Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7490568959627434956:2439] 2025-04-07T13:53:12.980893Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7490568959627434992:2727] 2025-04-07T13:53:12.982984Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7490568959627434991:2448], selfId: [4:7490568925267695225:2060], source: [4:7490568959627434956:2439] 2025-04-07T13:53:12.983290Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: cfeea72e-b32af876-9c76d942-4d0a0646, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-04-07T13:53:12.983826Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZDRkYWVkMjgtZWRlNTFmZWItMTU5YWE0Y2YtNTJmN2Y2MmQ=, workerId: [4:7490568959627434956:2439], local sessions count: 1 2025-04-07T13:53:13.006994Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MWMzZmQ4MTYtNTNhOGQyMzctOGJlZDk0MmYtMTUyYzJjZmU=, workerId: [4:7490568946742532700:2333], local sessions count: 0 >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-07T13:53:15.118775Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.118804Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.118822Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.119277Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.119835Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.132548Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.133004Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.138045Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.138067Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.138085Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.138374Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T13:53:15.154086Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.154280Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.154592Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.154904Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T13:53:15.156006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.156035Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.156061Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.156379Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.159514Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.159666Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.160866Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.161612Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.161892Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:15.161974Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.162016Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T13:53:15.163122Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.163141Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.163162Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.164821Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.177386Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.177586Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.177891Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2025-04-07T13:53:15.178884Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:15.179147Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T13:53:15.180996Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T13:53:15.181218Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-4) 2025-04-07T13:53:15.181336Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.181384Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:15.181425Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:15.181551Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-07T13:53:15.181649Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:15.181667Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T13:53:15.181685Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:15.181804Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-07T13:53:15.181852Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T13:53:15.181868Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T13:53:15.181883Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:15.181955Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). 
Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-07T13:53:15.181994Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T13:53:15.182011Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T13:53:15.182037Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:15.182128Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-07T13:53:15.193678Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.193707Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.193725Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.194027Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.194549Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.194698Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.195020Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T13:53:15.195912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:15.196113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T13:53:15.204341Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T13:53:15.204554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T13:53:15.204748Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.204782Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:15.204800Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:15.204814Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T13:53:15.204855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:15.205102Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-04-07T13:53:15.205233Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T13:53:15.205254Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T13:53:15.205281Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T13:53:15.205303Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T13:53:15.205329Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:15.205470Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). 
Partition stream id: 1 GOT RANGE 5 9 2025-04-07T13:53:15.218173Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.218208Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.218230Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.224029Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.227808Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.228058Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.228374Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.229721Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:15.230682Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:15.231071Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-07T13:53:15.231199Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T13:53:15.231331Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.231374Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:15.231407Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-07T13:53:15.231426Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-07T13:53:15.231464Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-07T13:53:15.231484Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T13:53:15.231629Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). 
Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-04-07T13:53:15.231756Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-04-07T13:53:02.081034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568918239409879:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:02.081094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001339/r3tmp/tmpfJIWIS/pdisk_1.dat 2025-04-07T13:53:02.651731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:02.653243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:02.653342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:02.666983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21843 TServer::EnableGrpc on GrpcPort 21877, node 1 2025-04-07T13:53:03.232817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:03.232839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:03.232846Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:03.232926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:53:03.997368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:04.062698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:06.822237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:07.085128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568918239409879:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:07.085260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001339/r3tmp/tmpsIzMHp/pdisk_1.dat 2025-04-07T13:53:08.732976Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:08.745790Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:08.764768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:08.764837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:08.769880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31046 TServer::EnableGrpc on GrpcPort 22049, node 2 2025-04-07T13:53:09.194254Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:09.194276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:09.194282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:09.194354Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:53:09.552900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:13.078592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-07T13:53:15.165057Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.165110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.165138Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.169346Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.169977Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:15.170037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.174760Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.174783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.174799Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.184925Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.185391Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:15.185451Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.189553Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.189574Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.189594Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.189896Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T13:53:15.189950Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.189974Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.190478Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-07T13:53:15.191509Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.191525Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.191540Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.198436Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T13:53:15.198479Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.198509Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.198576Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-07T13:53:15.205889Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T13:53:15.205915Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T13:53:15.205941Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.216896Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.229030Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.271880Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T13:53:15.272894Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.273228Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2025-04-07T13:53:15.287380Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-07T13:53:15.287673Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.287712Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:15.287730Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:15.287746Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T13:53:15.287764Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T13:53:15.287777Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T13:53:15.287791Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-07T13:53:15.287805Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-07T13:53:15.287920Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-07T13:53:15.287940Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-07T13:53:15.287954Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-07T13:53:15.287969Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-07T13:53:15.287983Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-07T13:53:15.287997Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-07T13:53:15.288024Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-07T13:53:15.288043Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-07T13:53:15.288141Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-07T13:53:15.288174Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-07T13:53:15.288194Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-07T13:53:15.288207Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-07T13:53:15.288220Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-07T13:53:15.288233Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-07T13:53:15.288247Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-07T13:53:15.288261Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-07T13:53:15.288275Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-07T13:53:15.288292Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-07T13:53:15.288334Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-07T13:53:15.288356Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-07T13:53:15.288372Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-07T13:53:15.288385Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-07T13:53:15.288400Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-07T13:53:15.288413Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-07T13:53:15.288455Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-07T13:53:15.288482Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-07T13:53:15.288503Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-07T13:53:15.288518Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-07T13:53:15.288532Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-07T13:53:15.288545Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-07T13:53:15.288560Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-07T13:53:15.288573Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-07T13:53:15.288587Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-07T13:53:15.288601Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-07T13:53:15.288626Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-07T13:53:15.288650Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-07T13:53:15.288682Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-07T13:53:15.288696Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-07T13:53:15.288708Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-07T13:53:15.288720Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-07T13:53:15.288733Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-07T13:53:15.288752Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-07T13:53:15.288805Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T13:53:15.295142Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-07T13:53:15.295780Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-07T13:53:15.295815Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-07T13:53:15.295835Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-07T13:53:15.295849Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-07T13:53:15.295868Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-07T13:53:15.295881Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-07T13:53:15.295909Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-07T13:53:15.295938Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-07T13:53:15.295967Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-07T13:53:15.295981Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-07T13:53:15.295994Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-07T13:53:15.296008Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-07T13:53:15.296020Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-07T13:53:15.296040Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-07T13:53:15.296059Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-07T13:53:15.296087Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-07T13:53:15.296117Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-07T13:53:15.296132Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-07T13:53:15.296145Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-07T13:53:15.296158Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-07T13:53:15.296171Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-07T13:53:15.296182Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-07T13:53:15.296192Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-07T13:53:15.296212Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-07T13:53:15.296234Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-07T13:53:15.296245Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-07T13:53:15.296255Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-07T13:53:15.296265Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-07T13:53:15.296278Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-07T13:53:15.296288Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-07T13:53:15.296299Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-07T13:53:15.296326Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-07T13:53:15.296367Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-07T13:53:15.296385Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-07T13:53:15.296399Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-07T13:53:15.296414Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-07T13:53:15.296428Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-07T13:53:15.296441Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-07T13:53:15.296455Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-07T13:53:15.296479Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-07T13:53:15.296507Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-07T13:53:15.296523Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-07T13:53:15.296536Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-07T13:53:15.296550Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-07T13:53:15.296561Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-07T13:53:15.296575Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-07T13:53:15.296590Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-07T13:53:15.296603Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-07T13:53:15.296617Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-07T13:53:15.296641Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-07T13:53:15.296721Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T13:53:15.296859Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T13:53:15.298176Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.298197Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.298220Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.304921Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T13:53:15.312036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.312254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.313222Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.414993Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.415446Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T13:53:15.415484Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.415510Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T13:53:15.415556Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T13:53:15.621593Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-07T13:53:15.724973Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T13:53:15.725222Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T13:53:15.725535Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T13:53:15.726624Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.726640Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.726656Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.737615Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:15.747218Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:15.747383Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.747880Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:15.853554Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.854097Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T13:53:15.854146Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:15.854181Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T13:53:15.854270Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-07T13:53:15.854350Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T13:53:15.854547Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T13:53:15.854609Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-07T13:53:15.854692Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-04-07T13:52:57.761060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568898240153555:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.768489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a6c/r3tmp/tmpgHf6T7/pdisk_1.dat 2025-04-07T13:52:58.648455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.648523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.677366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.747736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.841674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:4556 TServer::EnableGrpc on GrpcPort 26986, node 1 2025-04-07T13:52:59.339008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:59.339036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:59.339043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:59.339335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:52:59.584526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:01.972744Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.975638Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.986321Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.986379Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.986463Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:01.986486Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:01.986520Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.996483Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.997508Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.997523Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:01.997570Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:01.997711Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.997730Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:01.997744Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:01.998116Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:01.998122Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:01.998137Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:02.002801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-07T13:53:02.011072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.013306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:02.022205Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-07T13:53:02.022276Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-07T13:53:02.024598Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-07T13:53:02.024629Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-07T13:53:02.024845Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-07T13:53:02.024888Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-04-07T13:53:02.202319Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-07T13:53:02.264877Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-07T13:53:02.267961Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-07T13:53:02.289650Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-07T13:53:02.329035Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-07T13:53:02.340987Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-07T13:53:02.341540Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ccda070b-16433219-a169efa1-1b0c3fa9, Bootstrap. Database: /dc-1 2025-04-07T13:53:02.341943Z node 1 :KQP_PROXY DEBUG: Request has 18445000039727.209698s seconds to be completed 2025-04-07T13:53:02.344811Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YzU3NDcyNDktMmFjZGI2NWEtM2YxYjI2ZjEtMWZjZDViNTA=, workerId: [1:7490568919714990771:2306], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T13:53:02.344932Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:02.371890Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ccda070b-16433219-a169efa1-1b0c3fa9, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-07T13:53:02.372523Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YzU3NDcyNDktMmFjZGI2NWEtM2YxYjI2ZjEtMWZjZDViNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7490568919714990771:2306] 2025-04-07T13:53:02.372558Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490568919714990773:2462] 2025-04-07T13:53:02.375112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568919714990774:2308], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.375234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.375854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568919714990786:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.379565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-07T13:53:02.397693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:53:02.398276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568919714990788:2312], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:53:02.757049Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568898240153555:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:0 ... YmZkNGEtOGViNDFkMDc=, TxId: 01jr88dkbva04j19sjrvdxvevd 2025-04-07T13:53:12.099455Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 5b8710f2-2e8dce6f-75e26777-218dc3e2, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:12.099903Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YmIwYTc1YzEtNTg3ZGFlZTYtOTZlYmZkNGEtOGViNDFkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 18, targetId: [2:7490568957910332128:2364] 2025-04-07T13:53:12.099932Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7490568962205299516:2623] 2025-04-07T13:53:12.237168Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:12.282808Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 17, sender: [2:7490568957910332182:2379], selfId: [2:7490568936435494741:2228], source: [2:7490568957910332181:2378] 2025-04-07T13:53:12.284830Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGU1ZWFlNzctMjU3MGJkNWEtZjQyYzRjNmItMzkxNmQ3Zjg=, TxId: 01jr88dkj646t38n0kjn5spp4e 2025-04-07T13:53:12.284994Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-04-07T13:53:12.285493Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZGU1ZWFlNzctMjU3MGJkNWEtZjQyYzRjNmItMzkxNmQ3Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 19, targetId: [2:7490568957910332181:2378] 2025-04-07T13:53:12.285536Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7490568962205299546:2637] 2025-04-07T13:53:12.647123Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7490568962205299545:2398], selfId: [2:7490568936435494741:2228], source: [2:7490568957910332181:2378] 2025-04-07T13:53:12.647825Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGU1ZWFlNzctMjU3MGJkNWEtZjQyYzRjNmItMzkxNmQ3Zjg=, TxId: 2025-04-07T13:53:12.647880Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGU1ZWFlNzctMjU3MGJkNWEtZjQyYzRjNmItMzkxNmQ3Zjg=, TxId: 2025-04-07T13:53:12.741124Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr88dkzgawgqbsxxh1jtrqhn, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YmY5MmJiZjEtMjEyZmM2My01OGM2Y2I4YS00ZmU4MDFmMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [2:7490568953615364730:2331] 2025-04-07T13:53:12.741180Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7490568962205299578:2651] 2025-04-07T13:53:12.741260Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZGU1ZWFlNzctMjU3MGJkNWEtZjQyYzRjNmItMzkxNmQ3Zjg=, workerId: [2:7490568957910332181:2378], local sessions count: 3 2025-04-07T13:53:13.181652Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7490568962205299515:2389], selfId: [2:7490568936435494741:2228], source: [2:7490568957910332128:2364] 2025-04-07T13:53:13.182184Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 5b8710f2-2e8dce6f-75e26777-218dc3e2, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmIwYTc1YzEtNTg3ZGFlZTYtOTZlYmZkNGEtOGViNDFkMDc=, TxId: 2025-04-07T13:53:13.182267Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 5b8710f2-2e8dce6f-75e26777-218dc3e2, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmIwYTc1YzEtNTg3ZGFlZTYtOTZlYmZkNGEtOGViNDFkMDc=, TxId: 2025-04-07T13:53:13.182281Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 5b8710f2-2e8dce6f-75e26777-218dc3e2. SUCCESS. Issues: 2025-04-07T13:53:13.183019Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmIwYTc1YzEtNTg3ZGFlZTYtOTZlYmZkNGEtOGViNDFkMDc=, workerId: [2:7490568957910332128:2364], local sessions count: 2 2025-04-07T13:53:13.189319Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjJjMjFkOWUtM2RiNTkxYWEtYzIzMDY1ZWYtNDAzMWU5NGE=, workerId: [2:7490568953615364714:2321], local sessions count: 1 2025-04-07T13:53:13.715631Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr88dkzgawgqbsxxh1jtrqhn", Forwarded response to sender actor, requestId: 20, sender: [2:7490568962205299575:2407], selfId: [2:7490568936435494741:2228], source: [2:7490568953615364730:2331] 2025-04-07T13:53:13.722247Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-04-07T13:53:13.722352Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, Bootstrap. 
Database: /dc-1 2025-04-07T13:53:13.724948Z node 2 :KQP_PROXY DEBUG: Request has 18445000039715.826698s seconds to be completed 2025-04-07T13:53:13.726892Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjFlYWViZTItNjFiODcwNDItMzJhN2FkMTgtMzBjNzQ1NDg=, workerId: [2:7490568966500266954:2425], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-07T13:53:13.727017Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:13.732300Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-07T13:53:13.732763Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjFlYWViZTItNjFiODcwNDItMzJhN2FkMTgtMzBjNzQ1NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7490568966500266954:2425] 2025-04-07T13:53:13.732806Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7490568966500266956:2690] 2025-04-07T13:53:13.745344Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:14.429293Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7490568966500266955:2426], selfId: [2:7490568936435494741:2228], source: [2:7490568966500266954:2425] 2025-04-07T13:53:14.430115Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjFlYWViZTItNjFiODcwNDItMzJhN2FkMTgtMzBjNzQ1NDg=, TxId: 2025-04-07T13:53:14.430267Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjFlYWViZTItNjFiODcwNDItMzJhN2FkMTgtMzBjNzQ1NDg=, TxId: 2025-04-07T13:53:14.430373Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 2d82a3f6-9be74ef2-a35fe1cd-11fab1dd, reply success 2025-04-07T13:53:14.435551Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjFlYWViZTItNjFiODcwNDItMzJhN2FkMTgtMzBjNzQ1NDg=, workerId: [2:7490568966500266954:2425], local sessions count: 1 2025-04-07T13:53:14.447206Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmY5MmJiZjEtMjEyZmM2My01OGM2Y2I4YS00ZmU4MDFmMg==, workerId: [2:7490568953615364730:2331], local sessions count: 0 >> ReadSessionImplTest::ReconnectOnTmpError >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate >> 
ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-07T13:53:14.667174Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.667198Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.667220Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:14.681455Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:14.684736Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:14.718423Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.720254Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:14.721146Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:14.721635Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:14.723603Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T13:53:14.723716Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:14.726152Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:14.726195Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T13:53:14.726234Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:53:14.726254Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:53:14.733977Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.734001Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.734027Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:14.740987Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:14.755840Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:14.756072Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.756419Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". 
Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T13:53:14.757339Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:14.757555Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T13:53:14.757846Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T13:53:14.758054Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T13:53:14.760330Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:14.760365Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:14.760400Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:14.760540Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-07T13:53:14.760647Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:14.760681Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T13:53:14.760700Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:14.760812Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-07T13:53:14.760863Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T13:53:14.760879Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T13:53:14.760896Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:14.760961Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-07T13:53:14.760994Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T13:53:14.761010Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T13:53:14.761031Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:14.761109Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-07T13:53:14.773693Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.773717Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.773742Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:14.784954Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:14.796876Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:14.797039Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:14.797337Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2025-04-07T13:53:14.798252Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:53:14.798456Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T13:53:14.798822Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T13:53:14.799081Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T13:53:14.799184Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T13:53:14.799221Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:14.799318Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-04-07T13:53:14.799395Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:14.799427Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:14.799497Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-04-07T13:53:14.799560Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:14.799576Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T13:53:14.799622Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-04-07T13:53:14.799672Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T13:53:14.799692Z :DEBUG: [db] [sessionid] [cluster] ... estTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:17.316533Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2025-04-07T13:53:17.630666Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T13:53:17.630703Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T13:53:17.630735Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:17.636941Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T13:53:17.648981Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:17.649237Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T13:53:17.649652Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-07T13:53:17.845605Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-07T13:53:17.845814Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:17.845845Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:53:17.845863Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:53:17.845877Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T13:53:17.845897Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T13:53:17.845912Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T13:53:17.845928Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-07T13:53:17.845942Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-07T13:53:17.845961Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-07T13:53:17.845976Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-07T13:53:17.846018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-07T13:53:17.846155Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T13:53:17.848159Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-04-07T13:53:17.861874Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.861898Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.861925Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:17.877496Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:17.883005Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:17.883425Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.888921Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:17.889354Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-07T13:53:17.893935Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.893962Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.893986Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:17.909034Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T13:53:17.924980Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:17.925161Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.925744Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:17.929904Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:17.932767Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:17.932834Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:53:17.936167Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-04-07T13:52:52.558181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:52:52.558426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:52.558671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa4/r3tmp/tmpFFmLno/pdisk_1.dat 2025-04-07T13:52:52.925755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:52:52.972138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:53.030069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:53.030174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:53.041684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:53.159305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.232031Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:653:2553] 2025-04-07T13:52:53.232368Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:53.287340Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:53.287751Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2025-04-07T13:52:53.287959Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:53.303836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:53.305387Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:52:53.305494Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:52:53.305546Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:52:53.305866Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:53.336587Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:52:53.336895Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:53.337023Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2573] 2025-04-07T13:52:53.337060Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:52:53.337134Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:52:53.337185Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:52:53.338184Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:52:53.338302Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T13:52:53.338827Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:644:2548], serverId# [1:676:2566], sessionId# [0:0:0] 2025-04-07T13:52:53.339100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:52:53.339145Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:53.339180Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:52:53.339219Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:52:53.339300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:53.339437Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:52:53.339668Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:52:53.339764Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:52:53.341958Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:53.343530Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:52:53.343588Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:52:53.343630Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:52:53.343858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:53.343906Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:52:53.343964Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:53.344036Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:694:2576] 2025-04-07T13:52:53.344063Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:52:53.344084Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:52:53.344108Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:52:53.344863Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:52:53.344940Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:52:53.345937Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:52:53.345973Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:53.346000Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:52:53.346032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:52:53.346572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:645:2549], serverId# [1:681:2570], sessionId# [0:0:0] 2025-04-07T13:52:53.346908Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:661:2557] 2025-04-07T13:52:53.347087Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:52:53.367662Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:52:53.367862Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:52:53.367930Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T13:52:53.375348Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:53.375460Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:52:53.376700Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T13:52:53.376772Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T13:52:53.376815Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-07T13:52:53.377125Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:52:53.377184Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T13:52:53.377257Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:52:53.377326Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:713:2585] 2025-04-07T13:52:53.377350Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T13:52:53.377371Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T13:52:53.377395Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:52:53.377998Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:52:53.378088Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T13:52:53.378136Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T13:52:53.378192Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T13:52:53.378279Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:52:53.378315Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:52:53.378345Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T13:52:53.378375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:52:53.389833Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:52:53.389941Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:53.390022Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:52:53.390053Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:53.438348Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:646:2550], serverId# [1:722:2593], sessionId# [0:0:0] 2025-04-07T13:52:53.438488Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:52:53.438657Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-07T13:52:53.438762Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-07T13:52:53.439181Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T13:52:53.452959Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:52:53.453048Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2025-04-07T13:52:53.630110Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2607], serverId# [1:740:2611], sessionId# [0:0:0] 2025-04-07T13:52:53.630280Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:737:2608], serverId# [1:741:2612], sessionId# [0:0:0] 2025-04-07T13:52:53.630498Z node 1 :TX_DATA ... 63 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-04-07T13:53:15.555387Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-07T13:53:15.555481Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:15.555519Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:15.555580Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715663] at 72075186224037888 for LoadAndWaitInRS 2025-04-07T13:53:15.556009Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.556168Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:53:15.556199Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:15.556228Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715663] at 72075186224037890 for LoadAndWaitInRS 2025-04-07T13:53:15.556474Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.567706Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:15.567817Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1056:2808], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T13:53:15.567916Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-04-07T13:53:15.567966Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:15.568086Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-04-07T13:53:15.568148Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] HandlePlan 
TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 2 2025-04-07T13:53:15.568247Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:53:15.568291Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1056:2808], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T13:53:15.568348Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-04-07T13:53:15.568378Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T13:53:15.568451Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715663 2025-04-07T13:53:15.568487Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 2 2025-04-07T13:53:15.568525Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1056:2808] Reply: txId# 281474976715663, status# OK, error# 2025-04-07T13:53:15.569008Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T13:53:15.569066Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-07T13:53:15.569266Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1050:2803], serverId# [3:1051:2804], sessionId# [0:0:0] 2025-04-07T13:53:15.569371Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:53:15.569414Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.569462Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:53:15.569534Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:53:15.570787Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T13:53:15.571211Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T13:53:15.571414Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:53:15.571464Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.571522Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for WaitForStreamClearance 2025-04-07T13:53:15.571801Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.571888Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:53:15.572497Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 1 2025-04-07T13:53:15.573274Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715666, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:53:15.573456Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715666, PendingAcks: 
0 2025-04-07T13:53:15.573515Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 0 2025-04-07T13:53:15.576193Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T13:53:15.576253Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037889 2025-04-07T13:53:15.576780Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:53:15.576825Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.576867Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for ReadTableScan 2025-04-07T13:53:15.577007Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.577075Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:53:15.577121Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:53:15.579917Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:15.580368Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:15.580589Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:15.580647Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.580737Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for WaitForStreamClearance 2025-04-07T13:53:15.581025Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.581101Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:15.581820Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 1 2025-04-07T13:53:15.582081Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715667, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:53:15.582227Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715667, PendingAcks: 0 2025-04-07T13:53:15.582283Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 0 2025-04-07T13:53:15.619709Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T13:53:15.619798Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037888 2025-04-07T13:53:15.620264Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:15.620302Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.620365Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for ReadTableScan 2025-04-07T13:53:15.620504Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.620563Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:15.620611Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:15.623386Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-07T13:53:15.623739Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-07T13:53:15.623922Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:53:15.623975Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.624033Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-04-07T13:53:15.624297Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.624391Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:53:15.628345Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-04-07T13:53:15.628695Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T13:53:15.630272Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-04-07T13:53:15.630370Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-04-07T13:53:15.828558Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T13:53:15.828629Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-04-07T13:53:15.844756Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T13:53:15.844824Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-07T13:53:15.844867Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for ReadTableScan 2025-04-07T13:53:15.845006Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:15.845068Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T13:53:15.845110Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-07T13:53:18.098038Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.098065Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.098082Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.100236Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-07T13:53:18.100285Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.100344Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.101547Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007648s 2025-04-07T13:53:18.102150Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.102576Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:18.102667Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.117620Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.117643Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.117659Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.132969Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T13:53:18.133011Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.133034Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.133097Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007199s 2025-04-07T13:53:18.152952Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.164957Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:18.165069Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.172766Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.172801Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.172822Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.180403Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T13:53:18.180452Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.180471Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.180537Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.259492s 2025-04-07T13:53:18.188012Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.208977Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:18.209066Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.213635Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.213653Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.213672Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.229006Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T13:53:18.229049Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.229076Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.229139Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.242253s 2025-04-07T13:53:18.240956Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.248945Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T13:53:18.249044Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.250261Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.250280Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.250308Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.264977Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.272915Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:18.288166Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.297945Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-07T13:53:18.297985Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.298008Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.298068Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.167805s 2025-04-07T13:53:18.298251Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T13:53:18.310006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.310028Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.310047Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.325223Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.333006Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:18.333183Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.333712Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:18.448833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.449274Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T13:53:18.449337Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:18.449373Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T13:53:18.449437Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T13:53:18.552761Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T13:53:18.552934Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T13:53:18.554084Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.554105Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.554128Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:18.560516Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:18.561153Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:18.561341Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.565214Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:18.669146Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:18.669929Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T13:53:18.669986Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:18.670020Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-07T13:53:18.670095Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-07T13:53:18.670195Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T13:53:18.670433Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T13:53:18.670492Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T13:53:18.670584Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> KqpProxy::NodeDisconnectedTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-04-07T13:52:15.024756Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1744033935024720 2025-04-07T13:52:15.445674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568714362260544:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.447248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:15.519586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568717047944988:2167];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.851483Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:15.873786Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00183e/r3tmp/tmpqGfBp5/pdisk_1.dat 2025-04-07T13:52:15.946550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.192464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.192568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.193861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.193939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.196678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.198474Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.200077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.232239Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16402, node 1 2025-04-07T13:52:16.384182Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:16.404029Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:16.418146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/u6pr/00183e/r3tmp/yandex7k3N4u.tmp 2025-04-07T13:52:16.418177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00183e/r3tmp/yandex7k3N4u.tmp 2025-04-07T13:52:16.429443Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00183e/r3tmp/yandex7k3N4u.tmp 2025-04-07T13:52:16.429580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.485064Z INFO: TTestServer started on Port 8770 GrpcPort 16402 TClient is connected to server localhost:8770 PQClient connected to localhost:16402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:16.795150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:19.289466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734227814363:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.295548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.298154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568734227814375:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.325594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568731542130659:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.325718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568731542130653:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.341726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.346707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:52:19.414697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568731542130669:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:52:19.417428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568734227814377:2286], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:52:19.727627Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568734227814412:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:19.729374Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjE2NWM0NDctMWExODQzZjYtNmZkMTcyMzMtNThlN2I5NTQ=, ActorId: [2:7490568734227814361:2281], ActorState: ExecuteState, TraceId: 01jr88bztna8n35s50zf6r2c3e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:19.732471Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:19.733248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:19.742555Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568731542130783:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:19.752034Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWEzY2JhYWQtNmZhM2JiZTQtNjQ2ODZhOGItN2QzNmY0Yzg=, ActorId: [1:7490568731542130651:2303], ActorState: ExecuteState, TraceId: 01jr88bzvmb1ewsvbd2bg7qrgj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:19.757446Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:19.939125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.129897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16402", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:20.442107Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568714362260544:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.442166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.516809Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568717047944988:2167];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.516869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.655593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88c0te6ppt4e9myyz4x9bq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE4MDkwZjctNzgyZjg2OTItYjQ0NGZkNjEtZGM2OTlmZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
8-227f02b0-56a49cd0_0 grpc read done: success: 0 data: 2025-04-07T13:52:44.369773Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3b074c6c-b18ea918-227f02b0-56a49cd0_0 grpc read failed 2025-04-07T13:52:44.369796Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3b074c6c-b18ea918-227f02b0-56a49cd0_0 grpc closed 2025-04-07T13:52:44.369814Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3b074c6c-b18ea918-227f02b0-56a49cd0_0 is DEAD 2025-04-07T13:52:44.375978Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:52:44.376366Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:52:44.376407Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490568841976650608:2470] destroyed 2025-04-07T13:52:44.376460Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-04-07T13:52:44.451031Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:52:46.452030Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:52:47.603290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:52:47.603321Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:47.721161Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay === In the next federation discovery response dc2 will be available 2025-04-07T13:52:48.460805Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:52:50.464957Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:52:52.468855Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:52:52.721417Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2025-04-07T13:52:54.470617Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:52:56.478618Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:52:57.721870Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2025-04-07T13:52:58.480904Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:53:00.484956Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:53:02.497377Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:53:02.722505Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2025-04-07T13:53:04.501671Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:53:06.508817Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:53:07.725002Z node 4 
:PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2025-04-07T13:53:08.512737Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:53:10.516834Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-07T13:53:12.524843Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-07T13:53:12.688724Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-07T13:53:12.688813Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-04-07T13:53:12.689750Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2025-04-07T13:53:12.689996Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-07T13:53:12.701604Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-07T13:53:12.702009Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2025-04-07T13:53:12.729826Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 === Waiting for repair >>> Ready to answer: ok 2025-04-07T13:53:14.533033Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:8339" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:8339" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:8339" location: "dc3" status: AVAILABLE weight: 500 } ] } === Closing the session 2025-04-07T13:53:14.547858Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-04-07T13:53:14.551812Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-04-07T13:53:14.569708Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-04-07T13:53:14.565880Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2025-04-07T13:53:14.570810Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2025-04-07T13:53:14.571294Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnWriteDone gRpcStatusCode: 0 2025-04-07T13:53:14.572716Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T13:53:14.572755Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-07T13:53:14.573462Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { path: "test-topic" message_group_id: "src_id" } 2025-04-07T13:53:14.573618Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 path: "test-topic" message_group_id: "src_id" from ipv6:[::1]:60494 2025-04-07T13:53:14.573642Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="topic server" ip=ipv6:[::1]:60494 proto=topic topic=test-topic durationSec=0 2025-04-07T13:53:14.573655Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:53:14.574682Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-04-07T13:53:14.574796Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-04-07T13:53:14.575602Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-04-07T13:53:14.575637Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-04-07T13:53:14.580060Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-07T13:53:14.580200Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T13:53:14.580212Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:53:14.580222Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:53:14.580243Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568970825671101:2747] (SourceId=src_id, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:53:14.583322Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568970825671101:2747] (SourceId=src_id, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:53:14.616622Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568970825671101:2747] 
(SourceId=src_id, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T13:53:14.617433Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568970825671123:2747] connected; active server actors: 1 2025-04-07T13:53:14.617535Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568970825671101:2747] (SourceId=src_id, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src_id 2025-04-07T13:53:14.617555Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490568970825671101:2747] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-04-07T13:53:14.618388Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568970825671123:2747] disconnected; active server actors: 1 2025-04-07T13:53:14.618409Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490568970825671123:2747] disconnected no session 2025-04-07T13:53:14.638847Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: grpc closed 2025-04-07T13:53:14.638878Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: is DEAD 2025-04-07T13:53:15.187241Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568975120638470:2758], TxId: 281474976720737, task: 1, CA Id [3:7490568975120638468:2758]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:53:15.243578Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568975120638470:2758], TxId: 281474976720737, task: 1, CA Id [3:7490568975120638468:2758]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:15.326178Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568975120638470:2758], TxId: 281474976720737, task: 1, CA Id [3:7490568975120638468:2758]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:15.432483Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490568975120638470:2758], TxId: 281474976720737, task: 1, CA Id [3:7490568975120638468:2758]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-04-07T13:52:20.366828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568737647868036:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.371089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:20.684497Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b42/r3tmp/tmpQ75lxs/pdisk_1.dat 2025-04-07T13:52:20.982162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:20.988559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:20.988704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:20.990622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14606, node 1 2025-04-07T13:52:21.113324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b42/r3tmp/yandexkXxsmR.tmp 2025-04-07T13:52:21.113348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b42/r3tmp/yandexkXxsmR.tmp 2025-04-07T13:52:21.113508Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b42/r3tmp/yandexkXxsmR.tmp 2025-04-07T13:52:21.113608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:21.174868Z INFO: TTestServer started on Port 1305 GrpcPort 14606 TClient is connected to server localhost:1305 PQClient connected to localhost:14606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:52:21.620775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.632656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.659093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.809176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.821108Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:52:24.202411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754827737920:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.202551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.202869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754827737947:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.208109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:24.237779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568754827737949:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.478586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.502728Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568754827738041:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.503299Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FlZTAxZmUtNjNiNzgzZS00M2MyNmQ0NS0zODJmZjY1NA==, ActorId: [1:7490568754827737917:2308], ActorState: ExecuteState, TraceId: 01jr88c4khda98zmrnt7kg1w98, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.505310Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.507557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.616459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568759122705593:2624] 2025-04-07T13:52:25.365954Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568737647868036:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:25.366031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.239822Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.267629Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.268683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568784892509680:2800], Recipient [1:7490568737647868391:2202]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.268719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.268733Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.268776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568784892509676:2797], Recipient [1:7490568737647868391:2202]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.268790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.336546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 5 MaxPartitionCount: 10 ScaleThresholdSeconds: 500 ScaleUpPartitionWriteSpeedThresholdPercent: 80 ScaleDownPartitionWriteSpeedThresholdPercent: 20 PartitionStrategyType: CAN_SPLIT } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.336971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/autoscalit-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.345038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: autoscalit-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.345095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.345122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.345146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.345159Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.345172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72 ... 046644480, cookie: 281474976715673 2025-04-07T13:53:17.306438Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T13:53:17.306559Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7490568930292847604:2236], Recipient [5:7490568930292847433:2137]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 2 } 2025-04-07T13:53:17.306583Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T13:53:17.306640Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-07T13:53:17.306709Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-07T13:53:17.306722Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-07T13:53:17.306740Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-04-07T13:53:17.306759Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-04-07T13:53:17.306841Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-04-07T13:53:17.306863Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7490568981832456180:2449] 2025-04-07T13:53:17.306890Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T13:53:17.307224Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-07T13:53:17.307234Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T13:53:17.307307Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7490568981832456180:2449] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-07T13:53:17.308007Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7490568981832456330:2451], Recipient [5:7490568981832456201:2451]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-07T13:53:17.308567Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7490568981832456329:2461], Recipient [5:7490568981832456201:2451]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-07T13:53:17.313144Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7490568981832456191:2838], Recipient [5:7490568930292847433:2137]: NKikimr::TEvTabletPipe::TEvServerDisconnected 
2025-04-07T13:53:17.313180Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:17.313195Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T13:53:17.326446Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7490568981832456201:2451], Recipient [5:7490568981832456201:2451]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-07T13:53:17.326496Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-07T13:53:17.326518Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:53:17.326560Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State DELETING 2025-04-07T13:53:17.326582Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] empty tx queue 2025-04-07T13:53:17.326600Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715673 2025-04-07T13:53:17.326741Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7490568981832456201:2451], Recipient [5:7490568981832456201:2451]: NKikimr::TEvKeyValue::TEvCollect 2025-04-07T13:53:17.327709Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7490568981832456335:2462], Recipient [5:7490568981832456201:2451]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-07T13:53:17.331578Z node 5 :PQ_READ_PROXY DEBUG: new alter topic request 2025-04-07T13:53:17.333570Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:7490568981832456346:2925], Recipient [5:7490568930292847433:2137]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:17.333610Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:17.333632Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:53:17.333720Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [5:7490568981832456343:2923], Recipient [5:7490568930292847433:2137]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-04-07T13:53:17.333738Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:53:17.336165Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:53:17.336532Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: 
Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:53:17.345689Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-04-07T13:53:17.346720Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T13:53:17.348309Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:53:17.348550Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-04-07T13:53:17.348582Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T13:53:17.349032Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7490568981832456346:2925], Recipient [5:7490568930292847433:2137]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:17.349062Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:17.349077Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T13:53:17.365184Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490568981832456201:2451], Partition 0, Sender [0:0:0], Recipient [5:7490568981832456296:2457], Cookie: 0 2025-04-07T13:53:17.365284Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490568981832456296:2457]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.365314Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.365386Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:17.365492Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:17.365523Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:17.365575Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:17.472846Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490568981832456201:2451], Partition 0, Sender [0:0:0], Recipient [5:7490568981832456296:2457], Cookie: 0 2025-04-07T13:53:17.472950Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490568981832456296:2457]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.472983Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.473042Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:17.473125Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:17.473153Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:17.473190Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:17.578931Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490568981832456201:2451], Partition 0, Sender [0:0:0], Recipient [5:7490568981832456296:2457], Cookie: 0 2025-04-07T13:53:17.589052Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490568981832456296:2457]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.589098Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:17.589149Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:17.589238Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:17.589266Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:17.589299Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |86.4%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> CompressExecutor::TestReorderedExecutor |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-04-07T13:52:57.759847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568895298335208:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.771274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a52/r3tmp/tmpaUUOHH/pdisk_1.dat 2025-04-07T13:52:58.719161Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.719820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.719896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.757820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.808074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:8181 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-07T13:52:59.467798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:59.516696Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:02.316343Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.395318Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.412095Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YmU0MTIzMi1iNjQzZDY3NC1mMmUwZmZiNi1hMWJhYmY0, workerId: [1:7490568916773172281:2284], database: , longSession: 0, local sessions count: 1 2025-04-07T13:53:02.412134Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.412340Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YmU0MTIzMi1iNjQzZDY3NC1mMmUwZmZiNi1hMWJhYmY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7490568916773172281:2284] 2025-04-07T13:53:02.412363Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-04-07T13:53:02.422787Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:02.422834Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:02.422855Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:02.422903Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:02.423052Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.423087Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.423120Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.423150Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-07T13:53:02.423841Z node 1 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=1&id=YmU0MTIzMi1iNjQzZDY3NC1mMmUwZmZiNi1hMWJhYmY0 status: TIMEOUT round: 0 2025-04-07T13:53:02.424343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmU0MTIzMi1iNjQzZDY3NC1mMmUwZmZiNi1hMWJhYmY0, ActorId: [1:7490568916773172281:2284], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-04-07T13:53:02.429531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568916773172282:2285], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.429621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:02.432930Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.432993Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:02.433094Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7490568903888270357:2282], selfId: [1:7490568895298335448:2256], source: [1:7490568916773172281:2284] 2025-04-07T13:53:02.482770Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-07T13:53:02.482802Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-04-07T13:53:12.581570Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:53:12.595133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:53:12.595920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:12.596094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:12.597311Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:12.597472Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a52/r3tmp/tmp7csK17/pdisk_1.dat 2025-04-07T13:53:13.289312Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12311 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1069:2649] 2025-04-07T13:53:14.013081Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y=, workerId: [3:1070:2342], database: , longSession: 1, local sessions count: 1 2025-04-07T13:53:14.013311Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y= 2025-04-07T13:53:14.014066Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-04-07T13:53:14.014140Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:14.014572Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1070:2342] 2025-04-07T13:53:14.014606Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:14.349553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1071:2650], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:14.349746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:14.350075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1072:2343], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:14.350218Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:14.393008Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-04-07T13:53:14.393108Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y= status: TIMEOUT round: 0 2025-04-07T13:53:14.393232Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-07T13:53:14.393264Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y= status: TIMEOUT round: 0 2025-04-07T13:53:14.393470Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA1NmE0MTUtMTdhMmYzODQtYjk0YjUyMWItZGFhNjNhM2Y=, ActorId: [3:1070:2342], ActorState: ExecuteState, TraceId: 01jr88dn8y8z0sy3st8tnzcec2, Create QueryResponse for error on request, msg: 2025-04-07T13:53:14.393729Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] 2025-04-07T13:53:14.396171Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: ... o sender actor, requestId: 56, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] 2025-04-07T13:53:17.659803Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 84, sender: [2:162:2148], selfId: [3:192:2106], source: [3:1332:2481] 2025-04-07T13:53:17.660022Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 56 2025-04-07T13:53:17.673420Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MzRlMTc0ZjQtNTFmMGFjMmEtMWE2NmM5YTAtOTgzMWVmNWU=, workerId: [3:1339:2485], database: , longSession: 1, local sessions count: 56 2025-04-07T13:53:17.673595Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:17.674091Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1069:2649], trace_id: 2025-04-07T13:53:17.674213Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.684725Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-04-07T13:53:17.684825Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=MzRlMTc0ZjQtNTFmMGFjMmEtMWE2NmM5YTAtOTgzMWVmNWU= status: TIMEOUT round: 0 2025-04-07T13:53:17.684960Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] 2025-04-07T13:53:17.687029Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA==, workerId: [3:1340:2486], database: , longSession: 1, local sessions count: 57 2025-04-07T13:53:17.692875Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA== 2025-04-07T13:53:17.693694Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-04-07T13:53:17.693756Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.694243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1341:2712], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.694387Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1340:2486] 2025-04-07T13:53:17.694449Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.694568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.772108Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1343:2487], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.772386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.782988Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-04-07T13:53:17.783072Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA== status: TIMEOUT round: 0 2025-04-07T13:53:17.783158Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-04-07T13:53:17.783187Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA== status: TIMEOUT round: 0 2025-04-07T13:53:17.783317Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] 2025-04-07T13:53:17.783411Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDBkYzQ1MWUtMmJkNjEwMWItM2EyMjAwYmItNmZkZDYwZA==, ActorId: [3:1340:2486], ActorState: ExecuteState, TraceId: 01jr88drvy41sbzhyzsywdv2s1, Create QueryResponse for error on request, msg: 2025-04-07T13:53:17.785557Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:162:2148], selfId: [3:192:2106], source: [3:1340:2486] 2025-04-07T13:53:17.785775Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-04-07T13:53:17.788139Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YjAwYzBjMjctOTRlODg4NmUtMmI0NmEwMy00NDgwZWVhMA==, workerId: [3:1347:2490], database: , longSession: 1, local sessions count: 58 2025-04-07T13:53:17.788278Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:17.788699Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1069:2649], trace_id: 2025-04-07T13:53:17.788816Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.788983Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=YjAwYzBjMjctOTRlODg4NmUtMmI0NmEwMy00NDgwZWVhMA==, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-04-07T13:53:17.789118Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1069:2649], selfId: [2:162:2148], source: [3:192:2106] 2025-04-07T13:53:17.790828Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw, workerId: [3:1348:2491], database: , longSession: 1, local sessions count: 59 2025-04-07T13:53:17.790962Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw 2025-04-07T13:53:17.791406Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-04-07T13:53:17.791474Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.791935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1349:2714], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.792085Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1348:2491] 2025-04-07T13:53:17.792134Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.792261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.861439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1351:2492], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.861688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:17.877138Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-04-07T13:53:17.877222Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw status: TIMEOUT round: 0 2025-04-07T13:53:17.877338Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-04-07T13:53:17.877364Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-04-07T13:53:17.877475Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw, ActorId: [3:1348:2491], ActorState: ExecuteState, TraceId: 01jr88drz0c7m58m6kcnae0psg, Create QueryResponse for error on request, msg: 2025-04-07T13:53:17.877600Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-04-07T13:53:17.877629Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=YzMzNjlmMy04ZTU3YWNjMC0xNjJlOGFlNC1lMmI0OTEw status: TIMEOUT round: 0 2025-04-07T13:53:17.879437Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] 2025-04-07T13:53:17.879652Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:162:2148], selfId: [3:192:2106], source: [3:1348:2491] 2025-04-07T13:53:17.879857Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-04-07T13:53:17.888832Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MmM2OTE1NmYtYjQ3NjY0MzAtOTQxOTg3ODktYzVjMmE3ZmM=, workerId: [3:1355:2495], database: , longSession: 1, local sessions count: 60 2025-04-07T13:53:17.889006Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T13:53:17.889518Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1069:2649], trace_id: 2025-04-07T13:53:17.889659Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-04-07T13:53:17.907911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:17.908035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:17.929704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:17.929817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:17.940952Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-04-07T13:53:17.941041Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=MmM2OTE1NmYtYjQ3NjY0MzAtOTQxOTg3ODktYzVjMmE3ZmM= status: TIMEOUT round: 0 2025-04-07T13:53:17.941161Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1069:2649], selfId: [2:162:2148], source: [2:162:2148] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 |86.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> LocalPartitionReader::FeedSlowly >> LocalPartitionReader::FeedSlowly [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |86.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> BasicUsage::SimpleHandlers [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> TPopulatorTest::Boot |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |86.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] >> TPopulatorTestWithResets::UpdateAck ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-04-07T13:52:15.108300Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1744033935108266 2025-04-07T13:52:16.057882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568720673363722:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:16.057945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.102322Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568721054122435:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:16.102353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.561158Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00184f/r3tmp/tmp8XdYuK/pdisk_1.dat 2025-04-07T13:52:16.662194Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:17.133842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:17.168848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:17.326419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.326530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.327283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.327320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.337007Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:17.337094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:17.337962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18065, node 1 2025-04-07T13:52:17.509380Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:17.540577Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:17.635447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:17.635744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00184f/r3tmp/yandexfNdBtF.tmp 2025-04-07T13:52:17.635759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00184f/r3tmp/yandexfNdBtF.tmp 2025-04-07T13:52:17.635870Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00184f/r3tmp/yandexfNdBtF.tmp 2025-04-07T13:52:17.635953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:17.701442Z INFO: TTestServer started on Port 26851 GrpcPort 18065 TClient is connected to server localhost:26851 PQClient connected to localhost:18065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:18.189458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:20.922236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568737853233689:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.922352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.930006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568737853233701:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:20.957771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:21.080201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568720673363722:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.080550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:21.125324Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568721054122435:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.125379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:21.187486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568737853233703:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:21.494433Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568742148201109:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:21.496100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk1NThiM2MtNTE5YTAyYWUtZGY2NjhlMGUtZTJiMGJmMzQ=, ActorId: [1:7490568737853233685:2304], ActorState: ExecuteState, TraceId: 01jr88c1b39tk0ms9zbvghfqfy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:21.501686Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:21.502522Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568742528959249:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:21.502683Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFhMDRhNWYtMjA5YjQ2MmItMzgwZGU1YTctNTU1ZWIxYzE=, ActorId: [2:7490568742528959209:2282], ActorState: ExecuteState, TraceId: 01jr88c1h99fbqyted553zvjpb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:21.502952Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:21.504197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.667429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.810578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18065", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:22.131347Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88c2d33t2rwqrdg7a9tkzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE5YTgxNzQtNDliMDI3NDEtOGRlZTA3ZDMtNTJjZjNjMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568746443168802:2983] === CheckClustersList. Ok 2025-04-07T13:52:28.442643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:18065 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:28.600205Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3. ... t] [c3a27292-d82edc87-15f9e75d-4cce367a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4784 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.032069Z :INFO: [/Root] [/Root] [215c7ea7-2c45c94a-397650ab-69d95598] Closing read session. 
Close timeout: 0.000000s 2025-04-07T13:53:27.032087Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:53:27.032107Z :INFO: [/Root] [/Root] [215c7ea7-2c45c94a-397650ab-69d95598] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4782 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.032126Z :INFO: [/Root] [/Root] [b1d3f04d-8e5a2f0e-afcea94d-48dd482] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:27.032147Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:53:27.032167Z :INFO: [/Root] [/Root] [b1d3f04d-8e5a2f0e-afcea94d-48dd482] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4781 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.032195Z :INFO: [/Root] [/Root] [b1d3f04d-8e5a2f0e-afcea94d-48dd482] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:27.032222Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:53:27.032264Z :INFO: [/Root] [/Root] [b1d3f04d-8e5a2f0e-afcea94d-48dd482] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4781 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.032366Z :NOTICE: [/Root] [/Root] [b1d3f04d-8e5a2f0e-afcea94d-48dd482] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:53:27.032502Z :INFO: [/Root] [/Root] [215c7ea7-2c45c94a-397650ab-69d95598] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:27.032521Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:53:27.032541Z :INFO: [/Root] [/Root] [215c7ea7-2c45c94a-397650ab-69d95598] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4783 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.032578Z :NOTICE: [/Root] [/Root] [215c7ea7-2c45c94a-397650ab-69d95598] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:53:27.032624Z :INFO: [/Root] [/Root] [c3a27292-d82edc87-15f9e75d-4cce367a] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:27.032648Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-07T13:53:27.033059Z :INFO: [/Root] [/Root] [c3a27292-d82edc87-15f9e75d-4cce367a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4785 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:27.033114Z :NOTICE: [/Root] [/Root] [c3a27292-d82edc87-15f9e75d-4cce367a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:53:27.032844Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 grpc read done: success# 1, data# { read_request { bytes_size: 131356 } } 2025-04-07T13:53:27.032984Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 got read request: guid# 1154c074-da781ad5-e63362ee-95b67360 2025-04-07T13:53:27.041591Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 grpc read done: success# 0, data# { } 2025-04-07T13:53:27.041613Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11034337695034605230_v1 grpc read done: success# 0, data# { } 2025-04-07T13:53:27.041626Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 grpc read failed 2025-04-07T13:53:27.041626Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11034337695034605230_v1 grpc read failed 2025-04-07T13:53:27.041654Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 grpc closed 2025-04-07T13:53:27.041655Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11034337695034605230_v1 grpc closed 2025-04-07T13:53:27.048980Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11034337695034605230_v1 is DEAD 2025-04-07T13:53:27.049713Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9177588570613904022_v1 is DEAD 2025-04-07T13:53:27.081532Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_14380210888728288176_v1 grpc read done: success# 0, data# { } 2025-04-07T13:53:27.081567Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_14380210888728288176_v1 grpc read failed 2025-04-07T13:53:27.081589Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_14380210888728288176_v1 grpc closed 2025-04-07T13:53:27.081606Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_14380210888728288176_v1 is DEAD 2025-04-07T13:53:27.082275Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|beef160c-edddc63-cdf98305-adfb1d79_0 grpc read done: success: 0 data: 2025-04-07T13:53:27.082287Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|beef160c-edddc63-cdf98305-adfb1d79_0 grpc read failed 2025-04-07T13:53:27.082309Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|beef160c-edddc63-cdf98305-adfb1d79_0 grpc closed 2025-04-07T13:53:27.082327Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|beef160c-edddc63-cdf98305-adfb1d79_0 is DEAD 2025-04-07T13:53:27.082947Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:53:27.085408Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986498:2503] disconnected; active server actors: 1 2025-04-07T13:53:27.104104Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:27.104153Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490569005471986560:2514] 
destroyed 2025-04-07T13:53:27.104171Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:27.104206Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_9177588570613904022_v1 2025-04-07T13:53:27.104225Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490569005471986517:2518] destroyed 2025-04-07T13:53:27.104286Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:53:27.104386Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_2_9177588570613904022_v1 2025-04-07T13:53:27.107584Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986498:2503] client user disconnected session shared/user_3_3_14380210888728288176_v1 2025-04-07T13:53:27.120027Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-07T13:53:27.120143Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986496:2501] disconnected; active server actors: 1 2025-04-07T13:53:27.120200Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986496:2501] client user disconnected session shared/user_3_1_11034337695034605230_v1 2025-04-07T13:53:27.120302Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986505:2502] disconnected; active server actors: 1 2025-04-07T13:53:27.120322Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569005471986505:2502] client user disconnected session shared/user_3_2_9177588570613904022_v1 2025-04-07T13:53:28.035381Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:53:28.080796Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:28.128373Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:28.169500Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:28.239878Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:53:28.278299Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:53:28.278451Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7490569031241790701:2558] TxId: 281474976715699. Ctx: { TraceId: 01jr88e28kd7bkwr23dc1m9exm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkyMTU0NzgtOTUwZWFhZjEtMzU2YzlhNmItNGQ1ZmVhMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:53:28.295283Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTkyMTU0NzgtOTUwZWFhZjEtMzU2YzlhNmItNGQ1ZmVhMWU=, ActorId: [3:7490569026946823358:2558], ActorState: ExecuteState, TraceId: 01jr88e28kd7bkwr23dc1m9exm, Create QueryResponse for error on request, msg: 2025-04-07T13:53:28.305401Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr88e30r0cqbqh4t08ghwzq9" } } YdbStatus: UNAVAILABLE ConsumedRu: 520 } 2025-04-07T13:53:28.395990Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569031241790694:2566], TxId: 281474976715698, task: 1, CA Id [3:7490569031241790692:2566]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> TWebLoginService::AuditLogLdapLoginBadUser |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> TWebLoginService::AuditLogLdapLoginSuccess >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-04-07T13:53:30.266657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:30.266757Z node 1 :IMPORT WARN: Table profiles were not loaded >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> TPopulatorTest::RemoveDir >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T13:53:31.809427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:31.809521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:31.809556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:31.809588Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:31.809636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:31.809664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:31.809720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:31.810029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:32.019299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:32.019363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:32.033075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:32.033213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:32.033406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:32.049682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:32.050113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:32.050709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.050920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:32.065404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.066813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:32.066884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.067047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:32.067096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:32.067137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:32.067319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.081616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T13:53:32.237071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:32.237332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.237558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:32.237771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:32.237823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.241695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.241945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:32.242126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.242186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:32.242227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:32.242257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:32.244365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.244421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:32.244458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:32.246285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.246334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.246379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.246441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.250081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:32.252169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:32.252423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:32.253426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.253554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-07T13:53:32.253602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.253862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:32.253919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.254125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:32.254215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:32.256406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:32.256445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:32.256676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.256715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:32.257063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.257104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:32.257193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:32.257224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.257268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:32.257318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.257363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:32.257388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:32.257458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:32.257494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:32.257525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:32.259198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:32.259295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:32.259378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:32.259413Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:32.259493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:32.259606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:53:32.262708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:53:32.263258Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.264517Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T13:53:32.280577Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:275:2267]) 2025-04-07T13:53:32.280853Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T13:53:32.281159Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29567, port: 29567 2025-04-07T13:53:32.281960Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T13:53:32.309806Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=bad_user, attributes: 1.1 2025-04-07T13:53:32.310165Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:29567 return no entries 2025-04-07T13:53:32.311146Z node 1 :HTTP ERROR: Login fail for bad_user@ldap: Could not login via LDAP 2025-04-07T13:53:32.311482Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:32.328368Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-07T13:53:32.241876Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T13:53:32.310826Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:29567 return no entries, login_user=bad_user@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-07T13:53:32.310826Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. 
LDAP search for filter uid=bad_user on server ldap://localhost:29567 return no entries, login_user=bad_user@ldap, sanitized_token={none} |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T13:53:31.949183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:31.949273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:31.949306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:31.949335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:31.949376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:31.949401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:31.949458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:31.949808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:32.093522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:32.093591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:32.116322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:32.116446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:32.116640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:32.134164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:32.134563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:32.135131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.135308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:32.145423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.146766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:32.146825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.146982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:32.147020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:32.147053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:32.147236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.163454Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T13:53:32.294741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:32.294978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.295220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:32.295454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:32.295504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.301298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.301501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:32.301701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.301765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:32.301807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:32.301836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:32.304237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.304317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:32.304360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:32.306666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.306728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.306783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.306841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose 
, TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.310507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:32.312683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:32.312896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:32.313898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.314032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:32.314076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.314336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:32.314394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:32.314580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:32.314675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:32.326326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:32.326385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:32.326607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.326657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:32.327026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:32.327081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:32.327187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:32.327222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.327273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:32.327327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2025-04-07T13:53:32.327365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:32.327392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:32.327471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:32.327510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:32.327541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:32.329352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:32.329464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:32.329501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:32.329534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:32.329616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:32.329755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:53:32.332867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:53:32.333377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:32.334585Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T13:53:32.357493Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:275:2267]) 2025-04-07T13:53:32.357698Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T13:53:32.357994Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10716, port: 10716 2025-04-07T13:53:32.358719Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T13:53:32.389559Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2025-04-07T13:53:32.433518Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2025-04-07T13:53:32.434613Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:32.442184Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:53:32.442746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T13:53:32.442788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T13:53:32.476875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjEyLCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3NDQwMzQwMTIsInN1YiI6InVzZXIxIn0.BgrT6SUsAj8L0OpsMowclZ77uAJHp6xJkFp037iE6QCJ6bVnW3urjSBKVSWMN-gmHsACCSP_wATb64-4NnjNpwi-aQ-MNB-1Q6bZDTsi6X4OB4IusowcIf1TwIIGj60iUoJGvAzHwzsPOqaPerqLJK63-0KGhkHCthgWPrQurEKc1Wfb_7iOSEQCi1xqkmTsRuE1zbvpQEbLU79AE8O8_Gae3LUdks-CVZr1OSO_CZzGrtw9SViaQyfc44ct2ALSOH_jYqyf1xEQ1K_bt-zjNULAol_PkEmJoZw1_lQwA_s4PXUDo12NsgbcLqfTPxL3Mkh_AWN62m3W-j54rHTGHg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjEyLCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3NDQwMzQwMTIsInN1YiI6InVzZXIxIn0.**", at schemeshard: 72057594046678944 2025-04-07T13:53:32.478090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:32.478139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:32.478319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:32.478385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T13:53:32.479150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(2): 2025-04-07T13:53:32.301459Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T13:53:32.477402Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjEyLCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3NDQwMzQwMTIsInN1YiI6InVzZXIxIn0.** AUDIT LOG checked line: 2025-04-07T13:53:32.477402Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjEyLCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3NDQwMzQwMTIsInN1YiI6InVzZXIxIn0.** >> TPopulatorTest::RemoveDir [GOOD] >> KqpPragma::Warning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-04-07T13:53:31.531652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:31.531724Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-07T13:53:31.679372Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2025-04-07T13:53:31.679467Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-07T13:53:31.681009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:31.681115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:31.681154Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:31.681514Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2025-04-07T13:53:31.681616Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-07T13:53:31.681728Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:31.681763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T13:53:31.690045Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2025-04-07T13:53:31.690159Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-07T13:53:31.690495Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2025-04-07T13:53:31.690529Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T13:53:31.713018Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-07T13:53:31.713082Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Successful handshake: replica# [1:12:2059] 2025-04-07T13:53:31.713129Z 
node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:31.713265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-07T13:53:31.713289Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Successful handshake: replica# [1:15:2062] 2025-04-07T13:53:31.713337Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:31.713388Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-07T13:53:31.713411Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Successful handshake: replica# [1:18:2065] 2025-04-07T13:53:31.713429Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:31.713514Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:93:2120] 2025-04-07T13:53:31.713604Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2025-04-07T13:53:31.713739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:93:2120] 2025-04-07T13:53:31.713797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.713897Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:93:2120] 2025-04-07T13:53:31.713950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-04-07T13:53:31.714035Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2025-04-07T13:53:31.714073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.714148Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2025-04-07T13:53:31.714230Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:94:2121] 2025-04-07T13:53:31.714302Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-04-07T13:53:31.714342Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.714419Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:94:2121] 2025-04-07T13:53:31.714450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-04-07T13:53:31.714514Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2025-04-07T13:53:31.714555Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.714596Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2025-04-07T13:53:31.714675Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:95:2122] 2025-04-07T13:53:31.714727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-04-07T13:53:31.714760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.714821Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:95:2122] 2025-04-07T13:53:31.714851Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-04-07T13:53:31.714905Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:93:2120] 2025-04-07T13:53:31.714953Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2025-04-07T13:53:31.715006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2025-04-07T13:53:31.715069Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { 
Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 0 2025-04-07T13:53:31.715108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2025-04-07T13:53:31.715156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-04-07T13:53:31.715226Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2025-04-07T13:53:31.715262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-07T13:53:31.715328Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2025-04-07T13:53:31.715376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2025-04-07T13:53:31.715423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 0 2025-04-07T13:53:31.715442Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2025-04-07T13:53:31.715467Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-07T13:53:31.715562Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2025-04-07T13:53:31.715614Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 0 2025-04-07T13:53:31.715634Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2025-04-07T13:53:31.715661Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2025-04-07T13:53:31.715691Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-07T13:53:31.715742Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-07T13:53:31.716080Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] 
Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2025-04-07T13:53:31.716147Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2025-04-07T13:53:31.716481Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 0 2025-04-07T13:53:31.716509Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2025-04-07T13:53:31.720283Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2025-04-07T13:53:31.720379Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-07T13:53:31.720424Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-07T13:53:31.720474Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-07T13:53:31.720529Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 0 2025-04-07T13:53:31.720555Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2025-04-07T13:53:31.721164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2025-04-07T13:53:31.721195Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 2025-04-07T13:53:31.721361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 0 2025-04-07T13:53:31.721387Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2025-04-07T13:53:31.721486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2025-04-07T13:53:31.721510Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 TestWaitNotification: OK eventTxId 100 >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-04-07T13:53:32.929474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-07T13:53:32.929549Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-07T13:53:33.132366Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2025-04-07T13:53:33.132480Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-07T13:53:33.150309Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.150452Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.150492Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.150919Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2025-04-07T13:53:33.150977Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-07T13:53:33.151107Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-07T13:53:33.151180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-07T13:53:33.151226Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-07T13:53:33.151617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.151662Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.151723Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.151908Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2025-04-07T13:53:33.152008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2025-04-07T13:53:33.152037Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-07T13:53:33.152090Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-07T13:53:33.152136Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-07T13:53:33.152181Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-07T13:53:33.152532Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2025-04-07T13:53:33.161105Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 
2025-04-07T13:53:33.161583Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2025-04-07T13:53:33.161627Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-07T13:53:33.161692Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2025-04-07T13:53:33.161750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T13:53:33.169513Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2025-04-07T13:53:33.169585Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-07T13:53:33.169735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.169795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.169862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T13:53:33.170530Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2025-04-07T13:53:33.170577Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-07T13:53:33.170708Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [Owner ... 95365Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.195397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.195458Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-04-07T13:53:33.195879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:95:2122], cookie# 101 2025-04-07T13:53:33.195970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-04-07T13:53:33.196022Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-04-07T13:53:33.196061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.196164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:93:2120], cookie# 101 2025-04-07T13:53:33.196190Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-04-07T13:53:33.196264Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 
2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-04-07T13:53:33.196324Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-04-07T13:53:33.196354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-04-07T13:53:33.196612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:94:2121], cookie# 101 2025-04-07T13:53:33.204626Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 101 2025-04-07T13:53:33.205013Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 101 2025-04-07T13:53:33.205050Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-07T13:53:33.205482Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 101 2025-04-07T13:53:33.205537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T13:53:33.207101Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 219, preserialized size# 2 2025-04-07T13:53:33.207151Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: 
owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-04-07T13:53:33.207265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.207304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.207331Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:53:33.207658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 303, preserialized size# 0 2025-04-07T13:53:33.207695Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-04-07T13:53:33.207805Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.207838Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.207895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 2025-04-07T13:53:33.207976Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-04-07T13:53:33.208008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-04-07T13:53:33.208038Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2025-04-07T13:53:33.208240Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:95:2122], cookie# 101 2025-04-07T13:53:33.208314Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:93:2120], cookie# 101 2025-04-07T13:53:33.208355Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-04-07T13:53:33.208419Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-04-07T13:53:33.208485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-04-07T13:53:33.208522Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-04-07T13:53:33.208563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:94:2121], cookie# 101 2025-04-07T13:53:33.217368Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:93:2120], cookie# 101 2025-04-07T13:53:33.217477Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:94:2121], cookie# 101 2025-04-07T13:53:33.217510Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-04-07T13:53:33.218076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:95:2122], cookie# 101 2025-04-07T13:53:33.218130Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TWebLoginService::AuditLogLoginBadPassword [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 1797, MsgBus: 19180 2025-04-07T13:53:14.176226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568970079869539:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:14.176397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001e99/r3tmp/tmpFawC0U/pdisk_1.dat 2025-04-07T13:53:14.954194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:14.954283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:14.962881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1797, node 1 2025-04-07T13:53:15.027849Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:15.027879Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:15.279928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:15.279957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:15.279964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:15.280058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:15.290545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:15.310345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:19180 TClient is connected to server localhost:19180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:16.676417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:53:16.745626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:17.243303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:17.832971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:18.027240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:19.170589Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568970079869539:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:19.170671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:21.971272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569000144642178:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:21.971458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.466575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.524985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.607941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.738205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.793204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.856074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.972816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569004439609998:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.972870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.973177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569004439610003:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.977540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:23.013475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:53:23.014237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569004439610005:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:26.214100Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569021619479610:2488], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yql:644:18: Error: At function: AggregationTraits /lib/yql/aggregate.yql:58:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-04-07T13:53:26.215819Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q1ZjUxMDgtMjE4MjMxNTctNDQ3NTE0MGEtZWZhYmMyYjg=, ActorId: [1:7490569017324512228:2475], ActorState: ExecuteState, TraceId: 01jr88e14sc2ngbm9w7rgm1a77, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13950, MsgBus: 7700 2025-04-07T13:53:27.235446Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569023966240365:2073];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e99/r3tmp/tmpiaQg03/pdisk_1.dat 2025-04-07T13:53:27.320435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:27.529253Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:27.584900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:27.584996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:27.596592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13950, node 2 2025-04-07T13:53:27.653241Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:27.653261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:27.653267Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:27.653368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7700 TClient is connected to server localhost:7700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:28.098328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.135243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:28.251136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.442187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.520707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:30.756338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569036851143914:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:30.763198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:30.810765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:30.881536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:30.956326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:31.025000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:31.059250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:31.109717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:31.218957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569041146111707:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:31.219086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:31.219409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569041146111712:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:31.223553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:31.241451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569041146111714:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:32.236789Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569023966240365:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:32.280246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-04-07T13:52:57.789201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568894549395334:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:57.789266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a78/r3tmp/tmpJ48M0M/pdisk_1.dat 2025-04-07T13:52:58.677689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:58.677767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:58.681195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:58.745816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:58.842742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:22808 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:52:59.445383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:01.709784Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.713001Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.728819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568911729264897:2283], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.728923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.744878Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-07T13:53:01.752907Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:01.752951Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:01.752971Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:01.753030Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:01.753123Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.753166Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.753286Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7490568903139330269:2279], selfId: [1:7490568894549395361:2256], source: [1:7490568894549395361:2256] 2025-04-07T13:53:01.753805Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.753829Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:01.760905Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-07T13:53:01.761064Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7490568903139330269:2279], selfId: [1:7490568894549395361:2256], source: [1:7490568894549395361:2256] 2025-04-07T13:53:01.761463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568911729264921:2284], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.761520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.767725Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-04-07T13:53:01.767827Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7490568903139330269:2279], selfId: [1:7490568894549395361:2256], source: [1:7490568894549395361:2256] 2025-04-07T13:53:01.769250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568911729264923:2285], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:01.769312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:07.113983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:53:07.114420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:07.114510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a78/r3tmp/tmpUVsAdE/pdisk_1.dat 2025-04-07T13:53:07.561397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:07.616813Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:07.616904Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:07.617186Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:07.657593Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:281:2325], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:53:07.659534Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:281:2325], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T13:53:07.659678Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:281:2325], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:576:2499] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:53:07.659831Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:281:2325], cacheItem# { Subscriber: { Subscriber: [2:576:2499] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:53:07.659932Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:281:2325], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
2025-04-07T13:53:07.660018Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:281:2325], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:577:2500] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T13:53:07.660117Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:281:2325], cacheItem# { Subscriber: { Subscriber: [2:577:2500] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:53:07.660333Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:590:2501], recipient# [2:288:2331], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073 ... lerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-04-07T13:53:21.689105Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-04-07T13:53:21.689186Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=YzIyOWFjZDItZjM0MGM2NjItOWFlMDczMmYtYmRlYzRhYjI= status: TIMEOUT round: 0 2025-04-07T13:53:21.689381Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzIyOWFjZDItZjM0MGM2NjItOWFlMDczMmYtYmRlYzRhYjI=, ActorId: [2:1092:2899], ActorState: ExecuteState, TraceId: 01jr88dv7m7dvckk2ja4ba7hwy, Create QueryResponse for error on request, msg: 2025-04-07T13:53:21.689636Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:558:2485], selfId: [2:50:2097], source: [2:1092:2899] Send scheduled evet back 2025-04-07T13:53:21.689782Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1094:2901], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-04-07T13:53:20.146040Z 2025-04-07T13:53:21.689870Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1094:2901], owner: [2:276:2320], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: 71d6830a-69d198cc-c7f05f09-82b3f8fe Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-04-07T13:53:23.738911Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569006082632322:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:23.738970Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a78/r3tmp/tmpE6tR3H/pdisk_1.dat 2025-04-07T13:53:24.114834Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:24.254404Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:24.254490Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:24.260967Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8626, node 3 2025-04-07T13:53:24.607715Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:24.607750Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:24.607763Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:24.607885Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:25.558046Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.558433Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:53:25.558457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.577505Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-04-07T13:53:25.577769Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:53:25.577791Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:53:25.579113Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:53:25.593867Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:53:25.593900Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:53:25.598130Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.607638Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034005651, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:53:25.607695Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:53:25.607964Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:53:25.610290Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:53:25.610522Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:53:25.610572Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:53:25.610652Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:53:25.610699Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:53:25.610754Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:53:25.626760Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:53:25.626854Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:53:25.626878Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:53:25.626999Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:53:28.742753Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569006082632322:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:28.742842Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:31.859661Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:31.861722Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 
2025-04-07T13:53:31.870304Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:31.870346Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:31.870375Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:31.870427Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-07T13:53:31.870501Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:31.870629Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:31.874488Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:31.874540Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2025-04-07T13:50:16.275026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568205810802480:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:16.275068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:16.805211Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:16.813089Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014bc/r3tmp/tmpUhJ4DQ/pdisk_1.dat 2025-04-07T13:50:16.917563Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:17.386165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:17.389847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:17.423740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:17.423849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:17.425805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:17.425864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:17.436617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:17.468618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:50:17.484626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22242, node 1 2025-04-07T13:50:17.689620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0014bc/r3tmp/yandexAaX7Wd.tmp 2025-04-07T13:50:17.689656Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0014bc/r3tmp/yandexAaX7Wd.tmp 2025-04-07T13:50:17.689805Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0014bc/r3tmp/yandexAaX7Wd.tmp 2025-04-07T13:50:17.689923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:17.757457Z INFO: TTestServer started on Port 18085 GrpcPort 22242 TClient is connected to server localhost:18085 PQClient connected to localhost:22242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:18.151300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:18.218873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:50:21.276291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568205810802480:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:21.276343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:21.661212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568227285640011:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.661332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.661771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568227285640038:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.665200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:50:21.720552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568227285640040:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:50:21.926922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:21.934929Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568227285640139:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:21.944846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA1ZjZlMTItNTJkYzAwYWUtZWVjNTEzMDctNmM4N2FkMWE=, ActorId: [1:7490568227285640008:2306], ActorState: ExecuteState, TraceId: 01jr888cyk0xxxv945cebskwbz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:21.946968Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:22.163518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:22.378528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:50:22.842671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr888dv54p4gc0fb667w7wz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBlZjllNzMtNGNlNGM5MTQtZTJlNmRhMTgtYTQ4YTdjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568231580607893:3099] === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--topic1 2025-04-07T13:50:28.989325Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490568205418204136:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:28.989500Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490568205418204136:2102], cacheItem# { Subscriber: { Subscriber: [2:7490568209713171456:2108] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T13:50:28.989626Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490568256957811909:2215], recipient# [2:7490568256957811908:2302], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:28.995657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490568205810802740:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T13:50:28.995806Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490568257350412125:3391], recipient# [1:7490568257350412124:2435], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: RootUnknown Kind: KindUnknown DomainInfo }] } Create topic: /Root/PQ/rt3.dc1--topic1 AddTopic: /Root/PQ/rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ ... 
generation 1 [25:7490569027487409916:2458] 2025-04-07T13:53:28.310131Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:53:28.311526Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:53:28.311812Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:28.305617Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72075186224037893, NodeId 25, Generation 1 2025-04-07T13:53:28.305649Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72057594046644480, NodeId 25, Generation 2 2025-04-07T13:53:28.305682Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:53:28.306271Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [25:7490568958767931244:2194] txId 281474976715680 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:53:28.390606Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:53:28.390753Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:53:28.393843Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7490568958767931244:2194] txId 281474976715680 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 
0 Generation: 0 } 2025-04-07T13:53:28.393938Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:53:28.394434Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:53:28.394700Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] bootstrapping 1 [26:7490569029912001283:2379] 2025-04-07T13:53:28.407198Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:1:Initializer] Initializing completed. 2025-04-07T13:53:28.407260Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 1 generation 1 [26:7490569029912001283:2379] 2025-04-07T13:53:28.407302Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:53:28.408768Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 1 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:53:28.409136Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 1 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715680 CreateStep: 1744034008479 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: ... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T13:53:34.398088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:34.398194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:34.398234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:34.398288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:34.398331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:34.398360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:34.398412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:34.398720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:34.561653Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:34.561715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:34.618790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:34.618897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:34.619072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:34.651226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:34.651655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:34.652257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:34.652466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:34.666232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.667619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:34.667699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.667864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:34.667917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:34.667966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:34.668185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.693608Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T13:53:35.026638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:35.026887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.027100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:35.027330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:35.027389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.029802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:35.030262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:35.030432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.030495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:35.030533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:35.030565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:35.032630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.032705Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:35.032740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:35.034443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.034483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.034530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:35.034585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:35.037930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:35.039724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:35.039901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:35.040868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:35.041005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:35.041049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:35.041300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:35.041352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:35.041514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:35.041583Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:35.043592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:35.043633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:35.043807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:35.043847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:35.044154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:35.044329Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:35.044443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:35.044472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:35.044516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:35.044593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:35.044624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:35.044650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:35.044728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:35.044758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:35.044788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:35.046533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:35.046666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:35.046703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:35.046744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:35.046780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:35.046873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:53:35.056492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:53:35.057039Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:53:35.059447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:35.067199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:35.067366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:53:35.067404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:35.067478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:35.067534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:53:35.067574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:35.067607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:53:35.067644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-07T13:53:35.067678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:53:35.067951Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T13:53:35.100533Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:272:2264]) 2025-04-07T13:53:35.101844Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:35.104474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:35.104696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-07T13:53:35.105016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:35.105058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:35.105202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:35.105244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:53:35.106848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:35.106950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:35.106984Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:35.107022Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:53:35.107060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:35.107169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:53:35.107541Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:53:35.108996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-07T13:53:35.109415Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T13:53:35.111363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T13:53:35.111426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T13:53:35.209557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjE1LCJpYXQiOjE3NDQwMzQwMTUsInN1YiI6InVzZXIxIn0.qDxNzj6sQZ6bkbhlzHbmU9bVRlKWIcywfjkmurHp5FTAZGT8JH4_xXdEpGl0YCU4pAbLCfvn8YBsQEvGcCoX1HIi4iidXot2dtfKrJjzyn37GtrTsOTu2k1RTAe5WQ7q-1dxVSfJFhAciqWKJH0JlrU93TzSbLKtAd7cS2cp95md4p8r8C5sJp5dAiUAmSwfgaPKff65O3hYxv-WoGsDzEZHRPCIUWXOiviqkbtC5ujatbZ1elTlRaWkAUpPraIkhT1sgWH70xwRTYFjs1_8AhJ5ujg3A_lUx5Wn4-bRw08TbYFZFv32Ta2QM-kiCjs6Kiou6pe6KgPoehePpdhR-A" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjE1LCJpYXQiOjE3NDQwMzQwMTUsInN1YiI6InVzZXIxIn0.**", at schemeshard: 72057594046678944 2025-04-07T13:53:35.210494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:35.210552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:35.210748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:35.210801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T13:53:35.211931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2025-04-07T13:53:35.029923Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T13:53:35.104618Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2025-04-07T13:53:35.210256Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, 
status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjE1LCJpYXQiOjE3NDQwMzQwMTUsInN1YiI6InVzZXIxIn0.** AUDIT LOG checked line: 2025-04-07T13:53:35.210256Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc3MjE1LCJpYXQiOjE3NDQwMzQwMTUsInN1YiI6InVzZXIxIn0.** |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T13:53:33.866224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:33.866318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:33.866355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:33.866388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:33.866432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:33.866457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:33.866509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:33.866886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:34.018988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:34.019051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:34.061325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:34.061441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:34.061689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:34.084691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:34.085120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:34.085724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:34.085898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:34.099137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.100382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:34.100470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.100642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:34.100709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:34.100751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:34.100963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.127822Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T13:53:34.522565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:34.522793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.522996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:34.523288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:34.523335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.525610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:34.525771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:34.525938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.526006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:34.526047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:34.526076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:34.529044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.529110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-07T13:53:34.529157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:34.536690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.536764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.536828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:34.536899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:34.540526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:34.546429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:34.546628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:34.547549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:34.547684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:34.547727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:34.547984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:34.548031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:34.548227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:34.548301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:34.564457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:34.564535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:34.564750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.564818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:34.565166Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:34.565226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:34.565335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:34.565366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:34.565412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:34.565475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:34.565508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:34.565536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:34.565602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:34.565639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:34.565671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:34.570859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:34.571045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:34.571118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:34.571166Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:34.571292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:34.571456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:53:34.582314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:53:34.582855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:53:34.585493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:34.598301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:34.598471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:53:34.598507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:34.598577Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:34.598628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:53:34.598672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:34.598704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:53:34.598739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-07T13:53:34.598772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T13:53:34.599036Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T13:53:34.666416Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:272:2264]) 2025-04-07T13:53:34.670238Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:53:34.687595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:34.687817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-07T13:53:34.688149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:34.688214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:34.688403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.688444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:53:34.689282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:34.689380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:34.692756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:34.692826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T13:53:34.692868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:34.692990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:53:34.693406Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:53:34.696815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-07T13:53:34.697250Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T13:53:34.699078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T13:53:34.699142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T13:53:34.826243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-07T13:53:34.827144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:34.827193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:34.827379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:34.827437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T13:53:34.828045Z node 1 :HTTP ERROR: Login fail for user1: Invalid password 2025-04-07T13:53:34.828602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2025-04-07T13:53:34.525729Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T13:53:34.687758Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2025-04-07T13:53:34.826905Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1, sanitized_token={none} AUDIT LOG checked line: 2025-04-07T13:53:34.826905Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1, sanitized_token={none} >> KqpScripting::Pure [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> KqpScripting::ScriptExplain [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> KqpScanArrowInChanels::AggregateNoColumn >> Worker::Basic >> Cdc::KeysOnlyLog[PqRunner] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> TOlap::CustomDefaultPresets >> KqpScanArrowFormat::AggregateCountStar >> TOlap::StoreStats >> TOlap::CreateStore >> TOlap::AlterStore >> TOlap::CreateTable >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TOlap::CreateDropTable |86.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/security/ut/ydb-core-security-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 24534, MsgBus: 24830 2025-04-07T13:53:12.360944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568959772503386:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:12.361120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e93/r3tmp/tmpnbivFe/pdisk_1.dat 2025-04-07T13:53:13.184689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:13.184789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:13.191762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24534, node 1 2025-04-07T13:53:13.235523Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:13.235550Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:13.449666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:13.454878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:13.454895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:13.454901Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:13.454984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:13.470125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:24830 TClient is connected to server localhost:24830 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:14.539678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:14.579463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:14.766513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:14.968404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:15.067215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:17.360803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568959772503386:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:17.360907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:19.610828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568989837276026:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:19.610954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:20.651187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:20.739025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:20.863271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:20.944157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:21.020907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:21.138179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:21.233936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568998427211160:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:21.237051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568998427211156:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:21.237113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:21.238591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:21.263158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568998427211162:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:25.333899Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034005350, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 6266, MsgBus: 61433 2025-04-07T13:53:26.638313Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569021864043333:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:26.638353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e93/r3tmp/tmpyHfdoz/pdisk_1.dat 2025-04-07T13:53:27.081289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:27.081386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:27.081785Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:27.102098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6266, node 2 2025-04-07T13:53:27.253305Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:27.253328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:27.253334Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:27.253471Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61433 TClient is connected to server localhost:61433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:28.257903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.279807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.406498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:28.764842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.959117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:31.640916Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569021864043333:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:31.640994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:33.099094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569051928815977:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:33.099212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:33.137292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.188107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.259067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.319055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.352691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.393895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:33.529117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569051928816483:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:33.529217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:33.529529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569051928816488:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:33.533180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:33.552381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569051928816490:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TOlap::CreateDropStandaloneTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 3227, MsgBus: 4295 2025-04-07T13:53:11.465701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568956271024997:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7f/r3tmp/tmpp8CxVj/pdisk_1.dat 2025-04-07T13:53:11.762813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:12.030926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:12.033146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:12.033240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:12.045938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3227, node 1 2025-04-07T13:53:12.277354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:12.277377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:12.277385Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:12.277513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4295 TClient is connected to server localhost:4295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:13.515681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:13.529823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:13.543020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:13.768278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:53:14.014861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:14.131134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:16.442692Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568956271024997:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:16.442765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:18.057792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568986335797606:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:18.057923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:18.469372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.554850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.593750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.687963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.766056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.862372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:18.991029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568986335798127:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:18.991124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:18.991483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568986335798132:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:18.995681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:19.016878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:53:19.018087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568986335798134:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:21.470459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28963, MsgBus: 7257 2025-04-07T13:53:26.820494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569023140077762:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7f/r3tmp/tmp1U4g4c/pdisk_1.dat 2025-04-07T13:53:27.093587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:27.149294Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:27.180186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:27.180308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28963, node 2 2025-04-07T13:53:27.183815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:27.345281Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:27.345303Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:27.345310Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:27.345413Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7257 TClient is connected to server localhost:7257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:28.330969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.375153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:28.469362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.665487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:28.759912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:31.754493Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569023140077762:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:31.759897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:31.888928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569044614915788:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:31.889159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:31.916816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:31.987107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:32.044174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:32.155310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:32.241627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:32.392073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:32.552520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569048909883591:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.552632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.553284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569048909883598:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.557529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:32.588076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569048909883600:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:35.919269Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569061794785929:2476], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:35.920945Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGUxOGVhZGUtMjIxZTU2YjctNDI3MTZmYWEtYzE5NzE5MmY=, ActorId: [2:7490569061794785926:2475], ActorState: ExecuteState, TraceId: 01jr88eakc0w394fx7dzqh6w5y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TOlap::Decimal >> TOlap::CreateStoreWithDirs >> TOlap::CustomDefaultPresets [GOOD] >> TOlap::CreateTableTtl >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TOlap::CreateStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:38.501295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:38.501418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:38.501459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:38.501506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:38.501563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:38.501591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:38.501653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:38.502012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:38.585623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:38.585693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:38.607542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:38.607701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:38.607879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:38.654905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:38.655148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:38.655823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:38.656057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:38.661450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-04-07T13:53:38.662815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:38.662881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:38.663025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:38.663083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:38.663128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:38.663289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.687420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:38.851033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:38.851296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.851509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:38.851714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:38.851769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.854164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:38.854307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:38.854472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.854549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:38.854586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:38.854619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:38.856746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.856808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:38.856849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:38.865106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.865207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.865268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:38.865328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:38.869808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:38.881768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:38.882034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:38.883066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:38.883227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:38.883280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:38.883529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:38.883578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:38.883740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:38.883812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:38.894070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:38.894158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:38.894338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:38.894388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:38.894614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.894658Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-07T13:53:38.894758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:38.894791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:38.894838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:38.894890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:38.894925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:38.894957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:38.895049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:38.895107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:38.895145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:38.897268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:38.897407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:38.897449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:38.897488Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:38.897551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:38.897681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
fCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:39.321394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T13:53:39.341180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.341233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:39.341438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:53:39.341623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.341664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:53:39.341708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-07T13:53:39.342074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.342125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:53:39.342187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-07T13:53:39.342907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:39.343007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:39.343052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:53:39.343105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T13:53:39.343152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:39.344229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:39.344309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:39.344335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:53:39.344364Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-07T13:53:39.344391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:53:39.344465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:53:39.347105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-07T13:53:39.347200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2025-04-07T13:53:39.347291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-04-07T13:53:39.347528Z node 1 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-04-07T13:53:39.347884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.348014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-04-07T13:53:39.348605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:53:39.349900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:53:39.351344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:53:39.364517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-07T13:53:39.364575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:39.364699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-07T13:53:39.364768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-07T13:53:39.365141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-04-07T13:53:39.365189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:39.365277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-04-07T13:53:39.374660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.375191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.375377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.375434Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:53:39.375553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:53:39.375589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:39.375644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:53:39.375720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 102 2025-04-07T13:53:39.375771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:39.375816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:53:39.375863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:53:39.376004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:53:39.378413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:53:39.378480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:418:2397] TestWaitNotification: OK eventTxId 102 2025-04-07T13:53:39.379073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:39.379359Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 298us result status StatusSuccess 2025-04-07T13:53:39.379881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlap::Decimal [GOOD] >> TOlap::CreateDropTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:39.135409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:39.135507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.135550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:39.135583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:39.135621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:39.135666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:39.135728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.136043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.216048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:39.216095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.230380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.230498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:39.230649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:39.236137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:39.236319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:39.236897Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.237072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:39.238651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.239724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.239774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.239878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:39.239931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.239968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:39.240104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.246071Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:39.379092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:39.379309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.379510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:39.379719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:39.379768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.383934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.384085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:39.384264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.384322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:39.384353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:39.384382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:39.387698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T13:53:39.387769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:39.387805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:39.393653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.393727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.393789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.393836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.403558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:39.405735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:39.405923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:39.406870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.406995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:39.407039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.407269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:39.407317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.407462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.407527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:39.409645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.409713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.409890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.409930Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:39.410142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.410189Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:39.410273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:39.410303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.410344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:39.410377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.410407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:39.410430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:39.410514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:39.410581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:39.410611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:39.412357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.412465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.412501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:39.412534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:39.412580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.412701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
n: 72057594037968897 2025-04-07T13:53:39.733991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-07T13:53:39.734088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T13:53:39.734177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 2 -> 3 2025-04-07T13:53:39.735823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:53:39.738657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:53:39.738815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.739237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.739287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState at tabletId# 72057594046678944 2025-04-07T13:53:39.739377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409547 2025-04-07T13:53:39.775642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382272 2025-04-07T13:53:39.775845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409547 2025-04-07T13:53:39.876323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:53:39.876437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:53:39.876740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:53:39.876883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:53:39.876989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:53:39.877098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:53:39.877210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-07T13:53:39.877309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:53:39.877423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:53:39.877515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:53:39.877598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:53:39.877705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:53:39.884262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:53:39.884624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:53:39.884715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:53:39.884773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:53:39.884881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:53:39.884917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:53:39.884946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:53:39.885128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:53:39.885171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:53:39.885196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:53:39.885296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:53:39.885345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:53:39.885371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:53:39.885430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:53:39.885466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:53:39.885489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:53:39.885530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:53:39.885575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:53:39.885601Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:53:39.885974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:53:39.886030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:53:39.886058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:53:39.886243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:53:39.886283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:53:39.886311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:53:39.886439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:53:39.886474Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:53:39.886499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:53:39.886723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:53:39.886769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:53:39.886795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:53:39.886913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:53:39.886948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; TestModificationResult got TxId: 102, wait until txId: 102 >> TOlap::CreateTable [GOOD] >> TOlap::CreateStoreWithDirs [GOOD] >> KqpProxy::DatabasesCacheForServerless [GOOD] >> TOlap::AlterTtl [GOOD] >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:40.063942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:40.064038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.064091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:40.064129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:40.064193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:40.064224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:40.064309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.064615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:40.151088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:40.151139Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:40.162895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:40.163050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:40.163228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:40.169823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:40.170026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:40.170655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.170848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:40.172833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.174173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.174229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.174362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:40.174426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.174482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:40.174645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.181918Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:40.293224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:40.293478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.293693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:40.293935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:40.294000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.297602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.297751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:40.297919Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.297974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:40.298010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:40.298059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:40.300011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.300074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:40.300132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:40.302112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.302181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.302261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.302322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.306302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:40.308326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:40.308559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:40.309768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.309918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.309975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.310211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:40.310261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.310410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.310493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:40.312679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.312743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.312951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.313000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:40.313231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.313276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:40.313373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:40.313406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.313456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:40.313498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.313530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:40.313561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:40.313639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.313705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:40.313738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:40.315900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.315998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.316036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:40.316076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:40.316128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.316268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ode 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-07T13:53:40.574071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.574160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.574210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-07T13:53:40.574367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-07T13:53:40.574490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.574552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-07T13:53:40.576826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.576867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.576990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:40.577127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.577162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:53:40.577224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:53:40.577504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.577567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:53:40.577635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-07T13:53:40.578234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:40.578308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 101 2025-04-07T13:53:40.578337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:40.578373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:53:40.578404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.578980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:40.579048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:40.579072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:40.579095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:53:40.579117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:40.579168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-07T13:53:40.581275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-07T13:53:40.582309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:40.582605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:40.598455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-07T13:53:40.598507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:40.598614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-07T13:53:40.598706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:53:40.599023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-07T13:53:40.599089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:40.599209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 
2025-04-07T13:53:40.601240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.602084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.602204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.602258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:53:40.602361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:53:40.602394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:40.602457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:53:40.602518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 101 2025-04-07T13:53:40.602562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:53:40.602601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:53:40.602657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:53:40.602808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:40.607164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:53:40.607216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:364:2344] TestWaitNotification: OK eventTxId 101 2025-04-07T13:53:40.607724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:40.607993Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 250us result status StatusSuccess 2025-04-07T13:53:40.608635Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:39.617996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:39.618125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.618168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:39.618200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:39.618248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:39.618278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:39.618334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.618642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.711852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:39.711898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.730003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.730155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:39.730316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:39.776466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:39.776686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-07T13:53:39.777251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.777431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:39.779557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.780865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.780925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.781074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:39.781142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.781190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:39.781339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.788018Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:39.927997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:39.928233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.928415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:39.928617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:39.928910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.931176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.931305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:39.931470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.931520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:39.931567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:39.931607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:39.933701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.933752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:39.933789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:39.935824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.935878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.935978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.936018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.939371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:39.942389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:39.942579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:39.943514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.943643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:39.943687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.943928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:39.943995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.944131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.944213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:39.951097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.951155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.951337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T13:53:39.951392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:39.951600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.951641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:39.951728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:39.951756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.951804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:39.951839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.951870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:39.951895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:39.951964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:39.952016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:39.952044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:39.953953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.954060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.954097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:39.954138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:39.954199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.954343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
lanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.759425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.759479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId#107:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000007 2025-04-07T13:53:40.759597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 129 2025-04-07T13:53:40.759724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.759786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: Erasing txId 107 2025-04-07T13:53:40.761522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.761573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.761676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:40.761801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.761835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-04-07T13:53:40.761868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-07T13:53:40.762184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.762236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState at schemeshard: 72057594046678944 2025-04-07T13:53:40.762288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-07T13:53:40.762615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:53:40.762680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:53:40.762708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T13:53:40.762735Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:53:40.762761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:40.763059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:53:40.763121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:53:40.763145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T13:53:40.763173Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:53:40.763195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.763254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-07T13:53:40.773344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-07T13:53:40.773460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409546 2025-04-07T13:53:40.773656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T13:53:40.773973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2025-04-07T13:53:40.774010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:40.774123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2025-04-07T13:53:40.774167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 130 2025-04-07T13:53:40.774719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T13:53:40.776263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.776387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.776438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:40.776512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:40.776614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-07T13:53:40.776648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T13:53:40.776707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-07T13:53:40.776767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T13:53:40.776801Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T13:53:40.776827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T13:53:40.776923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:53:40.778665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:53:40.779037Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T13:53:40.791250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.792121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409546 2025-04-07T13:53:40.792830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:53:40.792881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T13:53:40.792951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.807273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T13:53:40.807352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T13:53:40.807666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-07T13:53:40.807977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-07T13:53:40.808012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-07T13:53:40.808475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-07T13:53:40.808552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T13:53:40.808593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:656:2634] TestWaitNotification: OK eventTxId 107 2025-04-07T13:53:40.809163Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:40.809320Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 173us result status StatusPathDoesNotExist 2025-04-07T13:53:40.809466Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:53:40.810043Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-07T13:53:40.810152Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 92us result status StatusPathDoesNotExist 2025-04-07T13:53:40.810226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScanArrowFormat::SingleKey >> TOlap::CreateTableTtl [GOOD] >> KqpScanArrowInChanels::AllTypesColumns >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:39.346686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:39.346772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.346811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:39.346841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:39.346893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:39.346919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:39.346971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.347335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.444736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:39.444776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.455713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.455833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:39.455989Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:39.462780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:39.462976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:39.463585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.463761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:39.465973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.467431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.467489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.467617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:39.467681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.467733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:39.467894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.475846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:39.863133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:39.863364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.863552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:39.863767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:39.863823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.866957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.867121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:39.867322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.867384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-04-07T13:53:39.867418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:39.867450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:39.870260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.870322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:39.870363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:39.872948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.873016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.873069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.873114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.876504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:39.879314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:39.879540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:39.880586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.880751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:39.880825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.881064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:39.881117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.881277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.881377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:39.892183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T13:53:39.892259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.892455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.892501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:39.892764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.892811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:39.892903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:39.892931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.892977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:39.893016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.893052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:39.893078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:39.893162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:39.893225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:39.893257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:39.895111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.895220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.895254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:39.895293Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:39.895341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.895453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
rd: 72057594046678944 2025-04-07T13:53:40.893557Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 111:2 ProgressState 2025-04-07T13:53:40.893674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#111:2 progress is 3/3 2025-04-07T13:53:40.893755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-04-07T13:53:40.893820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2025-04-07T13:53:40.893888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 111 2025-04-07T13:53:40.893929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-04-07T13:53:40.893966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:0 2025-04-07T13:53:40.893998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:0 2025-04-07T13:53:40.896805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-07T13:53:40.896871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:1 2025-04-07T13:53:40.896896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:1 2025-04-07T13:53:40.896940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-07T13:53:40.896958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:2 2025-04-07T13:53:40.897012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:2 2025-04-07T13:53:40.897093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-07T13:53:40.901373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-04-07T13:53:40.901440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:629:2607] TestWaitNotification: OK eventTxId 111 2025-04-07T13:53:40.902205Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:40.906493Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/DirA/DirB/NestedTable" took 4.29ms result status StatusSuccess 2025-04-07T13:53:40.906937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" PathDescription { Self { Name: "NestedTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 111 CreateStep: 5000006 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "NestedTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 112 2025-04-07T13:53:40.910438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore/MyDir" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TableWithTiers" Schema { Columns { Name: "timestamp" Type: "Timestamp" } Columns { Name: "data" Type: "Utf8" } KeyColumnNames: "timestamp" } ColumnShardCount: 1 } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:40.910784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/MyDir/TableWithTiers, opId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.911067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: MyDir, child name: TableWithTiers, child id: [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2025-04-07T13:53:40.911143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-04-07T13:53:40.911303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2025-04-07T13:53:40.911504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 112:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:40.911545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.911642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-04-07T13:53:40.911703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-07T13:53:40.923117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 112, response: Status: StatusAccepted TxId: 112 SchemeshardId: 72057594046678944 PathId: 9, at schemeshard: 72057594046678944 2025-04-07T13:53:40.923335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 112, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/MyDir/ 2025-04-07T13:53:40.923641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.923680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:53:40.923812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-04-07T13:53:40.923916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.923967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-04-07T13:53:40.924056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 9 2025-04-07T13:53:40.924376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.924428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState at tabletId# 72057594046678944 2025-04-07T13:53:40.924560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-07T13:53:40.925251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:53:40.925329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:53:40.925371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:53:40.925431Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-04-07T13:53:40.925475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T13:53:40.928701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:53:40.928802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 
112 2025-04-07T13:53:40.928834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:53:40.928864Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 1 2025-04-07T13:53:40.928892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-07T13:53:40.928983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-07T13:53:40.930671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-07T13:53:40.930800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 0, tablet: 72075186233409546 2025-04-07T13:53:40.932709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:53:40.942357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 TestModificationResult got TxId: 112, wait until txId: 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStoreWithDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:40.434014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:40.434135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.434175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:40.434212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:40.434264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:40.434293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:40.434367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.434667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:40.533970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:40.534023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:40.545765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:40.545898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:40.546044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T13:53:40.553479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:40.553660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:40.554238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.554408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:40.556009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.557204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.557266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.557380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:40.557438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.557496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:40.557647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.564856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:40.711576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:40.711815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.711997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:40.712224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:40.712276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.724610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.724781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:40.724957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.725015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:40.725055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:40.725088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:40.727007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.727065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:40.727103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:40.728720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.728773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.728840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.728886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.732938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:40.734590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:40.734771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:40.735670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.735796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.735843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.736070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:40.736119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.736284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.736348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:40.744849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.744944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.745123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.745164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:40.745376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.745418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:40.745516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:40.745551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.745596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:40.745634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.745669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:40.745698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:40.745778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.745827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:40.745862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:40.747676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.747779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.747814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:40.747852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:40.747896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.747990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
41.108374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: false 2025-04-07T13:53:41.113303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.113420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.113457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:41.113500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:53:41.113540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:41.115414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.115497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.115522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:41.115550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T13:53:41.115594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:41.116039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.116170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.116201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:41.116245Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-07T13:53:41.116277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:53:41.117166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.117232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:53:41.117255Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:53:41.117284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T13:53:41.117326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:53:41.117387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T13:53:41.118295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:2 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-07T13:53:41.125123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:41.127101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:41.127193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:41.127252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:53:41.142910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-07T13:53:41.142978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-07T13:53:41.143111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-07T13:53:41.143217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:53:41.143616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-07T13:53:41.143677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-07T13:53:41.143812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-07T13:53:41.151836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T13:53:41.152393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T13:53:41.152519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T13:53:41.152575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T13:53:41.152705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-04-07T13:53:41.152745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 101 ready parts: 3/3 2025-04-07T13:53:41.152807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T13:53:41.152884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:367:2347] message: TxId: 101 2025-04-07T13:53:41.152956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T13:53:41.153003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:53:41.153048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:53:41.153125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:53:41.153158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T13:53:41.153176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T13:53:41.153200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:53:41.153221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T13:53:41.153235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T13:53:41.153295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:53:41.162018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:53:41.162097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:368:2348] TestWaitNotification: OK eventTxId 101 2025-04-07T13:53:41.162634Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/DirB/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:41.162888Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/DirB/OlapStore" took 267us result status StatusSuccess 2025-04-07T13:53:41.163484Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/DirB/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:39.074130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:39.074224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.074261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:39.074291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:39.074331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:39.074361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:39.074427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.074742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.202732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:39.202794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.226176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.226338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:39.226507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:39.233747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:39.233944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-07T13:53:39.234514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.234698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:39.236751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.238015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.238068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.238189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:39.238247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.238284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:39.238421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.245880Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:39.375465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:39.375701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.375913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:39.376141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:39.376213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.386473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.386628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:39.386886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.386955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:39.386993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:39.387040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:39.394335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.394416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:39.394451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:39.401743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.401824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.401883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.401939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.405530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:39.409504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:39.409707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:39.410730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.410906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:39.410958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.411183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:39.411226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.411364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.411444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:39.413930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.413980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.414186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T13:53:39.414225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:39.414438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.414482Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:39.414576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:39.414609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.414654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:39.414692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.414723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:39.414749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:39.414894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:39.414950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:39.414979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:39.416995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.417102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.417138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:39.417172Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:39.417224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.417321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T13:53:41.538584Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-07T13:53:41.555096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-07T13:53:41.555156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:41.555293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-07T13:53:41.555337Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T13:53:41.555388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.555418Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-04-07T13:53:41.557425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.557584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.557656Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply ProgressState at tablet: 72057594046678944 2025-04-07T13:53:41.557743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-07T13:53:41.557898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:41.559404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-04-07T13:53:41.559519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 105 at step: 5000006 2025-04-07T13:53:41.559862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:41.559966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-07T13:53:41.560027Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2025-04-07T13:53:41.560618Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-04-07T13:53:41.560851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:41.560923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2025-04-07T13:53:41.563796Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:41.563841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:41.564058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:53:41.564236Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:41.564275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-07T13:53:41.564331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T13:53:41.564654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.564726Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:53:41.564783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-07T13:53:41.565541Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.565633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.565665Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:53:41.565705Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-07T13:53:41.565741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:41.566455Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.566515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.566537Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:53:41.566562Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-04-07T13:53:41.566585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:53:41.566637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T13:53:41.569097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-07T13:53:41.569447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:53:41.576232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:53:41.588759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2025-04-07T13:53:41.588822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:41.588932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2025-04-07T13:53:41.588988Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2025-04-07T13:53:41.589368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 2025-04-07T13:53:41.589410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:41.589494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 FAKE_COORDINATOR: Erasing txId 105 2025-04-07T13:53:41.592484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.592914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.593032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.593077Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T13:53:41.593179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T13:53:41.593214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:53:41.593276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
105, ready parts: 1/1, is published: true 2025-04-07T13:53:41.593362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:331:2311] message: TxId: 105 2025-04-07T13:53:41.593413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T13:53:41.593448Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:53:41.593481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:53:41.593596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:53:41.595249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:53:41.595294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:509:2487] TestWaitNotification: OK eventTxId 105 >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:40.463526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:40.463626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.463714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:40.463754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:40.463799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:40.463827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:40.463889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:40.464239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:40.546048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:40.546098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:40.578138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:40.578268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:40.578428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:40.599982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:40.600185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-07T13:53:40.600844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.601045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:40.609729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.610945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.611003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.611158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:40.611218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.611262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:40.611408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.637563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:40.874360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:40.874598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.874791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:40.875000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:40.875051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.878370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.878500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:40.878678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.878763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:40.878797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:40.878831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:40.885136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.885222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:40.885261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:40.897612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.897678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.897730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.897789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.903261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:40.905458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:40.905674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:40.906596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.906734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.906782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.907001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:40.907046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.907183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.907263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:40.910602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.910657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.910806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T13:53:40.910865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:40.911077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.911117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:40.911203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:40.911233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.911278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:40.911311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.911343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:40.911369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:40.911439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.911487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:40.911521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:40.913246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.913339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.913372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:40.913415Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:40.913463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.913554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ult> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 104 2025-04-07T13:53:41.685498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.685681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.685819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.685870Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T13:53:41.685980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T13:53:41.686017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T13:53:41.686085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T13:53:41.686149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 104 2025-04-07T13:53:41.686192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T13:53:41.686237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T13:53:41.686268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T13:53:41.686395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T13:53:41.688706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T13:53:41.688768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:568:2546] TestWaitNotification: OK eventTxId 104 2025-04-07T13:53:41.689432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:41.689714Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 273us result status StatusSuccess 2025-04-07T13:53:41.690242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 104 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 
0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { StorageName: "Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 105 2025-04-07T13:53:41.694355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { StorageName: "Tier1" } } } } ColumnShardCount: 1 } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:41.694707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.695030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-04-07T13:53:41.695098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2025-04-07T13:53:41.695257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-04-07T13:53:41.695473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:41.695522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.695620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:53:41.695672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-07T13:53:41.698290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusAccepted TxId: 105 SchemeshardId: 72057594046678944 PathId: 6, at schemeshard: 72057594046678944 2025-04-07T13:53:41.698449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-04-07T13:53:41.698675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:41.698710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:41.698890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-04-07T13:53:41.698990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:41.699040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-07T13:53:41.699082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 6 2025-04-07T13:53:41.699427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:53:41.699506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState at tabletId# 72057594046678944 2025-04-07T13:53:41.699674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-07T13:53:41.700357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.700456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.700494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:53:41.700531Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-07T13:53:41.700569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T13:53:41.701771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.701851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:53:41.701877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, 
txId: 105 2025-04-07T13:53:41.701905Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2025-04-07T13:53:41.701939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T13:53:41.702020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T13:53:41.704527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-07T13:53:41.704675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 0, tablet: 72075186233409546 2025-04-07T13:53:41.706716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:53:41.706862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-04-07T13:52:58.891001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568900943212708:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:58.891050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:59.017340Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568904640587402:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:59.076395Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490568903160806557:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:59.076447Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:59.017376Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:59.213480Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490568903383570849:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8a/r3tmp/tmp6mdxnb/pdisk_1.dat 2025-04-07T13:53:00.150699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:00.168360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:00.208407Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:00.329457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:00.345316Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.193190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.237329Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.253894Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.425197Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.445339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:01.468178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.468252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.469965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.470035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.470180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.470206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.492794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.492861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.493036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:01.493079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:01.516911Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:01.521063Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T13:53:01.521268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.522306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.522717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.534337Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T13:53:01.534370Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-07T13:53:01.568217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.569550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:01.573442Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17484 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T13:53:03.147492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:03.892971Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568900943212708:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:03.893035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:04.017876Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490568904640587402:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:04.017924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:04.028762Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490568903383570849:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:04.028823Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:04.080034Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490568903160806557:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:04.080128Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:09.918021Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:09.941670Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T13:53:10.082415Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T13:53:10.093166Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:10.093213Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:10.093244Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T13:53:10.093274Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T13:53:10.110809Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T13:53:10.114322Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-07T13:53:10.114333Z node 3 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-07T13:53:10.114372Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-07T13:53:10.114483Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-07T13:53:10.114490Z node 3 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-07T13:53:10.114514Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-07T13:53:10.124993Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-07T13:53:10.125006Z node 3 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-07T13:53:10.125039Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-04-07T13:53:10.143163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T13:53:10.146369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:53:10.147952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemes ... e] Updated node info, noode count: 3 2025-04-07T13:53:27.540936Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490569027394612670:2302], Start check tables existence, number paths: 2 2025-04-07T13:53:27.541102Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:53:27.548519Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490569027394612670:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:53:27.548588Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490569027394612670:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:53:27.548655Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490569027394612670:2302], Successfully finished 2025-04-07T13:53:27.548774Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:53:27.625643Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:27.658497Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490569024215977594:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:27.658545Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:27.736169Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:27.736263Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:27.754914Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-07T13:53:27.756807Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:27.986481Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:28.035483Z node 8 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. 
EnableColumnStatistics=false 2025-04-07T13:53:28.150300Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:28.150399Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:28.156989Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:28.280288Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:53:28.331492Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490569031730962566:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:28.374103Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:28.374176Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:28.387205Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-04-07T13:53:28.388105Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:28.395438Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:28.576545Z node 7 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. EnableColumnStatistics=false 2025-04-07T13:53:28.578653Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:28.668743Z node 7 :HIVE WARN: HIVE#72075186224037898 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:28.668817Z node 7 :HIVE WARN: HIVE#72075186224037898 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:28.708556Z node 7 :HIVE WARN: HIVE#72075186224037898 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:28.818679Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:28.854477Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:28.854631Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:28.854720Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:28.854858Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:28.854940Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:28.855025Z node 7 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:53:29.245923Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:29.423245Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7490569036025930659:2520], Database: /Root/test-serverless, Start database fetching 2025-04-07T13:53:29.423382Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TDatabaseFetcherActor] ActorId: [7:7490569036025930659:2520], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-04-07T13:53:32.660895Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7490569024215977594:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:32.660965Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:33.225675Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:53:33.226163Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:53:33.226181Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:53:33.226267Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7490569049985782125:2319], Start check tables existence, number paths: 2 2025-04-07T13:53:33.227356Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T13:53:33.227806Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7490569049985782125:2319], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:53:33.227874Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7490569049985782125:2319], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:53:33.227904Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7490569049985782125:2319], Successfully finished 2025-04-07T13:53:33.227935Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:53:33.330925Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490569031730962566:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:33.330995Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:34.472885Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:53:34.472955Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7490569057500767285:2348], Start check tables existence, number paths: 2 2025-04-07T13:53:34.473495Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:53:34.473511Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:53:34.478308Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T13:53:34.479022Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7490569057500767285:2348], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:53:34.479096Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7490569057500767285:2348], Describe table 
/Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:53:34.479132Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7490569057500767285:2348], Successfully finished 2025-04-07T13:53:34.479188Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:53:37.460721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:53:37.460748Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.431386Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-04-07T13:53:39.436953Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:53:39.456107Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-07T13:53:39.457157Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T13:53:39.489369Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:53:39.489863Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:53:39.489910Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:53:39.510277Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:53:39.524992Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzJhOWU5OTQtOTdkYTRkMmItYWNhYjIwYWYtZjcwMjFjN2U=, ActorId: [6:7490569027394612672:2304], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2025-04-07T13:52:06.190229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:52:06.190336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:06.190388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:52:06.190426Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:52:06.190472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:52:06.190502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:52:06.190593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:52:06.190973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:52:06.292394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:52:06.292462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:06.306355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:52:06.306606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:52:06.306797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:52:06.320600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:52:06.321246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:52:06.321854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.322103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:52:06.324714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.325868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:06.325937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.326067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:52:06.326107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:06.326146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:52:06.326250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.332866Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:341:2060] recipient: [1:17:2064] 2025-04-07T13:52:06.460796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:52:06.461056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.461243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:52:06.461465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:52:06.461532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.469920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.470063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:52:06.470244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.470305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:52:06.470337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:52:06.470368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:52:06.477692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.477762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:52:06.477830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:52:06.480042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.480128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.480172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.480222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.483582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:52:06.485593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:52:06.485795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:52:06.486766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:52:06.486901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T13:52:06.486949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.487181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:52:06.487231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:52:06.487392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:52:06.487470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:52:06.489887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:52:06.489953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:52:06.490133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:52:06.490168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:308:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:52:06.490517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:52:06.490563Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:52:06.490653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:52:06.490683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.490739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:52:06.490774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:52:06.490805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:52:06.490834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:52:06.490897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:52:06.490949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:52:06.490984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:52:06.492613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:06.492992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:52:06.493037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:52:06.493069Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:52:06.493116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:52:06.493230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... CE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.041384Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.041458Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.041498Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.330354Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.330431Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.330503Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.330531Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.614112Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.614188Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.614265Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.614295Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.894076Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.894152Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:39.894222Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:39.894251Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.192700Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.192792Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.193286Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.193323Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.466747Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.466817Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.466887Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.466914Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.773244Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.773310Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:40.773374Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:40.773401Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.038634Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.038703Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.038794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.038823Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.318231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.318312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.318382Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.318411Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.641231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.641316Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:41.641396Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, 
Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.641423Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:41.689544Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1070:2835], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T13:53:41.689643Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T13:53:41.689799Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:53:41.690046Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 246us result status StatusPathDoesNotExist 2025-04-07T13:53:41.690225Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:53:41.690825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1071:2836], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T13:53:41.690885Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T13:53:41.690989Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:53:41.691189Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 201us result status StatusPathDoesNotExist 2025-04-07T13:53:41.691373Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:53:41.691894Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1072:2837], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2025-04-07T13:53:41.691950Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T13:53:41.692054Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:53:41.692253Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 207us result status StatusPathDoesNotExist 2025-04-07T13:53:41.692414Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScanArrowFormat::AllTypesColumns >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] |86.6%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> Cdc::DocApi[PqRunner] |86.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> Cdc::UuidExchange[PqRunner] >> TTicketParserTest::TicketFromCertificateWithValidationGood >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> TPQCompatTest::BadTopics [GOOD] |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |86.6%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCompatTest::CommitOffsets |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> TTicketParserTest::AuthenticationWithUserAccount |86.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> Worker::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 20148, MsgBus: 5895 2025-04-07T13:51:33.810234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568534051528154:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:33.810414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ad3/r3tmp/tmpviqKaT/pdisk_1.dat 2025-04-07T13:51:34.593857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:34.602107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:51:34.602189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:51:34.609717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20148, node 1 2025-04-07T13:51:34.945126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:34.945144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:34.945156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:34.945226Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5895 TClient is connected to server localhost:5895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:51:36.065819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.106574Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:51:36.128782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:51:36.329516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:51:36.568428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:36.747197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:38.807420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568534051528154:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:51:38.807496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:51:39.244380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568559821333499:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.244505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.512145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.555869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.604708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.636143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.662224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.727501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:51:39.828316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568559821334004:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.828470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.830064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568559821334009:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:51:39.833192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:51:39.854345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568559821334012:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:51:41.363758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:51:43.106095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710708. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 18] Access: 2 SyncVersion: false Status: PathErrorNotExist Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T13:51:43.115775Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFmMjY4NmItZTE1ZDU1ZDYtY2E5NWQ0OTctMzg2ZDA1ZTA=, ActorId: [1:7490568568411269707:2527], ActorState: ExecuteState, TraceId: 01jr88awdncjrrpn458gbknr7c, Create QueryResponse for error on request, msg: 2025-04-07T13:51:49.582422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:51:49.582449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:51.943440Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-07T13:51:51.943481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-07T13:51:51.970485Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490568611360949082:2527] TxId: 281474976711096. Ctx: { TraceId: 01jr88b4xd1x4a93aa985b2qnj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFmMjY4NmItZTE1ZDU1ZDYtY2E5NWQ0OTctMzg2ZDA1ZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting data TxId 281474976711096 because datashard 72075186224037927: is in process of split opId 281474976715660 state SplitSrcWaitForPartitioningChanged (wrong shard state); 2025-04-07T13:51:51.970923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFmMjY4NmItZTE1ZDU1ZDYtY2E5NWQ0OTctMzg2ZDA1ZTA=, ActorId: [1:7490568568411269707:2527], ActorState: ExecuteState, TraceId: 01jr88b4xd1x4a93aa985b2qnj, Create QueryResponse for error on request, msg: 2025-04-07T13:51:51.986089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-07T13:51:52.011808Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-07T13:51:52.092072Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-07T13:51:52.092107Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-07T13:51:52.098182Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-07T13:51:52.098314Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-07T13:51:52.109633Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-07T13:51:56.387200Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-07T13:51:56.392372Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-07T13:52:01.910479Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-07T13:52:01.939221Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-07T13:52:01.939258Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-04-07T13:52:01.939283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 20 ... node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:50.246489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:50.250035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6296, node 2 2025-04-07T13:52:50.321187Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:52:50.321212Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:52:50.321225Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:52:50.321358Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28240 TClient is connected to server localhost:28240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:50.719487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:50.729345Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:52:50.739091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:50.815748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:52:50.994100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:51.072005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:53.589898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568881251503596:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:53.589990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:53.634915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.665641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.701733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.738605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.768222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.809611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:52:53.866994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568881251504089:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:53.867085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:53.867324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568881251504094:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:53.870797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:52:53.892843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568881251504096:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:52:55.040906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:55.047641Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568868366599991:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:55.047712Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:05.188927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:53:05.188956Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.392868Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-07T13:53:05.399088Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-07T13:53:05.399153Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-07T13:53:05.399175Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-07T13:53:05.414838Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7490568889841440414:2586] Apply status: status# 2, reason# 7 2025-04-07T13:53:05.468818Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-07T13:53:05.481585Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7490568932791117694:2586] Handshake status: status# 2, reason# 7 2025-04-07T13:53:05.483441Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-07T13:53:05.483476Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-07T13:53:05.483494Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-07T13:53:05.492615Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-07T13:53:05.499895Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-07T13:53:05.499938Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-07T13:53:10.324714Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-07T13:53:10.324756Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-07T13:53:15.400419Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found 2025-04-07T13:53:15.400457Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not 
found 2025-04-07T13:53:15.500262Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-07T13:53:15.545832Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7490568889841440415:2586] Apply status: status# 2, reason# 7 2025-04-07T13:53:15.565522Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found 2025-04-07T13:53:15.583181Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7490568975740794589:2586] Handshake status: status# 2, reason# 7 2025-04-07T13:53:22.459967Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037936:1][72075186224037919][2:7490569005805568031:2594] Handshake status: status# 2, reason# 7 2025-04-07T13:53:25.508936Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2025-04-07T13:53:25.523586Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037944 not found 2025-04-07T13:53:25.523639Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037943 not found 2025-04-07T13:53:25.600053Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037945 not found 2025-04-07T13:53:25.633043Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037942 not found 2025-04-07T13:53:25.703867Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-04-07T13:53:37.876011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569069041342952:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:37.876893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001539/r3tmp/tmpXCBcgv/pdisk_1.dat 2025-04-07T13:53:38.512755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:38.512864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:38.521980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:38.532949Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26756 TServer::EnableGrpc on GrpcPort 29273, node 1 2025-04-07T13:53:38.821290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:38.821327Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:38.821333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:38.821405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:39.274512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:39.289445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:39.435229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034019574 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2025-04-07T13:53:39.605141Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handshake: worker# [1:7490569077631278135:2400] 2025-04-07T13:53:39.605199Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handshake: worker# [1:7490569077631278135:2400] 2025-04-07T13:53:39.607850Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:53:39.608085Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:53:39.608129Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Send handshake: worker# [1:7490569077631278135:2400] 2025-04-07T13:53:39.608194Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-07T13:53:39.608204Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7490569077631278135:2400] Handshake with writer: sender# [1:7490569077631278137:2400] 2025-04-07T13:53:39.617230Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Create read session: session# [1:7490569077631278140:2287] 2025-04-07T13:53:39.617283Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-07T13:53:39.617295Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7490569077631278135:2400] Handshake with reader: sender# [1:7490569077631278136:2400] 2025-04-07T13:53:39.617324Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:39.662813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-07T13:53:41.752120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569086221212887:2335], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:41.752301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:41.753154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569086221212912:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:41.753208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569086221212913:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:41.758063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-07T13:53:41.780839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569086221212917:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:53:41.780917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569086221212916:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:53:42.880936Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569069041342952:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:42.880996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:43.043640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-07T13:53:43.591091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.204930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.973237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:53:45.683819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:53:47.009855Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 36b Codec: RAW }] } } 2025-04-07T13:53:47.009952Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2025-04-07T13:53:46Z }] } 2025-04-07T13:53:47.010019Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2025-04-07T13:53:46Z }] } 2025-04-07T13:53:47.010147Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-04-07T13:53:47.010256Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T13:53:47.010293Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-07T13:53:47.010354Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-07T13:53:47.021080Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:53:47.021157Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-07T13:53:47.021215Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-04-07T13:53:47.021289Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.021320Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.281576Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 36b Codec: RAW }] } } 2025-04-07T13:53:47.281653Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2025-04-07T13:53:47Z }] } 2025-04-07T13:53:47.281701Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2025-04-07T13:53:47Z }] } 2025-04-07T13:53:47.281794Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-04-07T13:53:47.281882Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-07T13:53:47.286776Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:53:47.286838Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-07T13:53:47.286875Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-07T13:53:47.286918Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.286946Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.524409Z node 1 :REPLICATION_SERVICE DEBUG: 
[RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 2 Data: 36b Codec: RAW }] } } 2025-04-07T13:53:47.524462Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2025-04-07T13:53:47Z }] } 2025-04-07T13:53:47.524497Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2025-04-07T13:53:47Z }] } 2025-04-07T13:53:47.524561Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-04-07T13:53:47.524619Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-07T13:53:47.526483Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7490569111991017540:2400] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:53:47.526564Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-07T13:53:47.526607Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7490569077631278137:2400] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-04-07T13:53:47.526656Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.526691Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:53:47.744825Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-04-07T13:53:47.744852Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7490569077631278136:2400] Leave 2025-04-07T13:53:47.744900Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7490569077631278135:2400] Reader has gone: sender# [1:7490569077631278136:2400] 2025-04-07T13:53:47.744945Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569111991017735:2400] Handshake: worker# [1:7490569077631278135:2400] 2025-04-07T13:53:47.751405Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569111991017735:2400] Create read session: session# [1:7490569111991017740:2287] 2025-04-07T13:53:47.751466Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7490569077631278135:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-07T13:53:47.751476Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7490569077631278135:2400] Handshake with reader: sender# [1:7490569111991017735:2400] 2025-04-07T13:53:47.751496Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7490569111991017735:2400] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TTicketParserTest::NebiusAuthorizationRetryError >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:39.871030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:39.871145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.871188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:39.871227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:39.871272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2025-04-07T13:53:39.871304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:39.871369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:39.871732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.946592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:39.946650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.957524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.957670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:39.957860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:39.963889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:39.964099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:39.964778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.964980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:39.966813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.968063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.968134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.968283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:39.968337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.968377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:39.968525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.975383Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:40.183706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:40.183977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.184207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:40.184429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:40.193593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.200939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.201120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:40.201414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.201491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:40.201548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:40.201597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:40.203979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.204049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:40.204084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:40.206075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.206142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.206203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.206253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.209788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:40.211967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:40.212193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:40.213272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:40.213422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:40.213474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.213720Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:40.213773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:40.213930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.214001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:40.216436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:40.216481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:40.216738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:40.216784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:40.217029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:40.217074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:40.217172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:40.217211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.217256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:40.217298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:40.217347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:40.217384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:40.217481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:40.217528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:40.217563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:40.219349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.219462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:40.219500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:40.219541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:40.219590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:40.219697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 678944 2025-04-07T13:53:52.075277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:53:52.075378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T13:53:52.075676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:50 2025-04-07T13:53:52.075706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-04-07T13:53:52.075803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-04-07T13:53:52.075821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-04-07T13:53:52.075899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-04-07T13:53:52.075919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-04-07T13:53:52.075978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-04-07T13:53:52.076001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-04-07T13:53:52.076056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-07T13:53:52.076086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-07T13:53:52.076169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-04-07T13:53:52.076189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-04-07T13:53:52.076252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-04-07T13:53:52.076281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-04-07T13:53:52.076350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-04-07T13:53:52.076369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2025-04-07T13:53:52.082357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-04-07T13:53:52.082398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-04-07T13:53:52.082529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-04-07T13:53:52.082549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-04-07T13:53:52.082626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T13:53:52.082641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T13:53:52.082742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T13:53:52.082785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T13:53:52.082896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 
2025-04-07T13:53:52.082917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T13:53:52.083008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-07T13:53:52.083025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-07T13:53:52.083108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-07T13:53:52.083128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-07T13:53:52.083194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-07T13:53:52.083216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-07T13:53:52.083832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-07T13:53:52.083871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-07T13:53:52.083954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-07T13:53:52.083966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-07T13:53:52.083993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-07T13:53:52.084004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-07T13:53:52.084048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-07T13:53:52.084076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-07T13:53:52.084168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-07T13:53:52.084188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-07T13:53:52.084238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-07T13:53:52.084259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-07T13:53:52.091782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-07T13:53:52.091843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-07T13:53:52.091964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-04-07T13:53:52.091985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-04-07T13:53:52.092071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-07T13:53:52.092122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-07T13:53:52.092224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-07T13:53:52.092244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-07T13:53:52.092372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-07T13:53:52.092404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 
2025-04-07T13:53:52.092491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-07T13:53:52.092510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-07T13:53:52.092618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-07T13:53:52.092649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-07T13:53:52.092779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-07T13:53:52.092800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-07T13:53:52.093795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-04-07T13:53:52.093830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-04-07T13:53:52.093946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-04-07T13:53:52.093969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-04-07T13:53:52.094034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-04-07T13:53:52.094055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-04-07T13:53:52.094108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-04-07T13:53:52.094128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-04-07T13:53:52.094223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-04-07T13:53:52.094253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-04-07T13:53:52.094329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-04-07T13:53:52.094358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-04-07T13:53:52.094428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-04-07T13:53:52.094461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-04-07T13:53:52.094523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2025-04-07T13:53:52.094554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-04-07T13:53:52.099445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-04-07T13:53:52.099526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-04-07T13:53:52.099796Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-04-07T13:53:52.101476Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:53:52.101705Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/MyDir/ColumnTable" took 264us result status StatusPathDoesNotExist 2025-04-07T13:53:52.101851Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:53:52.102532Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-07T13:53:52.102615Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 98us result status StatusPathDoesNotExist 2025-04-07T13:53:52.102687Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> TTicketParserTest::LoginBad >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:51.255363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:51.256790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:51.256842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:51.256894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:51.256954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:51.256997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:51.257055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:51.257376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:51.351282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:51.351352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:51.387942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:51.388086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:51.388263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:51.406991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:51.407250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:51.407875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.408124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:51.416741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.422054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:51.422130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.422256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:51.422327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:51.422380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:51.422578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.434823Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:51.646503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:51.646884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.647234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:51.647554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:51.647651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.661734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.661944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:51.662161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.662236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:51.662283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:51.662328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:51.668046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.668149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:51.668191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:51.677626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.677696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.677745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:51.677818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.682342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:51.697634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:51.697865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:51.699039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.699193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:51.699237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:51.699478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:51.699530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-07T13:53:51.699708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:51.699839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:51.711842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:51.711937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:51.712141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.712180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:51.712449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.712496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:51.712619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:51.712684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.712729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:51.712806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.712846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:51.712874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:51.712963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:51.712997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:51.713026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:51.715078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:51.715184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:51.715219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:51.715275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:51.715320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:51.715409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
4-07T13:53:52.684982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:53:52.685028Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:53:52.685077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:53:52.685191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:20632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9815D2D8-617F-4A14-B5CB-49429AB43C81 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:20632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94607B09-7238-4067-B4F0-6C5C23A2A199 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T13:53:52.689648Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-04-07T13:53:52.689711Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:53:52.689883Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:53:52.696747Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:53:52.702158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:20632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BDD6D221-D4A8-4536-BBB0-970DE83634BD amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T13:53:52.703926Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:20632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AA894202-BE9A-48E9-AEA3-E5AE91B187BB amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-07T13:53:52.724803Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-07T13:53:52.725153Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:465:2428] 2025-04-07T13:53:52.725294Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:466:2429], sender# [1:465:2428], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2025-04-07T13:53:52.725810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.725860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T13:53:52.726035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.726167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.726247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:52.726389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:20632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BC8A1978-2E33-4F6D-976B-788F621D7437 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-07T13:53:52.728890Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-07T13:53:52.729036Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:466:2429], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:53:52.729193Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:465:2428], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:53:52.737124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.757812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.757873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:52.758015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.758117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:52.758178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:52.758234Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.758276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:53:52.758310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:53:52.758333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:53:52.758541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:52.761678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.762093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.762168Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:53:52.762270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:53:52.762304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:52.762363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:53:52.762435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:374:2339] message: TxId: 102 2025-04-07T13:53:52.762490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:52.762539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:53:52.762573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:53:52.762810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:52.764927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:53:52.764974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:444:2408] TestWaitNotification: OK eventTxId 102 >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:51.600654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:51.600781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:51.600826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:51.600885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:51.600951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:51.600985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:51.601043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:51.601383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:51.709239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:51.709314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:51.726387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:51.726525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:51.726700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:51.763097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-07T13:53:51.763368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:51.764036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.764276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:51.772925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.774376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:51.774452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.774601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:51.774683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:51.774750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:51.774962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.793679Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:52.175972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:52.176306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.179965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:52.180479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:52.180594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.186889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:52.187096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:52.187329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.187407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:52.187453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:52.187499Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:52.202090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.202196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:52.202240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:52.208038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.208136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.208179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:52.208229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:52.211945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:52.214311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:52.214515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:52.215734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:52.215878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:52.215925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:52.216206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:52.216268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:52.216473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:52.216570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:52.218923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:52.218995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T13:53:52.219185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:52.219223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:52.219506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:52.219568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:52.219674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:52.219704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:52.219746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:52.219798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:52.219835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:52.219866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:52.219956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:52.219997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:52.220050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:52.221955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:52.222055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:52.222090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:52.222145Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:52.222186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:52.222282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:52.746930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:53:52.747129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T13:53:52.748387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:52.748510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:52.748566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T13:53:52.748711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T13:53:52.748847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:52.904966Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:402:2375], attempt# 0 2025-04-07T13:53:52.998665Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:402:2375], sender# [1:401:2374] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:1039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 61DAE0A1-E2C2-4FFE-8AA7-60FB255D4E41 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-07T13:53:53.016360Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:53:53.030049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:53.030143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:53:53.030431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:53.030475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:53:53.030900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-04-07T13:53:53.030956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:53.031775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:53.031878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:53:53.031930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:53:53.031992Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:53:53.032031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:53:53.032130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:1039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6E3E8B1A-B16E-4490-AA38-E571C3E62BF5 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T13:53:53.044106Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2025-04-07T13:53:53.053712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:1039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8CD682F1-7E15-486A-9A76-5CDF6760D06D amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-07T13:53:53.057916Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-07T13:53:53.058057Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:401:2374] 2025-04-07T13:53:53.058285Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:402:2375], sender# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:1039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3D462E76-1907-47B8-8F5A-D1B3D1487D8D amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream 
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-07T13:53:53.068849Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-07T13:53:53.068937Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:402:2375], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:53:53.069129Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:53:53.084078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:53.084179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:53:53.084443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:53.084539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:53:53.084598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:53.084634Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:53.084703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:53:53.084752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:53:53.084924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:53.088810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:53.089225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:53:53.089280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:53:53.089407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-07T13:53:53.089449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:53.089496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:53:53.089570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T13:53:53.089615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:53:53.089651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:53:53.089687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:53:53.089812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:53:53.092237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:53:53.092290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:386:2360] TestWaitNotification: OK eventTxId 102 >> TOlap::StoreStats [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:38.836095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:38.836238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:38.836290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:38.836328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:38.836369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:38.836395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:38.836455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:38.836801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:38.904106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:38.904168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:38.922317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:38.922460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:38.922643Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:38.931200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:38.931422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:38.932001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:38.932263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:38.934243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:38.935437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:38.935722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:38.935867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:38.935931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:38.935970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:38.936116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:38.942698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:39.058563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:39.058826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.059038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:39.059248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:39.059297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.061798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.061950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:39.062152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.062206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T13:53:39.062240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:39.062271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:39.065447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.065543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:39.065580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:39.067642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.067723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.067784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.067830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.071481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:39.073555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:39.073783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:39.074760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:39.074918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:39.074972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.075230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:39.075273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:39.075429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.075501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:39.077847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:39.077908Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:39.078110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:39.078150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:39.078374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:39.078420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:39.078510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:39.078543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.078583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:39.078619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:39.078650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:39.078679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:53:39.078764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:39.078822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:39.078858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:39.080743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.080850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:39.080880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:39.080912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:39.080951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:39.081064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
6678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1744034028478 LastUpdateTime: 1744034028478 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 115 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 33 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1744034028478 LastUpdateTime: 1744034028478 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 115 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 33 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944 2025-04-07T13:53:55.212149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:53:55.212365Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 202us result status StatusSuccess 2025-04-07T13:53:55.212609Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1744034028478 LastUpdateTime: 1744034028478 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 115 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 33 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 
NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1744034028478 LastUpdateTime: 1744034028478 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 115 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 33 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> TTicketParserTest::LoginGood >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> BasicUsage::RecreateObserver ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-04-07T13:52:20.767826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568737352724238:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.768173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b41/r3tmp/tmpwCX0Xp/pdisk_1.dat 2025-04-07T13:52:21.038234Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:21.305982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:21.306085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:21.330233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:21.368863Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7205, node 1 2025-04-07T13:52:21.471516Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b41/r3tmp/yandex93CdxW.tmp 2025-04-07T13:52:21.471537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b41/r3tmp/yandex93CdxW.tmp 2025-04-07T13:52:21.471696Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b41/r3tmp/yandex93CdxW.tmp 2025-04-07T13:52:21.472002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:21.516688Z INFO: TTestServer started on Port 20382 GrpcPort 7205 TClient is connected to server localhost:20382 PQClient connected to localhost:7205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:21.775973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:52:21.786366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.803044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:52:21.950535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:52:24.529338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754532594053:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.529524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.529994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754532594066:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.534952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:24.582403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568754532594068:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.828881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.856682Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568754532594160:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.858218Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI4ZDQ1ODYtMTBjNDNkNi0yNmI5NjgwYS01NjAwMGJiMA==, ActorId: [1:7490568754532594050:2308], ActorState: ExecuteState, TraceId: 01jr88c4xt3e5kvj8cm5f1jfmq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.860001Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.910655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:25.034477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568758827561719:2620] 2025-04-07T13:52:25.767126Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568737352724238:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:25.767193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.244721Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.267160Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.268347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568784597365785:2776], Recipient [1:7490568741647691794:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.268390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.268407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.268445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568784597365781:2773], Recipient [1:7490568741647691794:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.268461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.370432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.370922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.371176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.371217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.371238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.371270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.371309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.371497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:52:31.371 ... 25-04-07T13:53:53.261779Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.261805Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.261822Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.261888Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7490569134599053495:2465], Partition 2, Sender [5:7490569134599053582:2478], Recipient [5:7490569134599053574:2475], Cookie: 0 2025-04-07T13:53:53.261927Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7490569134599053582:2478], Recipient [5:7490569134599053574:2475]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:53.261942Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:53.262197Z node 5 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-07T13:53:53.262288Z node 5 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/dir/origin" 2025-04-07T13:53:53.262389Z node 5 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/dir/origin 2025-04-07T13:53:53.269018Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569130304086062:2445], Partition 0, Sender [0:0:0], Recipient [5:7490569134599053420:2449], Cookie: 0 2025-04-07T13:53:53.269085Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053420:2449]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.269102Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.269138Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.269200Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.269225Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.269250Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:53.363429Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053490:2464], Partition 1, Sender [0:0:0], Recipient [5:7490569134599053569:2471], Cookie: 0 2025-04-07T13:53:53.363510Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053569:2471]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.363529Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.363570Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.363628Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.363650Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.363674Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.363721Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053495:2465], Partition 2, Sender [0:0:0], Recipient [5:7490569134599053574:2475], Cookie: 0 2025-04-07T13:53:53.363770Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053574:2475]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.363783Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.363803Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.363829Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.363841Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.363854Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.435432Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569130304086062:2445], Partition 0, Sender [0:0:0], Recipient [5:7490569134599053420:2449], Cookie: 0 2025-04-07T13:53:53.435509Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053420:2449]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.435527Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.435560Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.435613Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.435636Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.435659Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:53.464906Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053490:2464], Partition 1, Sender [0:0:0], Recipient [5:7490569134599053569:2471], Cookie: 0 2025-04-07T13:53:53.464992Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053569:2471]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.465009Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.465048Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.465109Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.465129Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.465151Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.465219Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053495:2465], Partition 2, Sender [0:0:0], Recipient [5:7490569134599053574:2475], Cookie: 0 2025-04-07T13:53:53.465265Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053574:2475]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.465278Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.465297Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.465324Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.465336Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.465350Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.535026Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569130304086062:2445], Partition 0, Sender [0:0:0], Recipient [5:7490569134599053420:2449], Cookie: 0 2025-04-07T13:53:53.535101Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053420:2449]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.535121Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.535172Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.535232Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.535258Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.535286Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:53.581127Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053490:2464], Partition 1, Sender [0:0:0], Recipient [5:7490569134599053569:2471], Cookie: 0 2025-04-07T13:53:53.581200Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053569:2471]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.581221Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.581259Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.581319Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.581346Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.581371Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:53.581422Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569134599053495:2465], Partition 2, Sender [0:0:0], Recipient [5:7490569134599053574:2475], Cookie: 0 2025-04-07T13:53:53.581474Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569134599053574:2475]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.581489Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:53.581514Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:53.581548Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:53.581564Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:53.581583Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-04-07T13:53:19.631255Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.631285Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.631304Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.632313Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.657025Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.657335Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.657719Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.658146Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.658243Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.658348Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.658382Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T13:53:19.659307Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.659326Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.659345Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.659741Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.676631Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.676856Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.677298Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.677763Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.677908Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.678188Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.678236Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T13:53:19.679414Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.679436Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.679454Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.679889Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.680763Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.680875Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.681131Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.681854Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.682010Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.682104Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.682142Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T13:53:19.683095Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.683260Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.683281Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.683695Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.684920Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.685022Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.685262Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.686783Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.687553Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.697731Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.697802Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T13:53:19.699114Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.699135Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.699155Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.704871Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T13:53:19.705476Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.705614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.707058Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.707540Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.707673Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.707778Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.707837Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T13:53:19.708728Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.708748Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.708788Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.709101Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.721452Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.721634Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.723872Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.724404Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.724540Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.728931Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.728994Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T13:53:19.733692Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.733716Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.733741Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.734087Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.745093Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.745275Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.748905Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.749739Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.749940Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.750022Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T13:53:19.750068Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T13:53:19.751197Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.751222Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.751244Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:19.760909Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:53:19.772905Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:53:19.773100Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.780970Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:53:19.782746Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:19.783237Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:53:19.783331Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:53:19.783375Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T13:53:19.820471Z :ReadSession INFO: Random seed for debugging is 1744033999820438 2025-04-07T13:53:20.653159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568996167090159:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:20.653202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:20.763286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568995745252525:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:21.320879Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/bui ... r ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-07T13:53:44.140202Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1744034024024 CreateTimestampMS: 1744034024011 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 2 } 2025-04-07T13:53:44.140327Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 4 from offset3 2025-04-07T13:53:44.140368Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 930fc2cd-da2a73f1-3da9aa95-b55c7152 has messages 1 2025-04-07T13:53:44.140538Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 read done: guid# 930fc2cd-da2a73f1-3da9aa95-b55c7152, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2025-04-07T13:53:44.140583Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 response to read: guid# 930fc2cd-da2a73f1-3da9aa95-b55c7152 2025-04-07T13:53:44.140970Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 Process answer. Aval parts: 0 2025-04-07T13:53:44.141846Z :DEBUG: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:44.144750Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-07T13:53:44.144844Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-07T13:53:44.011000Z WriteTime: 2025-04-07T13:53:44.024000Z Ip: "ipv6:[::1]:58132" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:58132" } } } 2025-04-07T13:53:44.148873Z :DEBUG: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-04-07T13:53:44.149195Z :DEBUG: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-07T13:53:44.149153Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T13:53:44.149263Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 got read request: guid# a8bfc2fc-fc493d0d-da764ce5-9ce3f2cc 2025-04-07T13:53:44.154713Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-04-07T13:53:44.154929Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-04-07T13:53:44.156927Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:53:44.156966Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:53:44.157079Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_3497060675755757612_v1 2025-04-07T13:53:44.157195Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:53:44.167496Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:53:44.167563Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:53:44.167618Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-07T13:53:44.168094Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-04-07T13:53:44.168136Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-04-07T13:53:44.168188Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2025-04-07T13:53:44.169451Z :DEBUG: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-04-07T13:53:44.213605Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0] Write session will now close 2025-04-07T13:53:44.213655Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0] Write session: aborting 2025-04-07T13:53:44.215014Z :INFO: [] MessageGroupId [test-message-group-id] SessionId 
[test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:53:44.215153Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0] Write session is aborting and will not restart 2025-04-07T13:53:44.215207Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0] Write session: destroy 2025-04-07T13:53:44.215250Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0 grpc read done: success: 0 data: 2025-04-07T13:53:44.215277Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0 grpc read failed 2025-04-07T13:53:44.215303Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0 grpc closed 2025-04-07T13:53:44.215319Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|1d3c0fe4-a88719f-4d783cdd-6d253367_0 is DEAD 2025-04-07T13:53:44.215933Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:53:44.216363Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:44.216399Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490569094951340749:2575] destroyed 2025-04-07T13:53:44.216438Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:53:45.920922Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:46.238679Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-07T13:53:50.919628Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:54.143540Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-07T13:53:54.228746Z :INFO: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] Closing read session. Close timeout: 0.000000s 2025-04-07T13:53:54.228822Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-07T13:53:54.228871Z :INFO: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] Counters: { Errors: 0 CurrentSessionLifetimeMs: 17143 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:53:54.228993Z :NOTICE: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:53:54.229036Z :DEBUG: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] [dc1] Abort session to cluster 2025-04-07T13:53:54.233086Z :NOTICE: [/Root] [/Root] [b3e49cc4-6092bab-f2ef719d-c10d3890] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:53:54.252564Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 grpc read done: success# 0, data# { } 2025-04-07T13:53:54.256920Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 grpc read failed 2025-04-07T13:53:54.258531Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:54.258563Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_3497060675755757612_v1 2025-04-07T13:53:54.258590Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490569069181536434:2497] destroyed 2025-04-07T13:53:54.258648Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_3497060675755757612_v1 2025-04-07T13:53:54.257000Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 grpc closed 2025-04-07T13:53:54.257050Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3497060675755757612_v1 is DEAD 2025-04-07T13:53:54.259339Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490569069181536431:2494] disconnected; active server actors: 1 2025-04-07T13:53:54.259371Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490569069181536431:2494] client user disconnected session shared/user_1_1_3497060675755757612_v1 2025-04-07T13:53:54.939484Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7490569142195981590:2675], TxId: 281474976710722, task: 1, CA Id [1:7490569142195981588:2675]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 >> TTicketParserTest::AccessServiceAuthenticationOk >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TTicketParserTest::AuthorizationRetryError >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> TTicketParserTest::BulkAuthorizationRetryError >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn |86.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> Cdc::UuidExchange[YdsRunner] [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> Cdc::UuidExchange[TopicRunner] >> TTicketParserTest::LoginGoodWithGroups >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpScanArrowInChanels::AggregateWithFunction >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization |86.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::AllEqual [GOOD] Test command err: 2025-04-07T13:50:35.447677Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:35.447749Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:50:36.120471Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:50:36.120518Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST === Server->StartServer(false); 2025-04-07T13:50:36.722766Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490568289281846583:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:36.722805Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001482/r3tmp/tmpH6uk8G/pdisk_1.dat 2025-04-07T13:50:37.146085Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:37.223527Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:37.252516Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:37.596346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-07T13:50:37.603874Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:37.618096Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:37.618186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:37.645443Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T13:50:37.645687Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:37.647164Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:37.749508Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:37.813919Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1475, node 3 2025-04-07T13:50:37.973500Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001482/r3tmp/yandexyJdSIt.tmp 2025-04-07T13:50:37.973528Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001482/r3tmp/yandexyJdSIt.tmp 2025-04-07T13:50:37.973709Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001482/r3tmp/yandexyJdSIt.tmp 2025-04-07T13:50:37.973863Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:38.120588Z INFO: TTestServer started on Port 9406 GrpcPort 1475 TClient is connected to server localhost:9406 PQClient connected to localhost:1475 === TenantModeEnabled() = 0 === Init PQ - start server on port 1475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:38.711453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:50:38.711672Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.711898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T13:50:38.712177Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:38.712268Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:38.714636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:38.714753Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:38.714934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.714967Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:38.714987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-07T13:50:38.714997Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T13:50:38.715849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:38.715895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-07T13:50:38.715912Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:38.716780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.716815Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:38.716843Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:38.719393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.719418Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:38.719443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 
2025-04-07T13:50:38.719477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-07T13:50:38.737432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:50:38.741479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-07T13:50:38.741644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T13:50:38.744700Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033838792, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:38.744877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7490568293576814487 RawX2: 12884904245 } } Step: 1744033838792 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:50:38.744909Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:38.745170Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:38.745218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:38.745438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:50:38.745510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:38.747241Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:38.747255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:38.747388Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:38.747403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7490568293576814554:2411], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-07T13:50:38.747436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshar ... 
04-07T13:53:50.481704Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:53:50.481798Z node 19 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 0 MaxSeqNo: 2 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 2025-04-07T13:53:50.483933Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:53:50.484192Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T13:53:50.484409Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2025-04-07T13:53:50.484437Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2025-04-07T13:53:50.484528Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:53:50.484578Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:53:50.488361Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2025-04-07T13:53:50.488407Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2025-04-07T13:53:50.488466Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message topic: rt3.dc1--account--topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 370 offset: -1 2025-04-07T13:53:50.488564Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Send write quota request. Topic: "rt3.dc1--account--topic". Partition: 0. Amount: 374. 
Cookie: 3 2025-04-07T13:53:50.891323Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:51.488800Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7490569124367688978:2692] (SourceId=123, PreferedPartition=(NULL)) Update the table 2025-04-07T13:53:51.675221Z node 19 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037892 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 14] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T13:53:51.775632Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:53:51.775758Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 14 shard idx 72057594046644480:5 data size 0 row count 0 2025-04-07T13:53:51.775819Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037892 maps to shardIdx: 72057594046644480:5 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 14], pathId map=user, is column=0, is olap=0 2025-04-07T13:53:51.775843Z node 19 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037892 followerId=0, pathId 14: RowCount 0, DataSize 0 2025-04-07T13:53:51.776934Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:53:51.859962Z node 19 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0815 2025-04-07T13:53:51.963439Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:53:51.963574Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-07T13:53:51.963631Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=SourceIdMeta2, is column=0, is olap=0 2025-04-07T13:53:51.963655Z node 19 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-04-07T13:53:51.973309Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:53:52.072583Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7490569124367688978:2692] (SourceId=123, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-04-07T13:53:52.072629Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7490569124367688978:2692] (SourceId=123, PreferedPartition=(NULL)) Start idle 2025-04-07T13:53:52.357541Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:53.305968Z node 19 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] state 'Ready' dataSize 776 rowCount 2 cpuUsage 0.2787 2025-04-07T13:53:53.405102Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:53:53.405236Z node 19 
:FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 10 shard idx 72057594046644480:2 data size 776 row count 2 2025-04-07T13:53:53.405295Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037889 maps to shardIdx: 72057594046644480:2 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], pathId map=Cluster, is column=0, is olap=0 2025-04-07T13:53:53.405319Z node 19 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 10: RowCount 2, DataSize 776 2025-04-07T13:53:53.408934Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:53:53.568501Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Got quota. Topic: "rt3.dc1--account--topic". Partition: 0: Cookie: 3 2025-04-07T13:53:53.568708Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-04-07T13:53:53.585264Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 441 count 1 nextOffset 3 batches 1 2025-04-07T13:53:53.585904Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 429 WTime 1744034033585 2025-04-07T13:53:53.586139Z node 19 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:53:53.594150Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 374 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:53:53.594214Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:53:53.594269Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T13:53:53.594548Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:53:53.594646Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:53:53.598984Z :DEBUG: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 8 queued_in_partition_duration_ms: 3096 throttled_on_topic_duration_ms: 3080 } 2025-04-07T13:53:53.599043Z :DEBUG: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session: acknoledged message 1 2025-04-07T13:53:53.637249Z :INFO: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session will now close 2025-04-07T13:53:53.637323Z :DEBUG: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session: aborting 2025-04-07T13:53:53.637913Z :INFO: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:53:53.637964Z :DEBUG: [] MessageGroupId [123] SessionId [123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0] Write session: destroy 2025-04-07T13:53:53.643242Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 grpc read done: success: 0 data: 2025-04-07T13:53:53.643281Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 grpc read failed 2025-04-07T13:53:53.643309Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 grpc closed 2025-04-07T13:53:53.643337Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|5e5b6abf-4facfbdc-43897b0d-a3e3bc48_0 is DEAD 2025-04-07T13:53:53.645428Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:53.651218Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:53:53.655267Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:53.655339Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] server disconnected, pipe [19:7490569124367689026:2692] destroyed 2025-04-07T13:53:53.655407Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T13:53:53.959683Z node 19 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 832 rowCount 2 cpuUsage 0.6365 2025-04-07T13:53:54.059827Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:53:54.059977Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 12 shard idx 72057594046644480:4 data size 832 row count 2 2025-04-07T13:53:54.060044Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], pathId map=Versions, is column=0, is olap=0 2025-04-07T13:53:54.060072Z node 19 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 12: RowCount 2, DataSize 832 2025-04-07T13:53:54.060628Z node 19 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:53:54.141082Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:53:54.412781Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> YdbOlapStore::DuplicateRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2025-04-07T13:52:22.417519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568744085741579:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:22.417556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:22.664979Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ade/r3tmp/tmpVOdi8D/pdisk_1.dat 2025-04-07T13:52:22.924354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:22.924487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:22.925841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:22.929696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3699, node 1 2025-04-07T13:52:23.426844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001ade/r3tmp/yandexR6MnkW.tmp 2025-04-07T13:52:23.426871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001ade/r3tmp/yandexR6MnkW.tmp 2025-04-07T13:52:23.427033Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001ade/r3tmp/yandexR6MnkW.tmp 2025-04-07T13:52:23.427163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:23.479280Z INFO: TTestServer started on Port 7872 GrpcPort 3699 TClient is connected to 
server localhost:7872 PQClient connected to localhost:3699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:23.834514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:23.853091Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:23.866585Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:23.872178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:52:25.966317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568756970644242:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.966431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.966520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568756970644263:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.970157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:25.983072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568756970644265:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:26.209280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:26.269157Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568761265611651:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:26.271197Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJkOWYyYmItMzQ5ODBlNjktOWQwNjliY2EtYmM2MjdkMWQ=, ActorId: [1:7490568756970644223:2307], ActorState: ExecuteState, TraceId: 01jr88c6ba7dhfntz0w2jseyv0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:26.273653Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:26.276209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:26.347599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568761265611909:2618] 2025-04-07T13:52:27.419661Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568744085741579:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:27.419728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.597137Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.616681Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.617882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568782740448636:2756], Recipient [1:7490568744085741989:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.617920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.617946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.618000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568782740448632:2753], Recipient [1:7490568744085741989:2191]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-07T13:52:31.618014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.701207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.701796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.702098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.702137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.702176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.702215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.702258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.702439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:52:31.70288 ... 4Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.551580Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.551613Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.551651Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.551725Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569105731010153:2421], Partition 0, Sender [0:0:0], Recipient [6:7490569105731010214:2425], Cookie: 0 2025-04-07T13:53:58.551768Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569105731010214:2425]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.551786Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.551816Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.551854Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.551874Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.551892Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.551934Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569148680684357:2740], Partition 2, Sender [0:0:0], Recipient [6:7490569148680684447:2750], Cookie: 0 2025-04-07T13:53:58.551965Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569148680684447:2750]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.551981Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.552003Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.552036Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.552051Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.552080Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:58.645121Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [6:7490569148680684357:2740], Partition 2, Sender [6:7490569148680684456:2756], Recipient [6:7490569148680684447:2750], Cookie: 0 2025-04-07T13:53:58.645214Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [6:7490569148680684456:2756], Recipient [6:7490569148680684447:2750]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:58.645245Z node 6 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:58.651731Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569148680684358:2741], Partition 1, Sender [0:0:0], Recipient [6:7490569148680684449:2752], Cookie: 0 2025-04-07T13:53:58.651887Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569148680684449:2752]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.651918Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.651974Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.652105Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.652166Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.652211Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.652272Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569105731010153:2421], Partition 0, Sender [0:0:0], Recipient [6:7490569105731010214:2425], Cookie: 0 2025-04-07T13:53:58.652336Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569105731010214:2425]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.652354Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.656782Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.656892Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.656923Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.656961Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:53:58.657075Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569148680684357:2740], Partition 2, Sender [0:0:0], Recipient [6:7490569148680684447:2750], Cookie: 0 2025-04-07T13:53:58.657142Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569148680684447:2750]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.657173Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.657215Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.657246Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.657262Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.657278Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.688850Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7490569049896434249:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:58.688909Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:53:58.688998Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7490569049896434249:2145], Recipient [6:7490569049896434249:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:58.689022Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:53:58.728879Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [6:7490569148680684358:2741], Partition 1, Sender [6:7490569148680684471:2761], Recipient [6:7490569148680684449:2752], Cookie: 0 2025-04-07T13:53:58.728975Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [6:7490569148680684471:2761], Recipient [6:7490569148680684449:2752]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:58.729002Z node 6 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-07T13:53:58.752825Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569148680684358:2741], Partition 1, Sender [0:0:0], Recipient [6:7490569148680684449:2752], Cookie: 0 2025-04-07T13:53:58.752918Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569148680684449:2752]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.752949Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.753001Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.753081Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.753110Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.753145Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.753204Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569105731010153:2421], Partition 0, Sender [0:0:0], Recipient [6:7490569105731010214:2425], Cookie: 0 2025-04-07T13:53:58.753240Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569105731010214:2425]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.753252Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.753273Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.753303Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.753319Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.753335Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:53:58.759333Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7490569148680684357:2740], Partition 2, Sender [0:0:0], Recipient [6:7490569148680684447:2750], Cookie: 0 2025-04-07T13:53:58.759432Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7490569148680684447:2750]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.759466Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:53:58.759530Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:53:58.759628Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:53:58.759660Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:53:58.759699Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |86.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> TargetDiscoverer::SystemObjects >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-07T13:53:15.924493Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1744033995924459 2025-04-07T13:53:16.763016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568977553590398:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:16.763057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:17.348588Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011ed/r3tmp/tmppOvBvD/pdisk_1.dat 2025-04-07T13:53:17.499020Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:17.649803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path 
status: LookupError; 2025-04-07T13:53:17.984554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:18.110726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:18.110831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.117214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:18.117288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.148476Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:18.148629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:18.159214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:18.226465Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27117, node 1 2025-04-07T13:53:18.697467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0011ed/r3tmp/yandexcpACAp.tmp 2025-04-07T13:53:18.697500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0011ed/r3tmp/yandexcpACAp.tmp 2025-04-07T13:53:18.697651Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0011ed/r3tmp/yandexcpACAp.tmp 2025-04-07T13:53:18.697759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:18.880175Z INFO: TTestServer started on Port 26443 GrpcPort 27117 TClient is connected to server localhost:26443 PQClient connected to localhost:27117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:19.939938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:53:20.112986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:21.767366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568977553590398:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:21.772879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:25.195536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569016208296871:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.195644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.194949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569016408661373:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.200752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569016408661348:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.200897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.207333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:53:25.210034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569016208296898:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.326093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569016408661377:2290], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:53:25.354565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569016208296906:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:53:25.881081Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569016208297007:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:25.882974Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGRkODFjNTYtODMxN2ZhNGQtMmQwYTdlZjAtMTk4OTU3NGQ=, ActorId: [1:7490569016208296868:2311], ActorState: ExecuteState, TraceId: 01jr88e05m6h3zk8za8fjyk9kd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:25.886342Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569016408661412:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:25.887997Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDEwMTY4NTgtNGI1NjBmYjEtMTBmOTZlYWItMWUwZmZiNzE=, ActorId: [2:7490569016408661346:2285], ActorState: ExecuteState, TraceId: 01jr88e04x3vecvef2471k00jk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:25.888280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.893018Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:25.893219Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:26.189548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:26.573833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27117", true, true, 1000); 2025-04-07T13:53:27.102997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88e1p60rns4sksx5t75b03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQyYjY1ZTktZmJiMTllZDEtODljYjBkY2YtMWNkYTlhNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490569024798232036:3015] 2025-04-07T13:53:33.201021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:53:33.201051Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok 2025-04-07T13:53:33.811293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... 
13:53:59.842628Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid d7d2dd15-ae11c58f-7c011e7d-783b0290 has messages 1 2025-04-07T13:53:59.842744Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 read done: guid# d7d2dd15-ae11c58f-7c011e7d-783b0290, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-04-07T13:53:59.842772Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 response to read: guid# d7d2dd15-ae11c58f-7c011e7d-783b0290 2025-04-07T13:53:59.843063Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 Process answer. Aval parts: 0 2025-04-07T13:53:59.850104Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T13:53:59.850163Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 grpc closed 2025-04-07T13:53:59.850211Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2896075865000164670_v1 is DEAD 2025-04-07T13:53:59.850970Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569159226017603:2527] disconnected; active server actors: 1 2025-04-07T13:53:59.850994Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569159226017603:2527] client user disconnected session shared/user_3_1_2896075865000164670_v1 2025-04-07T13:53:59.851258Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:53:59.851291Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_2896075865000164670_v1 2025-04-07T13:53:59.851329Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490569159226017606:2530] destroyed 2025-04-07T13:53:59.851384Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_2896075865000164670_v1 2025-04-07T13:54:00.061148Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:00.678925Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569167815952401:2564], TxId: 281474976715697, task: 1, CA Id [3:7490569167815952399:2564]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:54:00.710795Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569167815952401:2564], TxId: 281474976715697, task: 1, CA Id [3:7490569167815952399:2564]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:00.775144Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569167815952401:2564], TxId: 281474976715697, task: 1, CA Id [3:7490569167815952399:2564]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:00.854227Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569167815952401:2564], TxId: 281474976715697, task: 1, CA Id [3:7490569167815952399:2564]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:00.969955Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569167815952401:2564], TxId: 281474976715697, task: 1, CA Id [3:7490569167815952399:2564]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:02.407837Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.407869Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.407907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:54:02.416960Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:54:02.428984Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:54:02.429224Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.431207Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-07T13:54:02.434353Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.434386Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.434422Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:54:02.436114Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:54:02.455436Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:54:02.455687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.456995Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:54:02.458220Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:54:02.464936Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-07T13:54:02.465065Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-07T13:54:02.467643Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:54:02.467703Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T13:54:02.467732Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T13:54:02.467805Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T13:54:02.472824Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.472870Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.472904Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:54:02.478751Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:54:02.485272Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:54:02.485507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.487880Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:54:02.488845Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.492874Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:54:02.496821Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:54:02.496924Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:54:02.497049Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-04-07T13:54:02.503688Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.503723Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.503754Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:54:02.509269Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:54:02.517008Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:54:02.517220Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:02.520978Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T13:54:02.521894Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:54:02.522640Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T13:54:02.522842Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T13:54:02.522920Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:54:02.523026Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T13:54:02.523064Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T13:54:02.524816Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:54:02.524876Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T13:54:04.561400Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:04.561446Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:04.561481Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:54:04.572878Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T13:54:04.581194Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T13:54:04.581376Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:04.583400Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:04.583618Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:54:04.583679Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T13:54:04.583773Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::DuplicateRows [GOOD] Test command err: 2025-04-07T13:48:47.402575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567821468824844:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:48:47.402693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a45/r3tmp/tmp8qcHzi/pdisk_1.dat 2025-04-07T13:48:48.113839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:48:48.113950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:48:48.123065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:48:48.163121Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14348, node 1 2025-04-07T13:48:48.201180Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:48.201210Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:48:48.212623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:48:48.221671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:48.221860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:48:48.241270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.260561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:48.265444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:48.265600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:48:48.267830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:48:48.679137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:48:48.679156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:48:48.679168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:48:48.679291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:48:49.028063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.048804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:49.057449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.067307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:48:49.067472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:48:49.067482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:48:49.070136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:48:49.070153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:48:49.074746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:48:49.085480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033729123, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:48:49.085515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:48:49.085805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:48:49.093084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:48:49.093662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:49.093831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:48:49.093883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:48:49.093961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:48:49.094002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:48:49.094056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:48:49.096624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:48:49.097907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:48:49.097932Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:48:49.098017Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:48:49.282262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:43914" , at schemeshard: 72057594046644480 2025-04-07T13:48:49.282682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.283248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:48:49.283289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T13:48:49.283356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:48:49.283429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T13:48:49.283473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T13:48:49.283551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T13:48:49.283873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-07T13:48:49.285460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:48:49.285749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:48:49.285770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:48:49.285925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:48:49.285994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 6 2025-04-07T13:48:49.297480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-07T13:48:49.297654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-07T13:48:49.297922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:48:49.297940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:48:49.298089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPubli ... iedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7490569162615514506 RawX2: 4503904570052291 } } DstEndpoint { ActorId { RawX1: 7490569162615514503 RawX2: 4503904570052176 } } InMemory: true } 2025-04-07T13:53:59.239031Z node 71 :KQP_COMPUTE DEBUG: SelfId: [71:7490569162615514506:3779], TxId: 281474976710764, task: 1. Ctx: { SessionId : ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=. TraceId : 01jr88ezwy1dxjwwgqsfvzpz3d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T13:53:59.239180Z node 71 :KQP_COMPUTE DEBUG: SelfId: [71:7490569162615514506:3779], TxId: 281474976710764, task: 1. Ctx: { SessionId : ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=. TraceId : 01jr88ezwy1dxjwwgqsfvzpz3d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [71:7490569162615514503:3664] TaskId: 1 Stats: CpuTimeUs: 779 Tasks { TaskId: 1 CpuTimeUs: 615 FinishTimeMs: 1744034039238 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 79 BuildCpuTimeUs: 536 HostName: "ghrun-lqtfcktism" NodeId: 71 StartTimeMs: 1744034039238 } MaxMemoryUsage: 1048576 2025-04-07T13:53:59.239215Z node 71 :KQP_COMPUTE DEBUG: SelfId: [71:7490569162615514506:3779], TxId: 281474976710764, task: 1. Ctx: { SessionId : ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=. TraceId : 01jr88ezwy1dxjwwgqsfvzpz3d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T13:53:59.239258Z node 71 :KQP_COMPUTE DEBUG: TxId: 281474976710764, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T13:53:59.239337Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [71:7490569162615514506:3779], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T13:53:59.239370Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [71:7490569162615514506:3779], 2025-04-07T13:53:59.239540Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [71:7490569154025579524:3664], seqNo: 1, nRows: 1 2025-04-07T13:53:59.239699Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [71:7490569162615514506:3779], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 779 Tasks { TaskId: 1 CpuTimeUs: 615 FinishTimeMs: 1744034039238 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 79 BuildCpuTimeUs: 536 HostName: "ghrun-lqtfcktism" NodeId: 71 StartTimeMs: 1744034039238 } MaxMemoryUsage: 1048576 } 2025-04-07T13:53:59.239727Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [71:7490569162615514506:3779], 2025-04-07T13:53:59.239777Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: ExecuteState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Forwarded TEvStreamData to [71:7490569154025579522:3663] 2025-04-07T13:53:59.240254Z node 71 :KQP_EXECUTER DEBUG: TxId: 281474976710764, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [71:7490569162615514507:3779] 2025-04-07T13:53:59.240315Z node 71 :KQP_COMPUTE DEBUG: SelfId: [71:7490569162615514506:3779], TxId: 281474976710764, task: 1. Ctx: { SessionId : ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=. TraceId : 01jr88ezwy1dxjwwgqsfvzpz3d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T13:53:59.240350Z node 71 :KQP_COMPUTE DEBUG: TxId: 281474976710764, task: 1. Tasks execution finished 2025-04-07T13:53:59.240366Z node 71 :KQP_COMPUTE DEBUG: SelfId: [71:7490569162615514506:3779], TxId: 281474976710764, task: 1. Ctx: { SessionId : ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=. TraceId : 01jr88ezwy1dxjwwgqsfvzpz3d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-07T13:53:59.240453Z node 71 :KQP_COMPUTE DEBUG: TxId: 281474976710764, task: 1. pass away 2025-04-07T13:53:59.240545Z node 71 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710764;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T13:53:59.240970Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [71:7490569162615514506:3779], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1529 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 619 FinishTimeMs: 1744034039240 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 83 BuildCpuTimeUs: 536 HostName: "ghrun-lqtfcktism" NodeId: 71 StartTimeMs: 1744034039238 } MaxMemoryUsage: 1048576 } 2025-04-07T13:53:59.241004Z node 71 :KQP_EXECUTER INFO: TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [71:7490569162615514506:3779] 2025-04-07T13:53:59.241094Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T13:53:59.241138Z node 71 :KQP_EXECUTER DEBUG: ActorId: [71:7490569162615514503:3664] TxId: 281474976710764. Ctx: { TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001529s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T13:53:59.241197Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: ExecuteState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:53:59.241484Z node 71 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: ExecuteState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 723.671 QueriesCount: 1 2025-04-07T13:53:59.241539Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: ExecuteState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:53:59.241633Z node 71 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: ExecuteState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 RESULT: [[322u]] --------------------- STATS: total CPU: 1068 duration: 700941 usec cpu: 779236 usec { name: "/Root/OlapStore/log1" reads { rows: 2 bytes: 16 } } duration: 5303 usec cpu: 2930 usec 2025-04-07T13:53:59.242084Z node 71 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038005, txId: 18446744073709551615] shutting down 2025-04-07T13:53:59.242392Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: CleanupState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, EndCleanup, isFinal: 1 2025-04-07T13:53:59.242454Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: CleanupState, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Sent query response back to proxy, proxyRequestId: 2, proxyId: [71:7490568909212436873:2173] 2025-04-07T13:53:59.242490Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: unknown state, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Cleanup temp tables: 0 2025-04-07T13:53:59.242987Z node 71 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=71&id=OTY1MDlmMzktYmU0YTUzN2MtZjQzZmE4NzAtYTg3OTEwMDU=, ActorId: [71:7490569154025579524:3664], ActorState: unknown state, TraceId: 01jr88ezwy1dxjwwgqsfvzpz3d, Session actor destroyed 2025-04-07T13:53:59.292802Z node 71 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[71:7490568917802372413:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:53:59.292803Z node 71 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[71:7490568917802372414:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:53:59.292861Z node 71 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[71:7490568917802372470:2294];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:53:59.299556Z 
node 71 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[71:7490568917802372425:2293];fline=actor.cpp:33;event=skip_flush_writing; >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> TargetDiscoverer::InvalidCredentials >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:05.939192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:05.939278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:05.939313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:05.939365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:05.939425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:05.939458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:05.939519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:05.939820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:06.020288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:06.020368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:06.033247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:06.033389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:06.033541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:06.039999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:06.040236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:06.040843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:06.041062Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:06.043004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:06.044262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:06.044323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:06.044434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:06.044507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:06.044564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:06.044780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.053731Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:06.220016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:06.220275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.220557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:06.220845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:06.220924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.222972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:06.223097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:06.223239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.223289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:06.223330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:06.223359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:06.224965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.225012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:06.225051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:06.226775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.226817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.226851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:06.226895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:06.232019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:06.234499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:06.234700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:06.235673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:06.235806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:06.235858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:06.236114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:06.236162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:06.236319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:06.236409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:06.238376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:06.238442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:06.238604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:06.238656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-04-07T13:54:06.238878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.238925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:06.239048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:06.239084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:06.239125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:06.239168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:06.239219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:06.239251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:06.239307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:06.239340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:06.239368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:06.251343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:06.251469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:06.251500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:06.251546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:06.251583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:06.251684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
SchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:06.965896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:54:06.966214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:06.966262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:06.966647Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-07T13:54:06.969039Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-04-07T13:54:06.969094Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:54:06.969548Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:61522 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0095AB4E-19CC-46FF-8AF3-35AEE6AE0CBE amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T13:54:06.979125Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2025-04-07T13:54:06.985201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:54:06.985332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:54:06.985369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:54:06.985420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:54:06.985465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:54:06.985592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:61522 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 93235FA0-606B-486F-BD7A-73BEDEAACA50 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-07T13:54:06.990426Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-07T13:54:06.991791Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:465:2428] 2025-04-07T13:54:06.991894Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:466:2429], sender# [1:465:2428], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:61522 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0E0985B8-EAB9-4DBB-A731-8DD1BFBDB929 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-07T13:54:06.996812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:54:06.997440Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2429], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-07T13:54:06.997516Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:466:2429], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:54:06.997660Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:465:2428], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:54:07.018492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.018581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:54:07.018764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.018864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 4294969594 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.018916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:07.019047Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:07.019635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.019669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T13:54:07.019763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.019821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 4294969596 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T13:54:07.019852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:07.019878Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:07.019913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:54:07.019998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:54:07.020025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:54:07.020152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:07.022806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:07.023168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:07.023286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:07.023323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:54:07.023422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:54:07.023454Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:07.023495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:54:07.023553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:374:2339] message: TxId: 102 2025-04-07T13:54:07.023593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:07.023626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:54:07.023695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:54:07.023833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:54:07.025626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:54:07.025686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:444:2408] TestWaitNotification: OK eventTxId 102 >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> TargetDiscoverer::SystemObjects [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> TTicketParserTest::AuthorizationModify [GOOD] >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] Test command err: 2025-04-07T13:53:47.901734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569111685000210:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a6/r3tmp/tmpqu1zbi/pdisk_1.dat 2025-04-07T13:53:48.288519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:48.640898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:48.654633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:48.660891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:48.664633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8877, node 1 2025-04-07T13:53:48.841356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:48.841379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:48.841385Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:48.841479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:49.549984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:49.591430Z node 1 :TICKET_PARSER DEBUG: Ticket BE0131BE4DBF7E554241CAEEA5EFC0B91669805E () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-07T13:53:53.750772Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569136995300555:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:53.843742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a6/r3tmp/tmpLL6tYu/pdisk_1.dat 2025-04-07T13:53:54.074901Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:54.109098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:54.109183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:54.120204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1035, node 2 2025-04-07T13:53:54.325444Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:54.325463Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:54.325469Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:54.325545Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3870 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:55.049490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:55.060131Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:53:55.063462Z node 2 :TICKET_PARSER DEBUG: Ticket 05DFE5F26DD51BB106B39AF55FF43EEC7CC8A7B9 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-07T13:53:59.939048Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569162892263286:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:59.939191Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a6/r3tmp/tmpk0zPdb/pdisk_1.dat 2025-04-07T13:54:00.223320Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:00.277940Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:00.278017Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:00.287085Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8371, node 3 2025-04-07T13:54:00.441299Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:00.441327Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:00.441335Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:00.441442Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61338 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:01.088416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:01.098506Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:01.101907Z node 3 :TICKET_PARSER DEBUG: Ticket 8AC9BC409E3E8FFA867DA9E990567BD4B6251218 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-07T13:54:01.102701Z node 3 :TICKET_PARSER ERROR: Ticket 8AC9BC409E3E8FFA867DA9E990567BD4B6251218: Cannot create token from certificate. Client certificate failed verification 2025-04-07T13:54:04.949422Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569186358114082:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:04.949584Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a6/r3tmp/tmpJyyusX/pdisk_1.dat 2025-04-07T13:54:05.444395Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:05.444486Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:05.448992Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:05.474021Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15024, node 4 2025-04-07T13:54:05.663101Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:05.663125Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:05.663138Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:05.663243Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19776 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:06.126928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:06.153139Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:06.156426Z node 4 :TICKET_PARSER DEBUG: Ticket AF9FD640BB95938BE3E7DAD71BF2C98C2E358F9F () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-04-07T13:54:05.857143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569189581691340:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:05.857200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aed/r3tmp/tmpgjJv4e/pdisk_1.dat 2025-04-07T13:54:06.664692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:06.664774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:06.701739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:06.702459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12335 TServer::EnableGrpc on GrpcPort 28625, node 1 2025-04-07T13:54:07.137275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:07.137305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:07.137310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:07.137400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12335 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:07.519981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:07.537924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:07.541481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:54:07.684039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:07.806333Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034047574, tx_id: 1 } } } 2025-04-07T13:54:07.806363Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-07T13:54:07.828975Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034047665, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034047721, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-07T13:54:07.829002Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-07T13:54:09.565183Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034047665, tx_id: 281474976710658 } } } 2025-04-07T13:54:09.565216Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-07T13:54:09.565234Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 29898, MsgBus: 26923 2025-04-07T13:53:13.915700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568964813902558:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:13.917728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e9c/r3tmp/tmpncDKcV/pdisk_1.dat 2025-04-07T13:53:14.686431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:14.686507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:14.701979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:14.716854Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29898, node 1 2025-04-07T13:53:15.085278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:15.085302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:15.085315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:15.085406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26923 TClient is connected to server localhost:26923 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:16.632055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:16.689325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:16.700689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:53:17.070758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:17.861042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:18.220282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:18.990766Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568964813902558:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:18.992340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:22.192520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569003468609805:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.201656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.587253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.628169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.672894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.717302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.754238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.843541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.948986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569003468610315:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.949077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.949548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569003468610320:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:22.960388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:23.002622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569003468610322:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:25.863016Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034005686, txId: 281474976710673] shutting down 2025-04-07T13:53:25.863839Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034005686, txId: 281474976710674] shutting down 2025-04-07T13:53:25.865903Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034005686, txId: 281474976710671] shutting down 2025-04-07T13:53:25.866375Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034005686, txId: 281474976710672] shutting down 2025-04-07T13:53:26.424647Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034006372, txId: 281474976710679] shutting down 2025-04-07T13:53:26.642848Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034006372, txId: 281474976710680] shutting down 2025-04-07T13:53:26.840141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034006785, txId: 281474976710684] shutting down 2025-04-07T13:53:27.069711Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034006785, txId: 281474976710683] shutting down 2025-04-07T13:53:27.136382Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007142, txId: 281474976710687] shutting down 2025-04-07T13:53:27.139830Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007135, txId: 281474976710688] shutting down 2025-04-07T13:53:27.726585Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007492, txId: 281474976710691] shutting down 2025-04-07T13:53:27.904618Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007751, txId: 281474976710693] shutting down 2025-04-07T13:53:27.905187Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007751, txId: 281474976710694] shutting down 2025-04-07T13:53:28.094885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007940, txId: 281474976710698] shutting down 2025-04-07T13:53:28.095654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034007940, txId: 281474976710697] shutting down 2025-04-07T13:53:28.286030Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034008234, txId: 281474976710701] shutting down 2025-04-07T13:53:28.286164Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034008234, txId: 281474976710702] shutting down 2025-04-07T13:53:28.357389Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034008339, txId: 281474976710705] shutting down 2025-04-07T13:53:28.358299Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034008339, txId: 281474976710706] shutting down 2025-04-07T13:53:28.865796Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744034008787, txId: 281474976710710] shutting down 2025-04-07T13:53:28.866362Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034008787, txId: 281474976710709] shutting down 2025-04-07T13:53:29.539816Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034009410, txId: 281474976710713] shutting down 2025-04-07T13:53:29.610856Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034009571, txId: 281474976710716] shutting down 2025-04-07T13:53:29.623102Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034009571, ... node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034034904, txId: 281474976710682] shutting down 2025-04-07T13:53:55.219221Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034035128, txId: 281474976710685] shutting down 2025-04-07T13:53:55.258405Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034035128, txId: 281474976710686] shutting down 2025-04-07T13:53:55.924532Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034035471, txId: 281474976710689] shutting down 2025-04-07T13:53:56.032107Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034035471, txId: 281474976710690] shutting down 2025-04-07T13:53:56.169105Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034035821, txId: 281474976710693] shutting down 2025-04-07T13:53:56.272995Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034036227, txId: 281474976710695] shutting down 2025-04-07T13:53:56.754283Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034036514, txId: 281474976710697] shutting down 2025-04-07T13:53:56.875632Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034036808, txId: 281474976710700] shutting down 2025-04-07T13:53:56.877430Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034036808, txId: 281474976710699] shutting down 2025-04-07T13:53:57.481196Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037291, txId: 281474976710704] shutting down 2025-04-07T13:53:57.483045Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037291, txId: 281474976710703] shutting down 2025-04-07T13:53:57.483578Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037291, txId: 281474976710705] shutting down 2025-04-07T13:53:57.661515Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037655, txId: 281474976710710] shutting down 2025-04-07T13:53:57.662183Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037655, txId: 281474976710709] shutting down 2025-04-07T13:53:57.828775Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037837, txId: 281474976710713] shutting down 
2025-04-07T13:53:57.829694Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034037837, txId: 281474976710714] shutting down 2025-04-07T13:53:58.145346Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038138, txId: 281474976710717] shutting down 2025-04-07T13:53:58.159035Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038138, txId: 281474976710718] shutting down 2025-04-07T13:53:58.479733Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038397, txId: 281474976710722] shutting down 2025-04-07T13:53:58.483295Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038397, txId: 281474976710721] shutting down 2025-04-07T13:53:58.803670Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038789, txId: 281474976710726] shutting down 2025-04-07T13:53:58.806901Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034038789, txId: 281474976710725] shutting down 2025-04-07T13:53:59.005717Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039013, txId: 281474976710729] shutting down 2025-04-07T13:53:59.277350Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039258, txId: 281474976710731] shutting down 2025-04-07T13:53:59.278855Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039258, txId: 281474976710732] shutting down 2025-04-07T13:53:59.625768Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039587, txId: 281474976710735] shutting down 2025-04-07T13:53:59.894706Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039895, txId: 281474976710737] shutting down 2025-04-07T13:53:59.896112Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034039895, txId: 281474976710738] shutting down 2025-04-07T13:54:00.122673Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034040077, txId: 281474976710741] shutting down 2025-04-07T13:54:00.154584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:54:00.154612Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:00.415157Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034040322, txId: 281474976710743] shutting down 2025-04-07T13:54:00.494422Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034040518, txId: 281474976710745] shutting down 2025-04-07T13:54:00.684019Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034040665, txId: 281474976710747] shutting down 2025-04-07T13:54:01.074616Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034040980, txId: 281474976710749] shutting down 2025-04-07T13:54:01.273713Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034041232, txId: 281474976710752] shutting down 
2025-04-07T13:54:01.274394Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034041232, txId: 281474976710751] shutting down 2025-04-07T13:54:01.617653Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034041596, txId: 281474976710755] shutting down 2025-04-07T13:54:02.117886Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034041925, txId: 281474976710757] shutting down 2025-04-07T13:54:02.167324Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042142, txId: 281474976710759] shutting down 2025-04-07T13:54:02.172000Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042142, txId: 281474976710760] shutting down 2025-04-07T13:54:02.428396Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042408, txId: 281474976710763] shutting down 2025-04-07T13:54:02.674022Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042667, txId: 281474976710765] shutting down 2025-04-07T13:54:02.880011Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042877, txId: 281474976710767] shutting down 2025-04-07T13:54:03.131719Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034043073, txId: 281474976710769] shutting down 2025-04-07T13:54:03.432909Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034043381, txId: 281474976710771] shutting down 2025-04-07T13:54:03.885566Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034043787, txId: 281474976710773] shutting down 2025-04-07T13:54:03.886466Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034043787, txId: 281474976710774] shutting down 2025-04-07T13:54:04.120817Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034044102, txId: 281474976710777] shutting down 2025-04-07T13:54:04.257429Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034044277, txId: 281474976710779] shutting down 2025-04-07T13:54:04.514789Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034044515, txId: 281474976710781] shutting down 2025-04-07T13:54:04.794876Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034044795, txId: 281474976710783] shutting down 2025-04-07T13:54:05.099512Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034045110, txId: 281474976710785] shutting down 2025-04-07T13:54:05.524838Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034045516, txId: 281474976710787] shutting down 2025-04-07T13:54:05.737534Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034045740, txId: 281474976710789] shutting down 2025-04-07T13:54:05.763073Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034045740, txId: 281474976710790] shutting down 
2025-04-07T13:54:06.044914Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034046048, txId: 281474976710793] shutting down 2025-04-07T13:54:06.336552Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034046342, txId: 281474976710795] shutting down 2025-04-07T13:54:06.656248Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034046671, txId: 281474976710797] shutting down 2025-04-07T13:54:06.956410Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034046923, txId: 281474976710799] shutting down 2025-04-07T13:54:07.176482Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034047189, txId: 281474976710801] shutting down 2025-04-07T13:54:07.454563Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034047455, txId: 281474976710803] shutting down 2025-04-07T13:54:07.771312Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034047784, txId: 281474976710805] shutting down 2025-04-07T13:54:08.149078Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034048134, txId: 281474976710807] shutting down 2025-04-07T13:54:08.478083Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034048491, txId: 281474976710809] shutting down 2025-04-07T13:54:08.772817Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034048736, txId: 281474976710811] shutting down 2025-04-07T13:54:08.921022Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034048939, txId: 281474976710813] shutting down 2025-04-07T13:54:09.194378Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034049219, txId: 281474976710815] shutting down >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-04-07T13:53:48.806721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569115564353519:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:49.116974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167d/r3tmp/tmpeud0ln/pdisk_1.dat 2025-04-07T13:53:49.358682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T13:53:49.358796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:49.364358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:49.373286Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11430, node 1 2025-04-07T13:53:49.561953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:49.561987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:49.561996Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:49.562080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:50.257413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:50.304620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:50.314755Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:53:50.314839Z node 1 :GRPC_CLIENT DEBUG: [5160000312d0] Connect to grpc://localhost:18754 2025-04-07T13:53:50.321287Z node 1 :GRPC_CLIENT DEBUG: [5160000312d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-07T13:53:50.357093Z node 1 :GRPC_CLIENT DEBUG: [5160000312d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:53:50.357476Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:53:50.359292Z node 1 :GRPC_CLIENT DEBUG: [5160000318d0] Connect to grpc://localhost:16080 2025-04-07T13:53:50.360328Z node 1 :GRPC_CLIENT DEBUG: [5160000318d0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-07T13:53:50.378174Z node 1 :GRPC_CLIENT DEBUG: [5160000318d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-07T13:53:50.379878Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:53:53.297081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569138114587727:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:53.297116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167d/r3tmp/tmpvkaCa6/pdisk_1.dat 2025-04-07T13:53:53.443759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:53.443842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:53.447095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:53.454472Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11233, node 2 2025-04-07T13:53:53.505577Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:53.505601Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:53.505610Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:53.505688Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19828 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:53.735006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:53.741984Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-04-07T13:53:56.670359Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569148633730424:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:56.670407Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167d/r3tmp/tmpncY6uY/pdisk_1.dat 2025-04-07T13:53:57.033448Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:57.106385Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:57.106472Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:57.115724Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23430, node 3 2025-04-07T13:53:57.317352Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:57.317375Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:57.317383Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:57.317494Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23388 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:57.733677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:57.741918Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:57.746498Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-04-07T13:54:01.491265Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569172025159523:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167d/r3tmp/tmp72WKWX/pdisk_1.dat 2025-04-07T13:54:01.582955Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:01.877256Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:01.932255Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:01.932356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:01.937976Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19671, node 4 2025-04-07T13:54:02.133350Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:02.133380Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:02.133388Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:02.133510Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: ... 
ble:0 2025-04-07T13:54:02.571503Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:02.572413Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.572620Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } api_key: "ApiK****alid (AB5B5EA8)" } 2025-04-07T13:54:02.585144Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Response AuthorizeResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-04-07T13:54:02.588798Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) permission something.read now has a valid subject "ApiKey-value-valid@as" 2025-04-07T13:54:02.588902Z node 4 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-04-07T13:54:02.596614Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:02.596853Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:02.610007Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Status 16 Access Denied 2025-04-07T13:54:02.610351Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:02.610400Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-07T13:54:02.611155Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.611340Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:02.621434Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Status 16 Access Denied 2025-04-07T13:54:02.621869Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:02.621899Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-04-07T13:54:02.622423Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.622624Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:02.624826Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Status 16 Access Denied 2025-04-07T13:54:02.625088Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:02.625114Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-04-07T13:54:02.625578Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.625756Z node 4 
:GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-07T13:54:02.627487Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Status 16 Access Denied 2025-04-07T13:54:02.627831Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:02.627863Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-07T13:54:02.628440Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.628632Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:02.630515Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:02.630684Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:02.630772Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:02.631363Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.631576Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-07T13:54:02.633255Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:02.633988Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:02.634067Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:02.634530Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-04-07T13:54:02.634663Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-04-07T13:54:02.636175Z node 4 :GRPC_CLIENT DEBUG: [5160000da1d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:02.644798Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-04-07T13:54:02.644905Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:06.077806Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569191042849352:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:06.077887Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167d/r3tmp/tmpjXXvKC/pdisk_1.dat 2025-04-07T13:54:06.270991Z node 5 :IMPORT 
WARN: Table profiles were not loaded 2025-04-07T13:54:06.293766Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:06.295841Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:06.298013Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28674, node 5 2025-04-07T13:54:06.417262Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:06.417285Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:06.417297Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:06.417407Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:06.803218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:06.813733Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:06.818569Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:06.818660Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Connect to grpc://localhost:28878 2025-04-07T13:54:06.819424Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:06.845459Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:06.846521Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:06.846622Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:06.852839Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:06.852907Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:06.853092Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:06.853710Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:06.856198Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:06.856332Z node 5 :GRPC_CLIENT DEBUG: [5160000a5fd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:06.856531Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:06.856576Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-07T13:54:06.856678Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 28739, MsgBus: 27926 2025-04-07T13:53:42.565823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569089334439341:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:42.566058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a62/r3tmp/tmpPlV6Rz/pdisk_1.dat 2025-04-07T13:53:43.453184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:43.528566Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:43.528727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:43.584953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28739, node 1 2025-04-07T13:53:43.841123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:43.841143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:43.841152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:43.841266Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27926 TClient is connected to server localhost:27926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:45.439198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.511069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.927327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:46.323602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:46.583701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:47.536312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569089334439341:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:47.536392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:49.828777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569119399211999:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:49.847568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:49.897178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:49.939401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:49.995909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.047159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.091579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.194502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.303198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569123694179800:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.303303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.303670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569123694179805:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.310475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:50.326371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569123694179807:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:52.014103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2025-04-07T13:53:52.825200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034032853, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 13554, MsgBus: 2815 2025-04-07T13:53:54.271847Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569142687814100:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a62/r3tmp/tmpYnVEJ7/pdisk_1.dat 2025-04-07T13:53:54.437365Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:54.540262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:54.552109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:54.552510Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:54.574597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13554, node 2 2025-04-07T13:53:54.837261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:54.837284Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:54.837292Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:54.837384Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2815 TClient is connected to server localhost:2815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:55.930204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:55.938191Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:55.949814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:56.137186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:56.496256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:56.630012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:59.202784Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569142687814100:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:59.203652Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:59.237876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569164162652115:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:59.237954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:59.316451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.384362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.441927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.541669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.593596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.671709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:59.740279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569164162652618:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:59.740355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:59.740692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569164162652623:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:59.745866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:59.777967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569164162652625:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:02.049165Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034042065, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 29355, MsgBus: 19385 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a62/r3tmp/tmp4Sk9yq/pdisk_1.dat 2025-04-07T13:54:03.363111Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:03.448212Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:03.468330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:03.468423Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:03.470808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29355, node 3 2025-04-07T13:54:03.685223Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:03.685251Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:03.685260Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:03.685368Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19385 TClient is connected to server localhost:19385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:04.369699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:04.394907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:04.539674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:04.792931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:04.894992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:08.092525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569201250851171:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.092616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.145132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.193762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.274882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.326686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.378562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.459514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.554811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569201250851679:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.554909Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.555303Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569201250851684:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.560172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:08.575522Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569201250851686:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:10.184162Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034050199, txId: 281474976710671] shutting down 2025-04-07T13:54:10.407015Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034050444, txId: 281474976710673] shutting down >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> TargetDiscoverer::InvalidCredentials [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> BasicUsage::RecreateObserver [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-04-07T13:54:08.918888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569203250901328:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:08.919118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ae1/r3tmp/tmp9VgU8U/pdisk_1.dat 2025-04-07T13:54:09.706383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:09.741901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:09.741984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:09.771027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7243 TServer::EnableGrpc on GrpcPort 23031, node 1 2025-04-07T13:54:10.241279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:10.241298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:10.241305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:10.241396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7243 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:10.709865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:10.747307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:11.110930Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } } } 2025-04-07T13:54:11.111003Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> BasicUsage::BrokenCredentialsProvider [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-04-07T13:53:54.368716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569141889485799:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:54.368766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001663/r3tmp/tmpuOyT4J/pdisk_1.dat 2025-04-07T13:53:54.996120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:55.028825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:55.028932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:55.031955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10681, node 1 2025-04-07T13:53:55.337403Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:55.337424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:55.337429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:55.337511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15902 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:55.810279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:55.833512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:55.948455Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:53:55.949118Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-04-07T13:53:55.949166Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2025-04-07T13:53:58.415489Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569157211715815:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:58.415525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001663/r3tmp/tmp1uGXjm/pdisk_1.dat 2025-04-07T13:53:58.574105Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:58.592906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:58.593006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:58.598558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19019, node 2 2025-04-07T13:53:58.709238Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:58.709257Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:58.709267Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:58.709345Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22614 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:58.968837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:58.974743Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:53:58.976917Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-07T13:53:58.976988Z node 2 :GRPC_CLIENT DEBUG: [516000039cd0] Connect to grpc://localhost:13523 2025-04-07T13:53:58.983846Z node 2 :GRPC_CLIENT DEBUG: [516000039cd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:53:59.003208Z node 2 :GRPC_CLIENT DEBUG: [516000039cd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-07T13:53:59.003592Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-07T13:53:59.003688Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:53:59.009279Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-07T13:53:59.009493Z node 2 :GRPC_CLIENT DEBUG: [516000039cd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:53:59.012618Z node 2 :GRPC_CLIENT DEBUG: [516000039cd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" 
type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-07T13:53:59.012939Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-07T13:53:59.012993Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2025-04-07T13:54:03.565016Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569179937431177:2225];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:03.585463Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001663/r3tmp/tmpBp5FKv/pdisk_1.dat 2025-04-07T13:54:03.724822Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:03.749894Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:03.749965Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:03.751070Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27211, node 3 2025-04-07T13:54:03.841284Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:03.841307Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:03.841313Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:03.841407Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:04.154139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 720575940 ... 
ase" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:54:04.229974Z node 3 :GRPC_CLIENT DEBUG: [5160001227d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:04.230265Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:54:04.230450Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:54:07.706257Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569195692392266:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001663/r3tmp/tmpx6fstb/pdisk_1.dat 2025-04-07T13:54:07.742658Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:07.813353Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:07.832344Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:07.832435Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:07.833707Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24556, node 4 2025-04-07T13:54:07.885248Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:07.885269Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:07.885275Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:07.885378Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:08.119541Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:08.126178Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:08.129152Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-04-07T13:54:08.129203Z node 4 :GRPC_CLIENT DEBUG: [51600003e1d0] Connect to grpc://localhost:17076 2025-04-07T13:54:08.130229Z node 4 :GRPC_CLIENT DEBUG: [51600003e1d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-04-07T13:54:08.137863Z node 4 :GRPC_CLIENT DEBUG: [51600003e1d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-04-07T13:54:08.138468Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-04-07T13:54:08.138492Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-04-07T13:54:08.138503Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-04-07T13:54:08.138519Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-04-07T13:54:08.138536Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:54:08.138709Z node 4 :GRPC_CLIENT DEBUG: [51600003e4d0] Connect to grpc://localhost:22709 2025-04-07T13:54:08.139414Z node 4 :GRPC_CLIENT DEBUG: [51600003e4d0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-07T13:54:08.148923Z node 4 :GRPC_CLIENT DEBUG: [51600003e4d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-07T13:54:08.149237Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:54:12.660281Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569219648830107:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:12.660497Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001663/r3tmp/tmpaJ0gtj/pdisk_1.dat 2025-04-07T13:54:12.743418Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
62348, node 5 2025-04-07T13:54:12.773753Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:12.773837Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:12.776117Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:12.885397Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:12.885426Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:12.885434Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:12.885553Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:13.154100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:13.165474Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:13.171155Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-07T13:54:13.171197Z node 5 :GRPC_CLIENT DEBUG: [5160000c09d0] Connect to grpc://localhost:26399 2025-04-07T13:54:13.171856Z node 5 :GRPC_CLIENT DEBUG: [5160000c09d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:54:13.193436Z node 5 :GRPC_CLIENT DEBUG: [5160000c09d0] Status 14 Service Unavailable 2025-04-07T13:54:13.193592Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:13.193614Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:13.193645Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:13.193734Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-07T13:54:13.194005Z node 5 :GRPC_CLIENT DEBUG: [5160000c09d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:54:13.212798Z node 5 :GRPC_CLIENT DEBUG: [5160000c09d0] Status 1 CANCELLED 2025-04-07T13:54:13.213553Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-04-07T13:54:13.213570Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-04-07T13:54:13.213591Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-04-07T13:52:15.913473Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1744033935913442 2025-04-07T13:52:16.770665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568719492132112:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:17.141420Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:17.146139Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001825/r3tmp/tmpLW8D7i/pdisk_1.dat 2025-04-07T13:52:17.382667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:17.417165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:52:17.703542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.703637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.710197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:17.712396Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:17.715105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11932, node 1 2025-04-07T13:52:17.810232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:17.810318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:17.839898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:17.861019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001825/r3tmp/yandexiQOXhL.tmp 2025-04-07T13:52:17.861045Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001825/r3tmp/yandexiQOXhL.tmp 2025-04-07T13:52:17.861198Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001825/r3tmp/yandexiQOXhL.tmp 2025-04-07T13:52:17.861295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:17.997514Z INFO: TTestServer started on Port 7657 GrpcPort 11932 TClient is connected to server localhost:7657 PQClient connected to localhost:11932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:18.427760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-04-07T13:52:21.239165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568743350854247:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.251758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.255738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568743350854259:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:21.280122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:21.319331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568743350854261:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-04-07T13:52:21.668793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:21.677987Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568743350854356:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:21.679479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M2NDI0ZTctY2JmNmZhZGItNWUyMTFjZDEtZTA3NGMwNWE=, ActorId: [1:7490568743350854244:2303], ActorState: ExecuteState, TraceId: 01jr88c1psf6cyg2v4e6wd63h8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:21.682227Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568740966968768:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:21.683473Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTNlZDRlMDctYjBjMzM4NjItN2FkN2U4ZDQtNjNjMzUwMQ==, ActorId: [2:7490568740966968743:2282], ActorState: ExecuteState, TraceId: 01jr88c1y73r610yjkyg7k47aj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:21.716031Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:21.717224Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:21.717308Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568719492132112:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.717350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:21.886517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:22.076958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11932", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:22.372221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jr88c2mta93wby587cw5d4tc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNjNDgzNDYtNzYxYzUxYjUtNDQ4MGYyMTctMWRkODczODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568747645822108:3020] === CheckClustersList. Ok waiting... 
2025-04-07T13:52:27.819150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720675:0, at schemeshard: 72057594046644480 PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:11932 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:52:28.052787Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11932 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976720678 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = ... 7fb896-82d9030f 2025-04-07T13:54:14.217934Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037892 Generation: 1 2025-04-07T13:54:14.220749Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:14.220801Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7490569226553374032:2509], now have 1 active actors on pipe 2025-04-07T13:54:14.220884Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:54:14.220908Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:54:14.220946Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Created session shared/user_3_1_16993794441657467074_v1 on pipe: [3:7490569226553374032:2509] 2025-04-07T13:54:14.221026Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_3_1_16993794441657467074_v1:1 with generation 1 2025-04-07T13:54:14.221200Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user session is set to 0 (startOffset 0) session shared/user_3_1_16993794441657467074_v1 2025-04-07T13:54:14.221363Z node 4 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T13:54:14.284751Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:54:14.284808Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 CommittedOffset: 0 EndOffset: 0 } 2025-04-07T13:54:14.327139Z :INFO: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.327189Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:54:14.327232Z :INFO: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Counters: { Errors: 0 CurrentSessionLifetimeMs: 286 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.327348Z :NOTICE: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:54:14.327400Z :DEBUG: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] [] Abort session to cluster 2025-04-07T13:54:14.327847Z :INFO: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.327884Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:54:14.327912Z :INFO: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 269 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.327969Z :NOTICE: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:54:14.327993Z :DEBUG: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] [] Abort session to cluster 2025-04-07T13:54:14.328220Z :INFO: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.328251Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-07T13:54:14.328276Z :INFO: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Counters: { Errors: 0 CurrentSessionLifetimeMs: 267 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.328322Z :NOTICE: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:54:14.328343Z :DEBUG: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] [] Abort session to cluster 2025-04-07T13:54:14.328535Z :INFO: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.328563Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-07T13:54:14.328593Z :INFO: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Counters: { Errors: 0 CurrentSessionLifetimeMs: 267 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.328685Z :NOTICE: [/Root] [/Root] [938100b0-d58f29a6-86c69d21-a70fa195] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:14.317554Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1744034049878 CreateTimestampMS: 1744034049878 SizeLag: 0 WriteTimestampEstimateMS: 1744034054168 } Cookie: 18446744073709551615 } 2025-04-07T13:54:14.338999Z :INFO: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.339058Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:54:14.339095Z :INFO: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 280 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.339175Z :NOTICE: [/Root] [/Root] [28b9066f-c575735a-6e1ae994-69542fd3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:14.340016Z :INFO: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.340049Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:54:14.340076Z :INFO: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Counters: { Errors: 0 CurrentSessionLifetimeMs: 299 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.340130Z :NOTICE: [/Root] [/Root] [a67de4bb-66af347c-35e293eb-1a535f11] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:14.317618Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 INIT DONE TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-07T13:54:14.361127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:14.317710Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 sending to client partition status 2025-04-07T13:54:14.368771Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_16993794441657467074_v1 2025-04-07T13:54:14.368843Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490569226553374032:2509] destroyed 2025-04-07T13:54:14.368925Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_16993794441657467074_v1 2025-04-07T13:54:14.340911Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9181865286384255891_v1 grpc read done: success# 0, data# { } 2025-04-07T13:54:14.340935Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9181865286384255891_v1 grpc read failed 2025-04-07T13:54:14.340971Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9181865286384255891_v1 grpc closed 2025-04-07T13:54:14.340996Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9181865286384255891_v1 is DEAD 2025-04-07T13:54:14.357312Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_1123030500098164685_v1 grpc read done: success# 0, data# { } 2025-04-07T13:54:14.357331Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1123030500098164685_v1 grpc read failed 2025-04-07T13:54:14.357350Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1123030500098164685_v1 grpc closed 2025-04-07T13:54:14.357370Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1123030500098164685_v1 is DEAD 2025-04-07T13:54:14.357864Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 grpc read done: success# 0, data# { } 2025-04-07T13:54:14.357875Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 grpc read failed 2025-04-07T13:54:14.357891Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 grpc closed 2025-04-07T13:54:14.357921Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_16993794441657467074_v1 is DEAD 2025-04-07T13:54:14.359379Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374019:2496] disconnected; active server actors: 1 2025-04-07T13:54:14.359418Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374019:2496] client user disconnected session shared/user_3_3_1123030500098164685_v1 2025-04-07T13:54:14.359471Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-07T13:54:14.359516Z node 3 
:PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374014:2494] disconnected; active server actors: 1 2025-04-07T13:54:14.359528Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374014:2494] client user disconnected session shared/user_3_1_16993794441657467074_v1 2025-04-07T13:54:14.359576Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374017:2495] disconnected; active server actors: 1 2025-04-07T13:54:14.359590Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7490569226553374017:2495] client user disconnected session shared/user_3_2_9181865286384255891_v1 2025-04-07T13:54:14.881214Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-07T13:53:15.954701Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744033995954669 2025-04-07T13:53:17.161395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568982934181322:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:17.161472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:17.196465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568984405310125:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:17.196504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:17.597233Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:17.601822Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011f9/r3tmp/tmpV6avX3/pdisk_1.dat 2025-04-07T13:53:18.193085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:18.203063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:18.203151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.205490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:18.207378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:18.207460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.214570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:18.224575Z node 1 
:HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:18.226368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:18.321883Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2517, node 1 2025-04-07T13:53:18.412735Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:18.459967Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:53:18.644840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0011f9/r3tmp/yandexmyb0ct.tmp 2025-04-07T13:53:18.644875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0011f9/r3tmp/yandexmyb0ct.tmp 2025-04-07T13:53:18.645045Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0011f9/r3tmp/yandexmyb0ct.tmp 2025-04-07T13:53:18.645185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:18.749997Z INFO: TTestServer started on Port 26237 GrpcPort 2517 TClient is connected to server localhost:26237 PQClient connected to localhost:2517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:19.447256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-04-07T13:53:19.636984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T13:53:22.164215Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568982934181322:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:22.164314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:22.196904Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568984405310125:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:22.196982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:24.754061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569012998953205:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:24.754944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:24.764711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569012998953244:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:24.783041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:53:24.866057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569012998953248:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:53:25.311873Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569012998953339:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:25.319131Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569014470081549:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:25.325540Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDNhMjZmNWEtZTI4M2UwMWItNDIxNmZkMy05ODIzNWYwNg==, ActorId: [2:7490569014470081509:2287], ActorState: ExecuteState, TraceId: 01jr88dzt844xc3h3dwnv5zt3a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:25.327839Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:25.333105Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODEzNDY3NWUtY2RhZmMwOTUtNzM4ZTA3NDAtZjQyMmU5YmI=, ActorId: [1:7490569012998953201:2309], ActorState: ExecuteState, TraceId: 01jr88dzpte8cwgn2rpdz0zm1n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:25.333529Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:25.337645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.689404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:25.935834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2517", true, true, 1000); 2025-04-07T13:53:26.462383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88e1534bsqpse377aky9cp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYxZDNhM2ItMmE2ZWM3OGYtYmUwYzMxODktOTMxZTdmMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490569021588888352:3006] === CheckClustersList. 
Ok 2025-04-07T13:53:32.639564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:2517 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:53:32.968872Z node 1 :PERSQUEUE INFO: proxy answer CallPers ... RB for SourceId=src 2025-04-07T13:54:13.918027Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490569224614936562:2477] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T13:54:13.918566Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490569224614936619:2477] disconnected; active server actors: 1 2025-04-07T13:54:13.918590Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490569224614936619:2477] disconnected no session 2025-04-07T13:54:14.097809Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490569224614936562:2477] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:54:14.097849Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490569224614936562:2477] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T13:54:14.097870Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490569224614936562:2477] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T13:54:14.097901Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:54:14.099524Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:14.099565Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7490569228909903939:2477], now have 1 active actors on pipe 2025-04-07T13:54:14.099829Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-07T13:54:14.100116Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:54:14.100165Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:54:14.100247Z node 6 :PERSQUEUE INFO: new Cookie src|13a79456-309556f-d5f2f102-4f34a3bc_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T13:54:14.100341Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-07T13:54:14.100388Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:54:14.101233Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:54:14.101257Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:54:14.101333Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:54:14.101722Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|13a79456-309556f-d5f2f102-4f34a3bc_0 2025-04-07T13:54:14.105337Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034054105 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.105460Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|13a79456-309556f-d5f2f102-4f34a3bc_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T13:54:14.105803Z :INFO: [] MessageGroupId [src] SessionId [src|13a79456-309556f-d5f2f102-4f34a3bc_0] Write session: close. Timeout = 0 ms 2025-04-07T13:54:14.105846Z :INFO: [] MessageGroupId [src] SessionId [src|13a79456-309556f-d5f2f102-4f34a3bc_0] Write session will now close 2025-04-07T13:54:14.105884Z :DEBUG: [] MessageGroupId [src] SessionId [src|13a79456-309556f-d5f2f102-4f34a3bc_0] Write session: aborting 2025-04-07T13:54:14.106359Z :INFO: [] MessageGroupId [src] SessionId [src|13a79456-309556f-d5f2f102-4f34a3bc_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:54:14.106406Z :DEBUG: [] MessageGroupId [src] SessionId [src|13a79456-309556f-d5f2f102-4f34a3bc_0] Write session: destroy 2025-04-07T13:54:14.110911Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|13a79456-309556f-d5f2f102-4f34a3bc_0 grpc read done: success: 0 data: 2025-04-07T13:54:14.110944Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|13a79456-309556f-d5f2f102-4f34a3bc_0 grpc read failed 2025-04-07T13:54:14.110970Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|13a79456-309556f-d5f2f102-4f34a3bc_0 grpc closed 2025-04-07T13:54:14.110995Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|13a79456-309556f-d5f2f102-4f34a3bc_0 is DEAD 2025-04-07T13:54:14.111570Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:54:14.112931Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:14.112991Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490569228909903939:2477] destroyed 2025-04-07T13:54:14.113042Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T13:54:14.245434Z :INFO: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Starting read session 2025-04-07T13:54:14.245501Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Starting session to cluster null (localhost:17725) 2025-04-07T13:54:14.247637Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:14.248377Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:14.248423Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] Reconnecting session to cluster null in 0.000000s 2025-04-07T13:54:14.249947Z :ERROR: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-07T13:54:14.250011Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:14.250049Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:54:14.250183Z :INFO: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-07T13:54:14.250376Z :NOTICE: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:14.250406Z :DEBUG: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-07T13:54:14.250503Z :INFO: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Closing read session. Close timeout: 0.000000s 2025-04-07T13:54:14.250541Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T13:54:14.250578Z :INFO: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Counters: { Errors: 1 CurrentSessionLifetimeMs: 5 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:14.250676Z :NOTICE: [/Root] [/Root] [d9207cb6-43167f1c-943a3cb2-ebea7760] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:14.560471Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:54:14.560509Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:15.486589Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:54:15.521248Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:15.578746Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:15.647584Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:15.716830Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:15.840762Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715690. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:54:15.840889Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7490569233204871347:2500] TxId: 281474976715690. Ctx: { TraceId: 01jr88fgmkcg8tvv5k65ahhz9q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NmFmMmMwNy05ZTYzZDJiNi1mNDE4YjMwYy0zMWVhMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:54:15.841232Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NmFmMmMwNy05ZTYzZDJiNi1mNDE4YjMwYy0zMWVhMzViMA==, ActorId: [5:7490569228909903981:2500], ActorState: ExecuteState, TraceId: 01jr88fgmkcg8tvv5k65ahhz9q, Create QueryResponse for error on request, msg: 2025-04-07T13:54:15.842321Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr88fhex0g73e1w39gaap9fa" } } YdbStatus: UNAVAILABLE ConsumedRu: 543 } 2025-04-07T13:54:15.922782Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:16.134720Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490569233204871324:2509], TxId: 281474976715689, task: 1, CA Id [5:7490569233204871322:2509]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> TBlobStorageProxyTest::TestNormal >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] Test command err: 2025-04-07T13:53:53.312373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569137573430952:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:53.312427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001674/r3tmp/tmp9B6Knx/pdisk_1.dat 2025-04-07T13:53:54.049280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:54.075584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:54.075679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:54.084981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10820, node 1 2025-04-07T13:53:54.308623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:54.308652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:54.308697Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:54.308782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:54.994433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:55.021015Z node 1 :TICKET_PARSER DEBUG: Ticket 773E272484BCA6CD4857E93BC86E539BECB44290 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-07T13:53:59.683892Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569163838827231:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001674/r3tmp/tmpB0jafo/pdisk_1.dat 2025-04-07T13:53:59.807022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:59.942736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:59.942806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:59.946000Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:59.969256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10717, node 2 2025-04-07T13:54:00.221273Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:00.221291Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:00.221297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:00.221376Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12253 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:00.802569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:00.812276Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:00.814906Z node 2 :TICKET_PARSER DEBUG: Ticket CE4EB83DE7163226488F365B424B730912E14F3B () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-04-07T13:54:00.815432Z node 2 :TICKET_PARSER ERROR: Ticket CE4EB83DE7163226488F365B424B730912E14F3B: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2025-04-07T13:54:04.945215Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569185180272913:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001674/r3tmp/tmpJJCGb2/pdisk_1.dat 2025-04-07T13:54:05.036717Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:05.168501Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:05.193691Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:05.193781Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:05.197218Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17768, node 3 2025-04-07T13:54:05.345900Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:05.345918Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:05.345932Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:05.346024Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:54:05.654863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.664497Z node 3 :TICKET_PARSER DEBUG: Ticket 5E38F9ADA8C428AB3A2938933ACE5F0A1F1FC239 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-07T13:54:05.665068Z node 3 :TICKET_PARSER ERROR: Ticket 5E38F9ADA8C428AB3A2938933ACE5F0A1F1FC239: Cannot create token from certificate. Client certificate failed verification test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001674/r3tmp/tmpvp18h7/pdisk_1.dat 2025-04-07T13:54:10.006038Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:10.006952Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:10.030676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:10.030766Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:10.032289Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29114, node 4 2025-04-07T13:54:10.221609Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:10.221635Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:10.221644Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:10.221787Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15442 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:10.670443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:10.677720Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:10.682222Z node 4 :TICKET_PARSER DEBUG: Ticket B41D2D3B29F50A78EAFC8152BEF7714CE5BBB09A () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-07T13:54:10.682881Z node 4 :TICKET_PARSER ERROR: Ticket B41D2D3B29F50A78EAFC8152BEF7714CE5BBB09A: Cannot create token from certificate. Client certificate failed verification 2025-04-07T13:54:14.733132Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569226112942187:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:14.733204Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001674/r3tmp/tmpSJslYf/pdisk_1.dat 2025-04-07T13:54:14.907183Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:14.909886Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:14.909983Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:14.914615Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20965, node 5 2025-04-07T13:54:15.029383Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:15.029415Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:15.029436Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:15.029580Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1339 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:54:15.454506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:15.460512Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:15.463561Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-07T13:54:15.463617Z node 5 :GRPC_CLIENT DEBUG: [516000011ad0] Connect to grpc://localhost:16008 2025-04-07T13:54:15.467423Z node 5 :GRPC_CLIENT DEBUG: [516000011ad0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-07T13:54:15.481806Z node 5 :GRPC_CLIENT DEBUG: [516000011ad0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-07T13:54:15.482084Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "" 2025-04-07T13:54:15.482245Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:15.482651Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-07T13:54:15.482935Z node 5 :GRPC_CLIENT DEBUG: [516000011ad0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** 
(8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-07T13:54:15.485063Z node 5 :GRPC_CLIENT DEBUG: [516000011ad0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-07T13:54:15.485219Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-07T13:54:15.485299Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:19.807103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:19.807244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:19.807290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:19.807338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:19.807389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:19.807421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:19.807516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:19.807875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:19.875545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:19.875613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:19.888329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:19.888532Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:19.888764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:19.894564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:19.894830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:19.895528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:19.895756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:19.898047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:19.899375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:19.899441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:19.899575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:19.899631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:19.899670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:19.899858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:19.906860Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:20.040022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:20.040290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.040504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:20.040806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:20.040874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.043304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.043455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:20.043648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.043718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:20.043753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:20.043796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:20.045872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.045934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:20.045979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:20.047757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.047813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.047853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.047926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.051363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:20.053444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:20.053632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:20.054624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.054763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:20.054819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.055077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:20.055126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.055290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:20.055371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:20.057434Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:20.057475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:20.057646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:20.057685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:20.057931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.057974Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:20.058097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:20.058139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.058192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:20.058259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.058305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:20.058343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:20.058471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:20.058564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:20.058598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:20.060502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:20.060605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:20.060639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:20.060777Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:20.060833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:20.060935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:20.441351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T13:54:20.441508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T13:54:20.442232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.442359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:20.442414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T13:54:20.442727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T13:54:20.442782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T13:54:20.442974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:20.443055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:54:20.443113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T13:54:20.445396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:20.445446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:20.445594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:54:20.445704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:20.445739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:330:2308], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T13:54:20.445786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:330:2308], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T13:54:20.446118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.446158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 
ProgressState 2025-04-07T13:54:20.446279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T13:54:20.446315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:54:20.446360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T13:54:20.446399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:54:20.446455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T13:54:20.446490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T13:54:20.446676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:54:20.446723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T13:54:20.446757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T13:54:20.446790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T13:54:20.447798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:54:20.447900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:54:20.447939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:54:20.447981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:54:20.448024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:20.448839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:54:20.448941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:54:20.448977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:54:20.449005Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:54:20.449038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:54:20.449102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T13:54:20.455872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 100 2025-04-07T13:54:20.457144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T13:54:20.457443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T13:54:20.457499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T13:54:20.457948Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T13:54:20.458077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T13:54:20.458111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:477:2429] TestWaitNotification: OK eventTxId 100 2025-04-07T13:54:20.458627Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:54:20.458874Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 246us result status StatusSuccess 2025-04-07T13:54:20.459278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:20.459767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:54:20.459927Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 164us result status StatusSuccess 2025-04-07T13:54:20.460318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 19364, MsgBus: 15957 2025-04-07T13:53:42.406029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569089069110793:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:42.406064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a72/r3tmp/tmp3wtsep/pdisk_1.dat 2025-04-07T13:53:43.389540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:43.391563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:43.391654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:43.409464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:43.412295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 19364, node 1 2025-04-07T13:53:43.669227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:43.669250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:43.669257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:43.669341Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15957 TClient is connected to server localhost:15957 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:45.103617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.166716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:45.177607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.450728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.824891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:45.988185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:47.408910Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569089069110793:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:47.429810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:49.609667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569119133883600:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:49.609792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.319310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.413717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.491794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.553898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.604051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.690066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:50.777235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569123428851412:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.777323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.777534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569123428851417:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:50.781301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:50.803144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569123428851419:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:52.351941Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034032377, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11421, MsgBus: 14009 2025-04-07T13:53:53.358990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569136255116053:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:53.359029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a72/r3tmp/tmpsx0RyG/pdisk_1.dat 2025-04-07T13:53:53.800205Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:53.804537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:53.805204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:53.807817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11421, node 2 2025-04-07T13:53:54.007912Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:54.007931Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:54.007939Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:54.008017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14009 TClient is connected to server localhost:14009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:54.815066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:54.835295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:54.984889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:55.377617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:55.479905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... able, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:03.889743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:03.968753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:06.868637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569194883969392:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:06.868745Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:06.921175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.010361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.069411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.122817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.168272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.218362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:07.324841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569199178937186:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:07.324962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:07.327065Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569199178937191:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:07.332383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:07.357195Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569199178937193:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:54:09.950737Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034049163, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 10635, MsgBus: 28952 2025-04-07T13:54:10.932798Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569208693771205:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:10.932886Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a72/r3tmp/tmpFaKlJJ/pdisk_1.dat 2025-04-07T13:54:11.082909Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:11.100912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:11.100996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:11.103514Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10635, node 4 2025-04-07T13:54:11.211088Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:11.211113Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:11.211122Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:11.211268Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28952 TClient is connected to server localhost:28952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:12.071257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:12.081778Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:12.095973Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:12.270041Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:12.562939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:12.681385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:15.806898Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569230168609374:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:15.806982Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:15.857981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:15.900361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:15.922605Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569208693771205:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:15.922668Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:15.963565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.007481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.043549Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.101946Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.201830Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569234463577173:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.201933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.202179Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569234463577178:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.207052Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:16.232473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569234463577180:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:54:19.147918Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034058354, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-07T13:52:21.348553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568741525183650:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.353038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b13/r3tmp/tmp8W5Zsh/pdisk_1.dat 2025-04-07T13:52:21.605328Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:21.876049Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:21.899190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:21.899289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:21.901175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28772, node 1 2025-04-07T13:52:21.973252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b13/r3tmp/yandexikha8e.tmp 2025-04-07T13:52:21.973276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b13/r3tmp/yandexikha8e.tmp 2025-04-07T13:52:21.973435Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b13/r3tmp/yandexikha8e.tmp 2025-04-07T13:52:21.973543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:22.017773Z INFO: TTestServer started on Port 19762 GrpcPort 28772 TClient is connected to server localhost:19762 PQClient connected to localhost:28772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:22.293506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:52:22.325128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:22.339897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:22.492214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:24.877963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754410086323:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.878534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.878592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754410086336:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.899185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:24.933215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754410086341:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.933517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.937686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568754410086339:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:25.196360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:25.243437Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568758705053725:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:25.244912Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJhZmM1MjAtZGI3NDdjNTktZGI4Y2NlYTEtYjk3NTQyMWI=, ActorId: [1:7490568754410086321:2308], ActorState: ExecuteState, TraceId: 01jr88c57pe2jr4xjxyzpqshpg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:25.247250Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:25.267264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:25.387933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568758705053980:2618] 2025-04-07T13:52:26.348912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568741525183650:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:26.348995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.768868Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.815784Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.817132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568784474858052:2780], Recipient [1:7490568741525184060:2195]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.817155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.817175Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.817222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568784474858048:2777], Recipient [1:7490568741525184060:2195]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.817246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.934562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.935003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.935252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.935287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.935325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046 ... 
88505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569224820060548:2744], Partition 1, Sender [0:0:0], Recipient [5:7490569229115027948:2759], Cookie: 0 2025-04-07T13:54:18.607719Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569229115027948:2759]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.607741Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.607785Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.607852Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.607876Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.607902Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.607951Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569224820060547:2743], Partition 2, Sender [0:0:0], Recipient [5:7490569229115027946:2757], Cookie: 0 2025-04-07T13:54:18.607985Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569229115027946:2757]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.607998Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.608022Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.608050Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.608066Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.608081Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.608119Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569194755288211:2430], Partition 0, Sender [0:0:0], Recipient [5:7490569194755288275:2434], Cookie: 0 2025-04-07T13:54:18.608151Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569194755288275:2434]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.608164Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.608199Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.608226Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.608241Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.608258Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:18.628993Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569237704962881:2830], Partition 3, Sender [0:0:0], Recipient [5:7490569237704962976:2841], Cookie: 0 2025-04-07T13:54:18.629080Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569237704962976:2841]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.629104Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.629145Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.629217Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.629242Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.629271Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.629328Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569237704962879:2829], Partition 4, Sender [0:0:0], Recipient [5:7490569237704962974:2839], Cookie: 0 2025-04-07T13:54:18.629362Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569237704962974:2839]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.629375Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.629402Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.629432Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.629448Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.629468Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.724838Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569224820060548:2744], Partition 1, Sender [0:0:0], Recipient [5:7490569229115027948:2759], Cookie: 0 2025-04-07T13:54:18.724922Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569229115027948:2759]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.724946Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.724989Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.725062Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.725084Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.725113Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:18.725162Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569224820060547:2743], Partition 2, Sender [0:0:0], Recipient [5:7490569229115027946:2757], Cookie: 0 2025-04-07T13:54:18.725195Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569229115027946:2757]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.725210Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.725232Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.725261Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.725276Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.725292Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.725331Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569194755288211:2430], Partition 0, Sender [0:0:0], Recipient [5:7490569194755288275:2434], Cookie: 0 2025-04-07T13:54:18.725362Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569194755288275:2434]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.725374Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.725394Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.725424Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.725440Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.725455Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.732832Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569237704962881:2830], Partition 3, Sender [0:0:0], Recipient [5:7490569237704962976:2841], Cookie: 0 2025-04-07T13:54:18.732919Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569237704962976:2841]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.732944Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.732988Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.733061Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.733090Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.733121Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:18.733182Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569237704962879:2829], Partition 4, Sender [0:0:0], Recipient [5:7490569237704962974:2839], Cookie: 0 2025-04-07T13:54:18.733218Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569237704962974:2839]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.733234Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:18.733258Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:18.733290Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:18.733306Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:18.733325Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:18.746030Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7490569224820060547:2743]: NActors::TEvents::TEvWakeup 2025-04-07T13:54:18.753186Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7490569224820060548:2744]: NActors::TEvents::TEvWakeup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 32464, MsgBus: 24791 2025-04-07T13:53:37.757645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569066861315242:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:37.757772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7d/r3tmp/tmp8PlI73/pdisk_1.dat 2025-04-07T13:53:38.351183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:38.351291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:38.353016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:38.392424Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32464, node 1 2025-04-07T13:53:38.465428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:38.465452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:38.465459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:38.465566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24791 TClient is connected to server localhost:24791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:39.289920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:39.323250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:39.336265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:39.502448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:39.759185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:39.859784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:42.199331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569088336153292:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:42.199445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:42.555879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:42.642180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:42.725935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:42.753753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569066861315242:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:42.754008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:42.767048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:42.838787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:42.926590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:43.047253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569092631121092:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:43.047337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:43.047647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569092631121097:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:43.051870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:43.078731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569092631121099:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:49.896055Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034026700, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 1862, MsgBus: 13594 2025-04-07T13:53:50.836905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569122716958331:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7d/r3tmp/tmph3AfgF/pdisk_1.dat 2025-04-07T13:53:50.889306Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:51.066861Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:51.090025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:51.090098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:51.093918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1862, node 2 2025-04-07T13:53:51.297315Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:51.297333Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:51.297340Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:51.297433Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13594 TClient is connected to server localhost:13594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:51.967451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:51.992553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:51.998519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:52.117091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:52.380517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:52.488182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671066 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:05.330085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569188987335232:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.330213Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.371832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.442915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.498991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.604698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.687607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.769369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.823128Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569167512497210:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:05.823193Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:05.853733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569188987335733:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.853840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.854242Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569188987335738:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.858433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:05.901994Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569188987335740:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:08.120192Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034047679, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 20485, MsgBus: 15710 2025-04-07T13:54:09.504637Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569204376924702:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7d/r3tmp/tmpaMhytE/pdisk_1.dat 2025-04-07T13:54:09.624032Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:09.696991Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:09.745301Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:09.745402Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:09.752354Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20485, node 4 2025-04-07T13:54:09.939648Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:09.939674Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:09.939683Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:09.939804Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15710 TClient is connected to server localhost:15710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:10.892529Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:10.906248Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:10.928236Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:11.108504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:11.458086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:11.637649Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:14.487356Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569204376924702:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:14.487446Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:15.806121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569230146730009:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:15.806199Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:15.859758Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:15.923960Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:15.965656Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.036186Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.105917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.155720Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:16.306149Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569234441697811:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.306332Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.306740Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569234441697817:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:16.316879Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:16.346333Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569234441697819:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:54:19.408633Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034059110, txId: 281474976715671] shutting down >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationModify >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> TBlobStorageProxyTest::TestPartialGetBlock >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 13556, MsgBus: 27883 2025-04-07T13:53:38.498991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569071876493388:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:38.499193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a9a/r3tmp/tmpTXvL7R/pdisk_1.dat 2025-04-07T13:53:39.210668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:39.210790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:39.215108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13556, node 1 2025-04-07T13:53:39.284528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:39.521601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:39.521636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:39.521646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:39.521730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27883 TClient is connected to server localhost:27883 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:40.477798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:40.521657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:40.539704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:40.761848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:41.063851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:41.203560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:43.549171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569071876493388:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:43.549251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:44.021606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569097646298733:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:44.021737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:44.480743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.566757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.630612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.670736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.736157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:44.840422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:45.038641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569101941266538:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:45.038734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:45.044791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569101941266543:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:45.049655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:45.061862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569101941266545:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:48.968266Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034027687, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 3581, MsgBus: 17100 2025-04-07T13:53:50.249812Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569124109646038:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a9a/r3tmp/tmpfb3t8v/pdisk_1.dat 2025-04-07T13:53:50.411850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:50.553166Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:50.571297Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:50.580934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:50.589179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3581, node 2 2025-04-07T13:53:50.813356Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:50.813381Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:50.813387Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:50.813482Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17100 TClient is connected to server localhost:17100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:51.426361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:51.433786Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:51.452913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:51.573007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:51.819835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:51.936772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671066 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:04.663890Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569164523412316:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:04.663966Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:04.826369Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569185998250484:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:04.826498Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:04.903954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:04.998724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.050041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.130090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.201917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.308421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:05.378534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569190293218288:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.378631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.378876Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569190293218293:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:05.382717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:05.395385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569190293218295:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:11.117240Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034048330, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 10207, MsgBus: 12581 2025-04-07T13:54:12.383532Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569218166580460:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a9a/r3tmp/tmpwihMS6/pdisk_1.dat 2025-04-07T13:54:12.524328Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:12.649252Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:12.673733Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:12.673841Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:12.675100Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10207, node 4 2025-04-07T13:54:12.784768Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:12.784795Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:12.784807Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:12.784941Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12581 TClient is connected to server localhost:12581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:13.552840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:13.560437Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:13.567056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:13.706193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:14.010957Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:14.126276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:17.293301Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569218166580460:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:17.293369Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:18.768907Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569243936385773:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:18.769027Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:18.856477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:18.911866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:18.952396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:19.001933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:19.063883Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:19.148269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:19.227362Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569248231353575:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:19.227541Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:19.227862Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569248231353581:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:19.232810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:19.248130Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569248231353583:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:21.448517Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034061105, txId: 281474976710671] shutting down >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-07T13:52:19.990517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568735109648064:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:19.992476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:20.217160Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b60/r3tmp/tmpD9MoIF/pdisk_1.dat 2025-04-07T13:52:20.455278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:20.455403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:20.468636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:20.470150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21282, node 1 2025-04-07T13:52:20.535704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b60/r3tmp/yandexKF4V9K.tmp 2025-04-07T13:52:20.535720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b60/r3tmp/yandexKF4V9K.tmp 2025-04-07T13:52:20.535884Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b60/r3tmp/yandexKF4V9K.tmp 2025-04-07T13:52:20.535961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:20.579221Z INFO: TTestServer started on Port 15664 GrpcPort 21282 TClient is connected to server localhost:15664 PQClient connected to localhost:21282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:52:20.906905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.921546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.931126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:20.940139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:52:23.534024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568752289517901:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.534122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.534558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568752289517913:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.538647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:23.559182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568752289517915:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.035849Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568752289517988:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.055612Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjFkMjIzMjctMmVkZDBlOWMtNjU0MjllMDEtMTUwNWI5MTU=, ActorId: [1:7490568752289517892:2308], ActorState: ExecuteState, TraceId: 01jr88c3zb99aamyxzkrs7j0m4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.057824Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.191199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.227816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.327350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:25.038659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568735109648064:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:25.038781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7490568760879452878:2636] === CheckClustersList. 
Ok 2025-04-07T13:52:31.250249Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.269206Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.270463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568786649256948:2801], Recipient [1:7490568739404615633:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.270497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.270521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.270564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568786649256944:2798], Recipient [1:7490568739404615633:2187]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.270578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.367002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.367518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.367801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.367840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.367866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.367901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.367946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.368099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:52:31. ... 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.375186Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.375260Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.375260Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.375285Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.375285Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.375314Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:21.375314Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:21.381512Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569215660315798:2432], Partition 0, Sender [0:0:0], Recipient [5:7490569215660315858:2436], Cookie: 0 2025-04-07T13:54:21.381588Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569215660315858:2436]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.381611Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.381657Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.381752Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.381793Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.381825Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:21.409007Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:7490569258609990320:3593], Recipient [5:7490569159825739840:2142]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:21.409065Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:21.409089Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:54:21.409601Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:7490569215660315935:2451], Recipient [5:7490569159825739840:2142]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037894 TableLocalId: 15 Generation: 1 Round: 0 TableStats { DataSize: 808 RowCount: 3 IndexSize: 0 InMemSize: 808 LastAccessTime: 1744034060749 LastUpdateTime: 1744034058566 ImmediateTxCompleted: 6 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 6 RowDeletes: 0 RowReads: 6 RangeReads: 7 PartCount: 0 RangeReadRows: 24 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 6 LocksWholeShard: 0 LocksBroken: 6 } TabletMetrics { Memory: 82256 } ShardState: 2 UserTablePartOwners: 72075186224037894 NodeId: 5 StartTime: 1744034051369 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T13:54:21.409631Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T13:54:21.409672Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 15] state 'Ready' dataSize 808 rowCount 3 cpuUsage 0 2025-04-07T13:54:21.409775Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 15] raw table stats: DataSize: 808 RowCount: 3 IndexSize: 0 InMemSize: 808 LastAccessTime: 1744034060749 LastUpdateTime: 1744034058566 ImmediateTxCompleted: 6 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 6 RowDeletes: 0 RowReads: 6 RangeReads: 7 PartCount: 0 RangeReadRows: 24 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 6 LocksWholeShard: 0 LocksBroken: 6 2025-04-07T13:54:21.409797Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 2025-04-07T13:54:21.421309Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7490569159825739840:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-07T13:54:21.421367Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-07T13:54:21.421390Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:54:21.421400Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T13:54:21.421452Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-07T13:54:21.421760Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7490569159825739840:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 
2025-04-07T13:54:21.421782Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-07T13:54:21.421798Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:54:21.476936Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569254315022743:2761], Partition 1, Sender [0:0:0], Recipient [5:7490569254315022835:2775], Cookie: 0 2025-04-07T13:54:21.477018Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569254315022835:2775]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.477043Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.477091Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.477163Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.477190Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.477219Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:21.477275Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569254315022749:2762], Partition 2, Sender [0:0:0], Recipient [5:7490569254315022828:2772], Cookie: 0 2025-04-07T13:54:21.477317Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569254315022828:2772]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.477332Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.477355Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.477399Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.477417Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.477435Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:21.483119Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569215660315798:2432], Partition 0, Sender [0:0:0], Recipient [5:7490569215660315858:2436], Cookie: 0 2025-04-07T13:54:21.483194Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569215660315858:2436]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.483218Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:21.483264Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:21.483329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:21.483356Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:21.483383Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:21.510124Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7490569159825739840:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:21.510175Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:21.510191Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:54:21.510250Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T13:54:21.510270Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T13:54:21.510337Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 15 shard idx 72057594046644480:7 data size 808 row count 3 2025-04-07T13:54:21.510392Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], pathId map=migrations, is column=0, is olap=0 2025-04-07T13:54:21.510416Z node 5 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 15: RowCount 3, DataSize 808 2025-04-07T13:54:21.510433Z node 5 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-07T13:54:21.510495Z node 5 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 3, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-07T13:54:21.510611Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T13:54:21.511051Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7490569159825739840:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:21.511079Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:21.511100Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 
is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:20.228314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:20.228441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:20.228482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:20.228523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:20.228567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:20.228610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:20.228691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:20.229023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:20.312505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:20.312569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:20.336387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:20.336551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:20.336955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:20.353805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:20.354048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:20.354748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.354971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:20.356996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:20.360498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:20.360570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:20.360720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:20.360768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:20.360808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:20.361004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.370028Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:20.517313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:20.517582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.517841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:20.518085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:20.518159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.525009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.525164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:20.525369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.525432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:20.525495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:20.525557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:20.531700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.531796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:20.531842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:20.536470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.536547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.536594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.536709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.541224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:20.547410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg 
type: 269090816 2025-04-07T13:54:20.547638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:20.548714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:20.548892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:20.548965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.549248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:20.549309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:20.549480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:20.549590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:20.558395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:20.558477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:20.558662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:20.558722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:20.559031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:20.559083Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:20.559201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:20.559242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.559300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:20.559349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:20.559388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:20.559417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:20.559506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:20.559549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-04-07T13:54:20.559580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:20.561517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:20.561664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:20.561706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:20.561744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:20.561801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:20.561904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... { ShardId: 72075186233409548 CpuTimeUsec: 547 } } 2025-04-07T13:54:23.841633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:54:23.841676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-07T13:54:23.841805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:54:23.841857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-07T13:54:23.862162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.862252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T13:54:23.862300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T13:54:23.862341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-07T13:54:23.862444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T13:54:23.862590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-07T13:54:23.862723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:54:23.862814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:54:23.866775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.877607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.885592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:23.885669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:54:23.885925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:54:23.886116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:23.886192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:54:23.886297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T13:54:23.886801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.886862Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:54:23.886946Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.886988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T13:54:23.887053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T13:54:23.887955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:54:23.888059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:54:23.888093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:54:23.888160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-07T13:54:23.888205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:54:23.889403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:54:23.889493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:54:23.889535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at 
schemeshard: 72057594046678944, txId: 103 2025-04-07T13:54:23.889566Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T13:54:23.889594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T13:54:23.889663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:54:23.905506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:54:23.905596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:23.906027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:54:23.906234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:54:23.906271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:54:23.906316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:54:23.906395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2372] message: TxId: 103 2025-04-07T13:54:23.906436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:54:23.906471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:54:23.906499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:54:23.906580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:54:23.906918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:23.907004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:54:23.907924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:54:23.914672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:54:23.916393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:23.916455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-07T13:54:23.916564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:54:23.916606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:731:2669] 2025-04-07T13:54:23.917454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 
2025-04-07T13:54:23.918917Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:54:23.919164Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 221us result status StatusSuccess 2025-04-07T13:54:23.919610Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-07T13:52:19.825594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568732286735978:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:19.825639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:20.068157Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b6f/r3tmp/tmp7LIxzj/pdisk_1.dat 2025-04-07T13:52:20.398900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:20.398988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:20.408924Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-07T13:52:20.411553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21068, node 1 2025-04-07T13:52:20.512485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b6f/r3tmp/yandexIcqF7O.tmp 2025-04-07T13:52:20.512513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b6f/r3tmp/yandexIcqF7O.tmp 2025-04-07T13:52:20.512694Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b6f/r3tmp/yandexIcqF7O.tmp 2025-04-07T13:52:20.512782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:20.560136Z INFO: TTestServer started on Port 3937 GrpcPort 21068 TClient is connected to server localhost:3937 PQClient connected to localhost:21068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:20.898619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.910872Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.923616Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:20.929215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.081483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:23.429016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568749466605949:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.429183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.433383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568749466605961:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.452394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:23.469197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568749466605963:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.056619Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568749466606035:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.058511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjRiYzg4NTAtMTMxNzM0MjEtZGE5YjAyZGItOWRkNzQ5MjM=, ActorId: [1:7490568749466605946:2308], ActorState: ExecuteState, TraceId: 01jr88c3v7evk0f33rm26d7bxt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.060791Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.192255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.229282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.319838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:24.827235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568732286735978:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:24.827301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7490568758056540934:2639] === CheckClustersList. 
Ok 2025-04-07T13:52:30.995798Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.055189Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.071397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568783826344990:2789], Recipient [1:7490568736581703669:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.071456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.071492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.071568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568783826344986:2786], Recipient [1:7490568736581703669:2184]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.071585Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.130922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.131388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.132907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.132957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.132986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.133016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.133085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.133252Z node 1 :FLAT_TX ... 2:0:1 2025-04-07T13:54:22.578857Z :INFO: [/Root] [/Root] [605d6d7-8f26d53d-9898047d-55b84a3a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2533 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 29 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:22.578962Z :NOTICE: [/Root] [/Root] [605d6d7-8f26d53d-9898047d-55b84a3a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> Session-2 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-04-07T13:54:22.582262Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_5_2_1141062367756515672_v1 grpc read done: success# 0, data# { } 2025-04-07T13:54:22.582305Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_1141062367756515672_v1 grpc read failed 2025-04-07T13:54:22.582336Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_1141062367756515672_v1 grpc closed 2025-04-07T13:54:22.582366Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_1141062367756515672_v1 is DEAD 2025-04-07T13:54:22.583393Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7490569253322360191:2754] disconnected; active server actors: 1 2025-04-07T13:54:22.583430Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7490569253322360191:2754] client test-consumer disconnected session test-consumer_5_2_1141062367756515672_v1 2025-04-07T13:54:22.583603Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7490569261912295226:3652], Recipient [5:7490569214667653328:2435]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.583626Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.583644Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.583674Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_2_1141062367756515672_v1 2025-04-07T13:54:22.583705Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490569261912295225:2852] destroyed 2025-04-07T13:54:22.583748Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_2_1141062367756515672_v1 2025-04-07T13:54:22.595027Z :INFO: [/Root] TraceId [] SessionId [producer-1|822bd784-c040f40b-f64a777b-d195f67e_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-07T13:54:22.595085Z :INFO: [/Root] TraceId [] SessionId [producer-1|822bd784-c040f40b-f64a777b-d195f67e_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:54:22.595131Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|822bd784-c040f40b-f64a777b-d195f67e_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:54:22.595873Z :INFO: [/Root] TraceId [] SessionId [producer-1|822bd784-c040f40b-f64a777b-d195f67e_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:54:22.595915Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|822bd784-c040f40b-f64a777b-d195f67e_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:54:22.620399Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|822bd784-c040f40b-f64a777b-d195f67e_0 grpc read done: success: 0 data: 2025-04-07T13:54:22.620438Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|822bd784-c040f40b-f64a777b-d195f67e_0 grpc read failed 2025-04-07T13:54:22.620459Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|822bd784-c040f40b-f64a777b-d195f67e_0 grpc closed 2025-04-07T13:54:22.620473Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|822bd784-c040f40b-f64a777b-d195f67e_0 is DEAD 2025-04-07T13:54:22.621344Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7490569253322360211:2761], Recipient [5:7490569253322360213:2761]: NActors::TEvents::TEvPoison 2025-04-07T13:54:22.621417Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:54:22.621623Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7490569253322360244:3444], Recipient [5:7490569214667653328:2435]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.621654Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.621673Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:22.621705Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490569253322360242:2761] destroyed 2025-04-07T13:54:22.621790Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7490569214667653328:2435], Partition 0, Sender [5:7490569214667653328:2435], Recipient [5:7490569214667653376:2439], Cookie: 0 2025-04-07T13:54:22.621854Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7490569214667653328:2435], Recipient [5:7490569214667653376:2439]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-07T13:54:22.621888Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-07T13:54:22.621919Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:54:22.621959Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-07T13:54:22.621995Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:22.622062Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:22.622087Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:22.622114Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:22.628103Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569214667653328:2435], Partition 0, Sender [0:0:0], Recipient [5:7490569214667653376:2439], Cookie: 0 2025-04-07T13:54:22.628178Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569214667653376:2439]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.628201Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.628256Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:22.628329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:22.628351Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:22.628380Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:22.642242Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7490569158833077325:2136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:54:22.642298Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:54:22.642352Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7490569158833077325:2136], Recipient [5:7490569158833077325:2136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:54:22.642370Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:54:22.680127Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569257617327644:2793], Partition 2, Sender [0:0:0], Recipient [5:7490569257617327733:2802], Cookie: 0 2025-04-07T13:54:22.680192Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569257617327733:2802]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.680210Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.680255Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:22.680309Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:22.680328Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:22.680346Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:22.680378Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569257617327650:2794], Partition 1, Sender [0:0:0], Recipient [5:7490569257617327736:2804], Cookie: 0 2025-04-07T13:54:22.680401Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569257617327736:2804]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.680409Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.680423Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:22.680441Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:22.680451Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:22.680461Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:22.728598Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569214667653328:2435], Partition 0, Sender [0:0:0], Recipient [5:7490569214667653376:2439], Cookie: 0 2025-04-07T13:54:22.728699Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569214667653376:2439]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.728726Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:22.728771Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:22.728850Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:22.728881Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:22.728909Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 10.810952 seconds >> TColumnShardTestReadWrite::WriteExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 24860, MsgBus: 9354 2025-04-07T13:53:44.518397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569099322499299:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:44.525070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a3e/r3tmp/tmpO2sGaQ/pdisk_1.dat 2025-04-07T13:53:45.446416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:45.545309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:45.545403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:45.588970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24860, node 1 2025-04-07T13:53:45.773228Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:45.773250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:45.773257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:45.773340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9354 TClient is connected to server localhost:9354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:47.236487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:47.304759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:47.552549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:47.840707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:48.011221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:49.512952Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569099322499299:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:49.513071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:51.625171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569129387271955:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:51.625358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:51.706736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:51.766225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:51.804792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:51.845387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:51.907023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:53:51.983799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:53:52.086656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569133682239756:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:52.086754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:52.089102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569133682239761:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:52.095928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:53:52.116512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569133682239763:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:53:53.591488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2025-04-07T13:53:54.585372Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034034533, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 20762, MsgBus: 29535 2025-04-07T13:53:55.520381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569145279235817:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:55.520574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a3e/r3tmp/tmpk5MveL/pdisk_1.dat 2025-04-07T13:53:55.994748Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:56.004191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:56.004265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:56.012881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20762, node 2 2025-04-07T13:53:56.257266Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:56.257294Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:56.257301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:56.257410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29535 TClient is connected to server localhost:29535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:57.218847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:57.233442Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:53:57.255634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:57.395336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:57.740148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:57.855896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:05.823307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:05.999153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:06.104321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:08.852797Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569202333702918:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.859684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:08.905584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:08.948165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.017458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.089412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.150522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.224034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.324464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569206628670713:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:09.324570Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:09.324916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569206628670718:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:09.329423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:09.346236Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:54:09.346504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569206628670720:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:09.619920Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569185153832051:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:09.620016Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:15.047581Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034052110, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4350, MsgBus: 10548 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a3e/r3tmp/tmpXJvQH3/pdisk_1.dat 2025-04-07T13:54:16.362365Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:16.441884Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:16.466119Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:16.466238Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:16.471213Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4350, node 4 2025-04-07T13:54:16.654001Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:16.654031Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:16.654040Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:16.654159Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10548 TClient is connected to server localhost:10548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:17.445276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:17.485341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:17.628626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:17.847972Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:18.024488Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:21.049914Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569257058297913:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:21.049994Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:21.115943Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.211510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.284520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.336416Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.418746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.468899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:54:21.587634Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569257058298420:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:21.587741Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:21.587957Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569257058298425:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:21.594341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:54:21.611712Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569257058298427:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:54:23.764339Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034063310, txId: 281474976710671] shutting down >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser |86.8%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> Cdc::OldImageLogDebezium >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> JsonChangeRecord::DataChange [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> JsonChangeRecord::DataChangeVersion [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-04-07T13:53:53.855096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569137756914343:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:53.855288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00166a/r3tmp/tmptkf3ZF/pdisk_1.dat 2025-04-07T13:53:54.504408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:54.522414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:54.527707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25625, node 1 2025-04-07T13:53:54.745169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:54.907931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-07T13:53:54.914875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:54.914895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:54.914993Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:55.520227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:55.581717Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:55.584642Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:53:55.584721Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Connect to grpc://localhost:21043 2025-04-07T13:53:55.587131Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-07T13:53:55.608210Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Status 14 Service Unavailable 2025-04-07T13:53:55.608544Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:53:55.608573Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:53:55.608631Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:53:55.608864Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 
2025-04-07T13:53:55.614317Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Status 14 Service Unavailable 2025-04-07T13:53:55.614587Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:53:55.614663Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:53:56.908990Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-07T13:53:56.909070Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:53:56.909473Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-07T13:53:56.917326Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Status 14 Service Unavailable 2025-04-07T13:53:56.917681Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:53:56.917707Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-07T13:53:57.911273Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-07T13:53:57.911364Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:53:57.911894Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-07T13:53:57.919222Z node 1 :GRPC_CLIENT DEBUG: [5160000300d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-07T13:53:57.921079Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:53:58.816899Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569137756914343:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:58.816957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:08.571599Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569199493358234:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:08.571791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/00166a/r3tmp/tmpQMo2iq/pdisk_1.dat 2025-04-07T13:54:08.823504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:08.872416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:08.872510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:08.881838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11713, node 2 2025-04-07T13:54:09.061408Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:09.061432Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:09.061438Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:09.061520Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:54:09.499600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:09.514345Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:54:09.514394Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Connect to grpc://localhost:22531 2025-04-07T13:54:09.515204Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-07T13:54:09.559854Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Status 14 Service Unavailable 2025-04-07T13:54:09.560123Z node 2 :TICKET_PARSER TRACE: ... 
7490569238071589267:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:17.685172Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00166a/r3tmp/tmpmRojMQ/pdisk_1.dat 2025-04-07T13:54:17.817217Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:17.831492Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:17.831577Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:17.833575Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17964, node 4 2025-04-07T13:54:17.954790Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:17.954813Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:17.954820Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:17.954940Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:54:18.281532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:18.289471Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-07T13:54:18.289548Z node 4 :GRPC_CLIENT DEBUG: [5160000a6bd0] Connect to grpc://localhost:63118 2025-04-07T13:54:18.290786Z node 4 :GRPC_CLIENT DEBUG: [5160000a6bd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-07T13:54:18.306359Z node 4 :GRPC_CLIENT DEBUG: [5160000a6bd0] Status 14 Service Unavailable 2025-04-07T13:54:18.312961Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:18.313002Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:18.313035Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:18.313108Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-07T13:54:18.313504Z node 4 :GRPC_CLIENT DEBUG: [5160000a6bd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 14: "Service Unavailable" 2025-04-07T13:54:18.318150Z node 4 :GRPC_CLIENT DEBUG: [5160000a6bd0] Status 14 Service Unavailable 2025-04-07T13:54:18.320784Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:18.320831Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:18.320882Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:21.987574Z node 5 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569256396193842:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00166a/r3tmp/tmpv57Dso/pdisk_1.dat 2025-04-07T13:54:22.045582Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:22.100988Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:22.114059Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:22.114146Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:22.115817Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63644, node 5 2025-04-07T13:54:22.320933Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:22.320956Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:22.320965Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:22.321072Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:22.652436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:22.669007Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-07T13:54:22.669067Z node 5 :GRPC_CLIENT DEBUG: [5160000387d0] Connect to grpc://localhost:23704 2025-04-07T13:54:22.670052Z node 5 :GRPC_CLIENT DEBUG: [5160000387d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-07T13:54:22.690490Z node 5 :GRPC_CLIENT DEBUG: [5160000387d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-07T13:54:22.692954Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:22.693399Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-07T13:54:22.693697Z node 5 :GRPC_CLIENT DEBUG: [5160000387d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-07T13:54:22.701214Z node 5 :GRPC_CLIENT DEBUG: [5160000387d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-04-07T13:54:22.704279Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> JsonChangeRecord::Heartbeat [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TTicketParserTest::AuthorizationUnavailable [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2025-04-07T13:53:59.342768Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569163247123900:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:59.343053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015af/r3tmp/tmpDpgleL/pdisk_1.dat 2025-04-07T13:53:59.790374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:59.794186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:59.794274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:59.802590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30954, node 1 2025-04-07T13:53:59.924968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:59.924987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:59.924993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:59.925082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:00.401991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:00.418249Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:00.418363Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Connect to grpc://localhost:14761 2025-04-07T13:54:00.430270Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:00.455225Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2025-04-07T13:54:00.455720Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-07T13:54:00.455762Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:00.455788Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:00.456131Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:00.463581Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2025-04-07T13:54:00.463985Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-07T13:54:00.464022Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:01.355518Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:01.355589Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:01.355972Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:01.373066Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2025-04-07T13:54:01.373311Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-07T13:54:01.373335Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:02.356366Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:02.356409Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:02.356634Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:02.362605Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 
2025-04-07T13:54:02.363088Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-07T13:54:02.363117Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:04.332487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569163247123900:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:04.332567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:04.364801Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:04.364852Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:04.365120Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:04.367508Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:04.368121Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:04.368247Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-07T13:54:13.184506Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569222183720130:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:13.184578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015af/r3tmp/tmpXejtXb/pdisk_1.dat 2025-04-07T13:54:13.475303Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:13.477766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:13.477862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:13.480895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16080, node 2 2025-04-07T13:54:13.633086Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:13.633108Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:13.633114Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:13.633202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31504 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:13.921852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:13.930450Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:13.938581Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:13.938668Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Connect to grpc://localhost:10440 2025-04-07T13:54:13.939757Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:13.951694Z node 2 :GRPC_CLIENT DEBUG: [5160000b01d0] Status 14 Service Unavailable 2025-04-07T13:54:13.951821Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error ... 7968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:21.177629Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20622, node 4 2025-04-07T13:54:21.214658Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:21.214683Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:21.214690Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:21.214796Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:21.458673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:21.466901Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:21.467033Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Connect to grpc://localhost:3884 2025-04-07T13:54:21.467832Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:21.476998Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:21.477179Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:21.477211Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:54:21.478727Z node 4 :GRPC_CLIENT DEBUG: [5160000087d0] Connect to grpc://localhost:3553 2025-04-07T13:54:21.479730Z node 4 :GRPC_CLIENT DEBUG: [5160000087d0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-07T13:54:21.503612Z node 4 :GRPC_CLIENT DEBUG: [5160000087d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-07T13:54:21.504113Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:54:21.504702Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:21.504921Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:21.507266Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Status 16 Access Denied 2025-04-07T13:54:21.507564Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:21.507598Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-07T13:54:21.508237Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:21.508300Z node 4 :TICKET_PARSER TRACE: 
Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:21.508459Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:21.508980Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:21.512550Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:21.512783Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:21.521218Z node 4 :GRPC_CLIENT DEBUG: [5160000096d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:21.521544Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-07T13:54:21.521567Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:54:21.521730Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:54:24.825596Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569271797736125:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015af/r3tmp/tmp4HO2Eq/pdisk_1.dat 2025-04-07T13:54:24.906898Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:24.981037Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:24.995685Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:24.995802Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:24.997372Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14549, node 5 2025-04-07T13:54:25.126815Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:25.126841Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:25.126849Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:25.126960Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61857 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:25.494082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:25.503326Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:25.505453Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:25.505509Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:25.505554Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Connect to grpc://localhost:17706 2025-04-07T13:54:25.506395Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:25.506743Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:25.521967Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:25.522144Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Status 14 Service Unavailable 2025-04-07T13:54:25.522572Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-07T13:54:25.522638Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-04-07T13:54:25.522673Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:25.522709Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:25.522766Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:25.522981Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:25.523591Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Request 
AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:25.527128Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Status 1 CANCELLED 2025-04-07T13:54:25.527327Z node 5 :GRPC_CLIENT DEBUG: [516000078fd0] Status 1 CANCELLED 2025-04-07T13:54:25.528412Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-04-07T13:54:25.528461Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-04-07T13:54:25.528489Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::ReadSomePrograms >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TTicketParserTest::BulkAuthorizationModify [GOOD] |86.9%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::ReadAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-04-07T13:54:26.433702Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:26.594610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:26.640791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:26.640914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:26.641187Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:26.650038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:26.650273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:26.650504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:26.650615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:26.650719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:26.650833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:26.650964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:26.651091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:26.651190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:26.651275Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:26.651365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:26.651499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:26.675714Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:26.678159Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:26.678307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:26.678368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:26.678567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:26.678730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:26.678799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:26.678836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:26.678922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:26.678982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:26.679027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:26.679066Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:26.679239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:26.679288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:26.679323Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:26.679347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:26.679445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:26.679499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:26.679542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:26.679572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:26.679646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:26.679683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:26.679712Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:26.679773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:26.679810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:26.679834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:26.679986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:54:26.680073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-07T13:54:26.680159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-07T13:54:26.680246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T13:54:26.680405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:26.680462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:26.680493Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:26.680715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:26.680762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:26.680792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:26.680911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:26.680945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:26.680983Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:26.681176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:26.681228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:26.681253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:26.681364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-07T13:54:31.205182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2025-04-07T13:53:59.366782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569164441208738:2256];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:59.367179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164d/r3tmp/tmpuLe626/pdisk_1.dat 2025-04-07T13:53:59.747076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:59.808428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:59.808648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:59.811947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22127, node 1 2025-04-07T13:53:59.877361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:59.877394Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:59.877401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:59.877519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:16962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:00.323931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:00.342352Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:00.342418Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Connect to grpc://localhost:24380 2025-04-07T13:54:00.357377Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:00.373865Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2025-04-07T13:54:00.374119Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:00.374158Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:00.374262Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:00.374531Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:00.376624Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2025-04-07T13:54:00.376913Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:00.376937Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:01.372962Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:01.373102Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:01.373734Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request 
BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:01.377014Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2025-04-07T13:54:01.378253Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:01.378282Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:02.376999Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:02.377109Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:02.377599Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:02.382971Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:02.383248Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-07T13:54:04.366937Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569164441208738:2256];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:04.367010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:13.197361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569221575202759:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164d/r3tmp/tmpCON4mn/pdisk_1.dat 2025-04-07T13:54:13.284995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:13.344018Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:13.377514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:13.377622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:13.383883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10040, node 2 2025-04-07T13:54:13.537737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:13.537765Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:13.537773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:13.537867Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1763 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:54:13.893435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:13.905381Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:13.905431Z node 2 :GRPC_CLIENT DEBUG: [5160000861d0] Connect to grpc://localhost:27207 2025-04-07T13:54:13.906348Z node 2 :GRPC_CLIENT DEBUG: [5160000861d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:13.920868Z node 2 :GRPC_CLIENT DEBUG: [5160000861d0] Status 14 Service Unavailable 2025-04-07T13:54:13.921222Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-07T13:54:13.921267Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:13.921327Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:13.921630Z node 2 :GRPC_CLIENT DEBUG: [5160000861d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:13.923640Z node 2 :GRPC_CLIENT DEBUG: [5160000861d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:13.923816Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as test_client.cpp: SetPath # /home ... 
ILED } 2025-04-07T13:54:18.455632Z node 3 :GRPC_CLIENT DEBUG: [51600007c8d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:18.455847Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:21.467135Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569255872730945:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164d/r3tmp/tmppeyd5z/pdisk_1.dat 2025-04-07T13:54:21.518657Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:21.617735Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15542, node 4 2025-04-07T13:54:21.696273Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:21.696377Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:21.700415Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:21.715609Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:21.715630Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:21.715637Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:21.715752Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:54:22.050060Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:22.064312Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-07T13:54:22.064391Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-04-07T13:54:22.064421Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-04-07T13:54:22.064445Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-07T13:54:22.064466Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-04-07T13:54:22.064513Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Connect to grpc://localhost:12691 2025-04-07T13:54:22.067743Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:22.068122Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:22.068281Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:22.068381Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:22.068500Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-07T13:54:22.081743Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Status 16 Access Denied 2025-04-07T13:54:22.082037Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Status 16 Access Denied 2025-04-07T13:54:22.082371Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:22.082436Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:22.090802Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Status 16 Access Denied 2025-04-07T13:54:22.090923Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:22.091273Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:22.091401Z node 4 :GRPC_CLIENT DEBUG: [5160000702d0] Status 16 Access Denied 2025-04-07T13:54:22.091430Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-07T13:54:22.091453Z 
node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-04-07T13:54:22.091471Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-07T13:54:22.093238Z node 4 :GRPC_CLIENT DEBUG: [51600006fcd0] Connect to grpc://localhost:2780 2025-04-07T13:54:22.094305Z node 4 :GRPC_CLIENT DEBUG: [51600006fcd0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-07T13:54:22.101354Z node 4 :GRPC_CLIENT DEBUG: [51600006fcd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-07T13:54:22.101768Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-07T13:54:25.077545Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569273116679149:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:25.078486Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164d/r3tmp/tmpJGg74k/pdisk_1.dat 2025-04-07T13:54:25.231406Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:25.259954Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:25.260038Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:25.265429Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11534, node 5 2025-04-07T13:54:25.332880Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:25.332913Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:25.332924Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:25.333048Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:25.757974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:25.764819Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:25.768133Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-04-07T13:54:25.768184Z node 5 :GRPC_CLIENT DEBUG: [516000094bd0] Connect to grpc://localhost:23407 2025-04-07T13:54:25.769363Z node 5 :GRPC_CLIENT DEBUG: [516000094bd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-07T13:54:25.785852Z node 5 :GRPC_CLIENT DEBUG: [516000094bd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:25.786168Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:25.786700Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-07T13:54:25.786917Z node 5 :GRPC_CLIENT DEBUG: [516000094bd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-07T13:54:25.788457Z node 5 :GRPC_CLIENT DEBUG: [516000094bd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:25.788626Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> Backup::ProposeBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-04-07T13:54:27.567954Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:27.740741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:27.811207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:27.811306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:27.817412Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:27.840225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:27.840493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:27.840837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:27.840950Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:27.841050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:27.841189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:27.841302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:27.841407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:27.841503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:27.841596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:27.841693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:27.841794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:27.926546Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:27.929120Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:27.929322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:27.929387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:27.929584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:27.929753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:27.929823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:27.929859Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:27.929945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:27.930014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:27.930054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:27.930094Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:27.930277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:27.930343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:27.930378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:27.930405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:27.930490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:27.930538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:27.930592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:27.930619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:27.930702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:27.930751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:27.930777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:27.930822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:27.930880Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:27.930914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:27.931099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-07T13:54:27.931177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-07T13:54:27.931268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-07T13:54:27.931361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-07T13:54:27.931511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:27.931559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:27.931589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:27.933929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:27.934007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:27.934042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:27.934182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:27.934238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:27.934268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:27.934473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:27.934515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:27.934549Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:27.934776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... ;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-07T13:54:32.239366Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> Normalizers::ColumnChunkNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-07T13:52:21.452780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568742146646117:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:21.452822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:21.645548Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aec/r3tmp/tmp8f99U7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5425, node 1 2025-04-07T13:52:21.821785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:21.840302Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:21.840322Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:21.904083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:21.911249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:21.917414Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:21.936070Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001aec/r3tmp/yandexx5dVkm.tmp 2025-04-07T13:52:21.936097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001aec/r3tmp/yandexx5dVkm.tmp 2025-04-07T13:52:21.936267Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001aec/r3tmp/yandexx5dVkm.tmp 2025-04-07T13:52:21.936376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:21.975453Z INFO: TTestServer started on Port 26071 GrpcPort 5425 TClient is connected to server localhost:26071 PQClient connected to localhost:5425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:22.238232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:22.253481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:22.263068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:22.269477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:52:25.028314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568759326515854:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.028550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.029942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568759326515874:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:25.033981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:25.049780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568759326515876:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:25.346302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:25.351476Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568759326515948:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:25.353423Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMzODY0YWUtODNlZTQ4MTktZWJhMTVhZGUtYTdiNjYyMw==, ActorId: [1:7490568759326515845:2308], ActorState: ExecuteState, TraceId: 01jr88c5dnf7h5bw6zf59e251y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:25.355409Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:25.403817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:25.520210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568759326516218:2617] 2025-04-07T13:52:26.456907Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568742146646117:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:26.500730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.948838Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.996067Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.997365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568785096320305:2794], Recipient [1:7490568742146646319:2201]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.997398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.997424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.997460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568785096320301:2791], Recipient [1:7490568742146646319:2201]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.997475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:32.074848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:32.075539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:32.075832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:32.075873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:32.075898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:32.075934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:32.075978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:32.076108Z no ... AD_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7490569289209089850:2728] client test-consumer disconnected session test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.537516Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7490569293504057361:3449], Recipient [5:7490569293504057222:2747]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537544Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537574Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537595Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.537623Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7490569293504057357:2773] destroyed 2025-04-07T13:54:30.537678Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7490569293504057360:3448], Recipient [5:7490569293504057217:2746]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537691Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537702Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537714Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.537733Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7490569293504057356:2772] destroyed 2025-04-07T13:54:30.537776Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7490569289209089854:3331], Recipient [5:7490569259144317688:2432]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537790Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537801Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:30.537813Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.537828Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490569289209089853:2731] destroyed 2025-04-07T13:54:30.538329Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.538358Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.538373Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4136134821063939914_v1 2025-04-07T13:54:30.696227Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569293504057217:2746], Partition 2, Sender [0:0:0], Recipient [5:7490569293504057315:2761], Cookie: 0 2025-04-07T13:54:30.696326Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569293504057315:2761]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.696348Z node 5 :PERSQUEUE 
TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.696390Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.696461Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.696483Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.696512Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:30.696556Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569259144317688:2432], Partition 0, Sender [0:0:0], Recipient [5:7490569259144317738:2435], Cookie: 0 2025-04-07T13:54:30.696589Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569259144317738:2435]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.696601Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.696626Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.696654Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.696970Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.696997Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:30.697043Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569293504057222:2747], Partition 1, Sender [0:0:0], Recipient [5:7490569293504057320:2763], Cookie: 0 2025-04-07T13:54:30.697078Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569293504057320:2763]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.697094Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.697117Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.697148Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.697164Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.697182Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:30.796915Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569293504057217:2746], Partition 2, Sender [0:0:0], Recipient [5:7490569293504057315:2761], Cookie: 0 2025-04-07T13:54:30.796989Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569293504057315:2761]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797009Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797052Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.797121Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.797141Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.797168Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:30.797208Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569259144317688:2432], Partition 0, Sender [0:0:0], Recipient [5:7490569259144317738:2435], Cookie: 0 2025-04-07T13:54:30.797240Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569259144317738:2435]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797253Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797278Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.797304Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.797319Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.797336Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:30.797598Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569293504057222:2747], Partition 1, Sender [0:0:0], Recipient [5:7490569293504057320:2763], Cookie: 0 2025-04-07T13:54:30.797632Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569293504057320:2763]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797644Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:30.797669Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:30.797697Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:30.797713Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:30.797729Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:30.813010Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:7490569211899676629:2291], Recipient [5:7490569207604709013:2138]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 3 Generation: 1 Round: 1 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 498 Memory: 119120 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 1744034050654 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T13:54:30.813057Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T13:54:30.813091Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0498 2025-04-07T13:54:30.813203Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T13:54:30.813224Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-04-07T13:52:19.707398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568734652320710:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:19.707456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:19.896752Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b75/r3tmp/tmpnoUJie/pdisk_1.dat 2025-04-07T13:52:20.208777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:20.210650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:20.210754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:20.214939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5289, node 1 2025-04-07T13:52:20.389220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/u6pr/001b75/r3tmp/yandexyysCmL.tmp 2025-04-07T13:52:20.389247Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b75/r3tmp/yandexyysCmL.tmp 2025-04-07T13:52:20.398074Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b75/r3tmp/yandexyysCmL.tmp 2025-04-07T13:52:20.398206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:20.427710Z INFO: TTestServer started on Port 19484 GrpcPort 5289 TClient is connected to server localhost:19484 PQClient connected to localhost:5289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:20.807115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.831137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:20.842978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:20.848483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.001078Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.011781Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:52:23.533613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568751832190475:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.533733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.536747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568751832190487:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:23.549959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:23.575355Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-07T13:52:23.576412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568751832190489:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.035789Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568751832190565:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.046957Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDg4NjFlMTMtYTkyMGQ5ZTMtMjhjZGIwYzUtMThhMGQ5ZmI=, ActorId: [1:7490568751832190449:2308], ActorState: ExecuteState, TraceId: 01jr88c3yc9tne3v4dec7gn9vh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.048290Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.193359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.227738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.367758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:52:24.708970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568734652320710:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:24.709035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7490568760422125462:2638] === CheckClustersList. 
Ok 2025-04-07T13:52:31.051205Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.136348Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.137571Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568786191929531:2801], Recipient [1:7490568738947288227:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.137605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.137648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.137698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568786191929527:2798], Recipient [1:7490568738947288227:2197]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.137711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.225523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.226040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.226356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.226405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.226437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.226486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.226544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard ... 
UG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 sending to client partition status 2025-04-07T13:54:28.703348Z :INFO: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2025-04-07T13:54:28.705153Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-04-07T13:54:28.705331Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 got read request: guid# cd20d9f3-a333c1d4-88e90570-a8a5b48a 2025-04-07T13:54:28.705615Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2025-04-07T13:54:28.705846Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-04-07T13:54:28.705938Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 4 committedOffset 4 clientCommitOffset (empty maybe) clientReadOffset 5 2025-04-07T13:54:28.706001Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-04-07T13:54:28.706103Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1744034068417, sizeLag# 831 2025-04-07T13:54:28.706134Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1TEvPartitionReady. 
Aval parts: 1 2025-04-07T13:54:28.706230Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 performing read request: guid# 49433c90-7b493650-6fa3bcb7-2d18d6de, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 997, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T13:54:28.706334Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 997 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 4 committedOffset 4 Guid 49433c90-7b493650-6fa3bcb7-2d18d6de 2025-04-07T13:54:28.706849Z node 8 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T13:54:28.706889Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T13:54:28.706994Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 4 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 997 endOffset 10 max time lag 0ms effective offset 5 2025-04-07T13:54:28.707018Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 4 added 0 blobs, size 0 count 0 last offset 5 2025-04-07T13:54:28.707161Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 4. All data is from uncompacted head. 2025-04-07T13:54:28.707182Z node 8 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:54:28.707319Z node 8 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-04-07T13:54:28.707366Z node 8 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:54:28.708427Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1744034068532 CreateTimestampMS: 1744034068516 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1744034068596 CreateTimestampMS: 1744034068516 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1744034068596 CreateTimestampMS: 1744034068516 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1744034068596 CreateTimestampMS: 1744034068516 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1744034068596 CreateTimestampMS: 1744034068516 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 5 } 2025-04-07T13:54:28.708645Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-04-07T13:54:28.708702Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid 49433c90-7b493650-6fa3bcb7-2d18d6de has messages 1 2025-04-07T13:54:28.708895Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 read done: guid# 49433c90-7b493650-6fa3bcb7-2d18d6de, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 558 2025-04-07T13:54:28.708925Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 response to read: guid# 49433c90-7b493650-6fa3bcb7-2d18d6de 2025-04-07T13:54:28.709132Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 Process answer. Aval parts: 0 2025-04-07T13:54:28.709864Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] Got ReadResponse, serverBytesSize = 558, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428242 2025-04-07T13:54:28.709991Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428242 2025-04-07T13:54:28.710228Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2025-04-07T13:54:28.710282Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] Returning serverBytesSize = 558 to budget 2025-04-07T13:54:28.710318Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] In ContinueReadingDataImpl, ReadSizeBudget = 558, ReadSizeServerDelta = 52428242 2025-04-07T13:54:28.710552Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T13:54:28.712779Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-04-07T13:54:28.712849Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-04-07T13:54:28.712886Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (7-7) 2025-04-07T13:54:28.712921Z :DEBUG: [] Take Data. Partition 0. Read: {1, 2} (8-8) 2025-04-07T13:54:28.712957Z :DEBUG: [] Take Data. Partition 0. Read: {1, 3} (9-9) 2025-04-07T13:54:28.713020Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] The application data is transferred to the client. Number of messages 5, size 115 bytes 2025-04-07T13:54:28.713071Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:54:28.713218Z :INFO: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] Closing read session. 
Close timeout: 0.000000s 2025-04-07T13:54:28.713255Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:4 2025-04-07T13:54:28.713299Z :INFO: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 95 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:28.713405Z :NOTICE: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:54:28.713451Z :DEBUG: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] [] Abort session to cluster 2025-04-07T13:54:28.713359Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc read done: success# 1, data# { read_request { bytes_size: 558 } } 2025-04-07T13:54:28.713477Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 got read request: guid# 945b5628-5d4294a9-42bb647f-2c759b8a 2025-04-07T13:54:28.713654Z :NOTICE: [] [] [7c7fa701-67d80a79-be57eb0a-1b3af5b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:54:28.720020Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|be5888b4-9253cace-2d01e590-5c0b2b82_0] Write session: destroy 2025-04-07T13:54:28.724862Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc read done: success# 0, data# { } 2025-04-07T13:54:28.724897Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc read failed 2025-04-07T13:54:28.724927Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 grpc closed 2025-04-07T13:54:28.724971Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_13256928184599761685_v1 is DEAD 2025-04-07T13:54:28.726056Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [7:7490569288084291094:2489] disconnected; active server actors: 1 2025-04-07T13:54:28.726086Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [7:7490569288084291094:2489] client user disconnected session shared/user_7_2_13256928184599761685_v1 2025-04-07T13:54:28.726910Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:28.726956Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_2_13256928184599761685_v1 2025-04-07T13:54:28.727002Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7490569288084291098:2492] destroyed 2025-04-07T13:54:28.727055Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_2_13256928184599761685_v1 2025-04-07T13:54:29.464228Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:54:29.464265Z node 7 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-04-07T13:53:59.203833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569164742639348:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:59.204044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164e/r3tmp/tmplr1oQD/pdisk_1.dat 2025-04-07T13:54:00.162077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:00.163697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:00.163792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:00.187706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21295, node 1 2025-04-07T13:54:00.453186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:00.453205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:00.453214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:00.453294Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:28815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:01.175447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:01.219052Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:01.222085Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:01.222139Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Connect to grpc://localhost:8816 2025-04-07T13:54:01.228236Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-07T13:54:01.248074Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:01.248359Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:04.757106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569184017961172:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164e/r3tmp/tmppsdWuS/pdisk_1.dat 2025-04-07T13:54:04.826520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:54:05.096859Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:05.097159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:05.097242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:05.106352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7412, node 2 2025-04-07T13:54:05.337223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:05.337239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:05.337246Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:05.337317Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8685 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:05.753978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:05.760578Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-04-07T13:54:05.760619Z node 2 :GRPC_CLIENT DEBUG: [5160000d11d0] Connect to grpc://localhost:6233 2025-04-07T13:54:05.761389Z node 2 :GRPC_CLIENT DEBUG: [5160000d11d0] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-04-07T13:54:05.774773Z node 2 :GRPC_CLIENT DEBUG: [5160000d11d0] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-04-07T13:54:05.776216Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164e/r3tmp/tmpGA4xI2/pdisk_1.dat 2025-04-07T13:54:10.096165Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:10.098742Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:10.136623Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:10.140875Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:10.142888Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28556, node 3 2025-04-07T13:54:10.368455Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:10.368482Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:10.368490Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:10.368623Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20136 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:10.750264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:10.773408Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:10.778599Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:10.784873Z node 3 :GRPC_CLIENT DEBUG: [5160000e16d0] Connect to grpc://localhost:6965 2025-04-07T13:54:10.791537Z node 3 :GRPC_CLIENT DEBUG: [5160000e16d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-07T13:54:10.814549Z node 3 :GRPC_CLIENT DEBUG: [5160000e16d0] Status 14 Service Unavailable 2025-04-07T13:54:10.814857Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:10.814896Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:10.815085Z node 3 :GRPC_CLIENT DEBUG: [5160000e16d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-07T13:54:10.817325Z node 3 :GRPC_CLIENT DEBUG: [5160000e16d0] Status 14 Service Unavailable 2025-04-07T13:54:10.819685Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:14.600982Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569228005767359:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:14.621748Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164e/r3tmp/tmpNmKGrs/pdisk_1.dat 2025-04-07T13:54:14.806129Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3246, node 4 2025-04-07T13:54:14.879962Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:14.879984Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:14.879992Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:14.880124Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:54:14.925362Z node 4 :HIVE WARN: HIVE#72057594037968897 
Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:14.925460Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:14.927698Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:15.280111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:15.290895Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:15.290967Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Connect to grpc://localhost:18607 2025-04-07T13:54:15.292328Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:15.310956Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Status 14 Service Unavailable 2025-04-07T13:54:15.311575Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:15.311607Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:15.311818Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:15.317188Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Status 14 Service Unavailable 2025-04-07T13:54:15.317671Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:15.656818Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:15.656866Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:15.657076Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:15.664781Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Status 14 Service Unavailable 2025-04-07T13:54:15.665533Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:17.660871Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-07T13:54:17.660926Z node 4 
:TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:17.661115Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:17.666038Z node 4 :GRPC_CLIENT DEBUG: [5160000c9fd0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:17.669833Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-07T13:54:19.600109Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569228005767359:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:19.600217Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:28.195083Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569286367327126:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:28.195141Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164e/r3tmp/tmpv2V30Q/pdisk_1.dat 2025-04-07T13:54:28.392895Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:28.407586Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:28.407676Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:28.415751Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22918, node 5 2025-04-07T13:54:28.553547Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:28.553578Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:28.553603Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:28.553748Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:54:28.963063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:28.974299Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:28.974369Z node 5 :GRPC_CLIENT DEBUG: [5160001398d0] Connect to grpc://localhost:15675 2025-04-07T13:54:28.975416Z node 5 :GRPC_CLIENT DEBUG: [5160001398d0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:28.994159Z node 5 :GRPC_CLIENT DEBUG: [5160001398d0] Status 14 Service Unavailable 2025-04-07T13:54:28.994309Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:28.994367Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-07T13:54:28.994611Z node 5 :GRPC_CLIENT DEBUG: [5160001398d0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-07T13:54:28.996786Z node 5 :GRPC_CLIENT DEBUG: [5160001398d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-07T13:54:28.997141Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-04-07T13:54:32.418505Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:32.603711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:32.625826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:32.625913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:32.626141Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:32.634474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:32.634719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:32.635009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:32.635104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:32.635213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:32.635305Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:32.635418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:32.635517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:32.635620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:32.635722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.635830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:32.635935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:32.661181Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:32.666694Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:32.666877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:32.666957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:32.667145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.667336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:32.667399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:32.667437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:32.667507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:32.667545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T13:54:32.667575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:32.667603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:32.667713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.667751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:32.667775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:32.667794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:32.667867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:32.667901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:32.667927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:32.667965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:32.668048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:32.668083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:32.668111Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:32.668148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:32.668178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:32.668214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:32.668393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T13:54:32.668473Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T13:54:32.668551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T13:54:32.668624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-04-07T13:54:32.668784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:32.668841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:32.668872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:32.669048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:32.669094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.669125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.669238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:32.669271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:32.669296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:32.669461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:32.669486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:32.669505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:32.669594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
fline=tx_controller.cpp:311;event=start; 2025-04-07T13:54:33.364377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;method=TTxController::StartProposeOnExecute;tx_info=100:TX_KIND_COMMIT_WRITE;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:340;event=registered; 2025-04-07T13:54:33.377156Z node 1 :TX_COLUMNSHARD DEBUG: method=TTxController::FinishProposeOnComplete;tx_id=100;fline=tx_controller.cpp:371;event=start;tx_info=100:TX_KIND_COMMIT_WRITE;min=1744034073553;max=1744034103553;plan=0;src=[1:97:2132];cookie=0; 2025-04-07T13:54:33.377460Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 100 at tablet 9437184, mediator 0 2025-04-07T13:54:33.377534Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-04-07T13:54:33.377670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:54:33.377894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=abstract.h:78;progress_tx_id=100;lock_id=1;broken=0; 2025-04-07T13:54:33.390307Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-07T13:54:33.390429Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=abstract.h:88;progress_tx_id=100;lock_id=1;broken=0; 2025-04-07T13:54:33.390542Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=3384; 2025-04-07T13:54:33.390860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=manager.cpp:9;event=lock;process_id=CS::INDEXATION::d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.390905Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-07T13:54:33.391025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:725;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.391228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:615;event=start_changes;type=CS::INDEXATION;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.391813Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;type=CS::INDEXATION;priority=0;; 2025-04-07T13:54:33.392024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;type=CS::INDEXATION;priority=0;; 2025-04-07T13:54:33.392068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;mem=3035;cpu=0; 2025-04-07T13:54:33.392212Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;task_id=1;mem=3035;cpu=0; 2025-04-07T13:54:33.392355Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.397431Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-07T13:54:33.397609Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-07T13:54:33.400065Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T13:54:33.400385Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[7] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T13:54:33.401289Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-07T13:54:33.401422Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:54:33.401489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:54:33.402014Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:212;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:54:33.402064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:54:33.402144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=d5b60c78-13b711f0-b463ff27-3dd5f674,;}; 2025-04-07T13:54:33.402237Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T13:54:33.402300Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:54:33.402354Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 
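The records just above show the shard starting an indexation task once there is pending inserted data (insert_overload_size=3384 bytes, well under max_limit=251658240), asking the resource broker for memory (mem=3035), and only running the task after TEvResourceAllocated arrives; the matching free_resources record appears further down. Below is a rough sketch of that gate-on-threshold-then-reserve flow, with a hypothetical broker class and the numeric values simply copied from the log; it is not the actual actor-message implementation.

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical broker stand-in: the real flow goes through
// NKikimr resource broker actor messages (ask -> allocated -> free).
struct ResourceBroker {
    uint64_t available = 8ull * 1024 * 1024;  // illustrative budget
    bool Ask(uint64_t mem) {
        if (mem > available) return false;
        available -= mem;
        return true;
    }
    void Free(uint64_t mem) { available += mem; }
};

int main() {
    const uint64_t insertOverloadSize = 3384;   // pending inserted bytes (from the log)
    const uint64_t maxLimit = 251658240;        // per-task indexation limit (from the log)
    const uint64_t taskMemory = 3035;           // memory requested for the task (from the log)

    ResourceBroker broker;
    if (insertOverloadSize > 0 && insertOverloadSize <= maxLimit && broker.Ask(taskMemory)) {
        std::cout << "start indexation: bytes=" << insertOverloadSize
                  << " mem=" << taskMemory << '\n';
        // ... write the index, commit, then release the reservation:
        broker.Free(taskMemory);
    } else {
        std::cout << "skip indexation\n";
    }
}
```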
2025-04-07T13:54:33.402413Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:33.402460Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:54:33.402547Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:54:33.402902Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {100:100} readable: {100:max} at tablet 9437184 2025-04-07T13:54:33.417669Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-07T13:54:33.417750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=with_appended.cpp:80;portions=1,;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.417845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=5656;portion_bytes=5668;portion_raw_bytes=4582; 2025-04-07T13:54:33.417921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=5668;before_rows=0;after_rows=100; 2025-04-07T13:54:33.417971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=5656;portion_bytes=5668;portion_raw_bytes=4582; 2025-04-07T13:54:33.418236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;fline=manager.cpp:14;event=unlock;process_id=CS::INDEXATION::d5b60c78-13b711f0-b463ff27-3dd5f674; 2025-04-07T13:54:33.418298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:54:33.418371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:33.418449Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T13:54:33.418515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:54:33.418573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:33.418622Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:33.418672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:54:33.418751Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:54:33.418847Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-04-07T13:54:33.418904Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-07T13:54:33.418997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:33.419135Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=d5b60c78-13b711f0-b463ff27-3dd5f674;mem=3035;cpu=0; 2025-04-07T13:54:33.419358Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-07T13:54:33.419490Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={100:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram; |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |86.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.9%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> Backup::ProposeBackup [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram >> TColumnShardTestReadWrite::WriteStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Backup::ProposeBackup [GOOD] Test command err: 2025-04-07T13:54:33.920216Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:34.034940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:34.063722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:34.063801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:34.064063Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:34.071311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:34.071519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:34.071730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:34.071871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:34.071966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:34.072061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:34.072174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:34.072329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:34.072416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:34.072477Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.072539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:34.072599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:34.095068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:34.095164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:34.110102Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:34.110457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:34.110510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:34.110688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.110843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:34.110912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:34.110948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:34.111027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:34.111082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:34.111121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:34.111152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:34.111325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.111392Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:34.111428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:34.111456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:34.111583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:34.111647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:34.111691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:34.111720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:34.111784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:34.111820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:34.111846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:34.111902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:34.111935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:34.111964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:34.112152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T13:54:34.112253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-07T13:54:34.112353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=57; 2025-04-07T13:54:34.112422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-07T13:54:34.112559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:34.112628Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:34.112661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:34.113300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:34.113356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.113392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.113556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:34.113602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:34.113646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:34.113841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:34.113885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:34.113912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
=9437184;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2400;num_rows=100;batch_columns=_yql_plan_step,_yql_tx_id,_yql_write_id; 2025-04-07T13:54:35.039132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:296:2314] send ScanData to [1:290:2308] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 2400 rows: 100 page faults: 0 finished: 0 pageFault: 0 arrow schema: _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 2025-04-07T13:54:35.039292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.039423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.039463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:35.039525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:35.040121Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:0:0:1:2:632:0]; 2025-04-07T13:54:35.055631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.055800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.055850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:35.055953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;);columns=3;rows=100; 2025-04-07T13:54:35.056012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2400;num_rows=100;batch_columns=_yql_plan_step,_yql_tx_id,_yql_write_id; 2025-04-07T13:54:35.056236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:296:2314] send ScanData to [1:290:2308] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 2400 rows: 100 page faults: 0 finished: 0 pageFault: 0 arrow schema: _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 2025-04-07T13:54:35.056366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.056470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.056640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.057218Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:0:0:1:3:632:0]; 2025-04-07T13:54:35.069237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.069367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.069440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:197;stage=scan 
iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2025-04-07T13:54:35.069471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:296:2314] finished for tablet 9437184 2025-04-07T13:54:35.069543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:296:2314] send ScanData to [1:290:2308] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:35.070002Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:296:2314] and sent to [1:290:2308] packs: 0 txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.003},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.008},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack"],"t":0.017},{"events":["l_task_result"],"t":0.02},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.065}],"full":{"a":1744034075004373,"name":"_full_task","f":1744034075004373,"d_finished":0,"c":0,"l":1744034075069572,"d":65199},"events":[{"name":"bootstrap","f":1744034075007430,"d_finished":5199,"c":1,"l":1744034075012629,"d":5199},{"a":1744034075069215,"name":"ack","f":1744034075021532,"d_finished":2554,"c":3,"l":1744034075056698,"d":2911},{"a":1744034075069203,"name":"processing","f":1744034075013615,"d_finished":7232,"c":18,"l":1744034075056701,"d":7601},{"name":"ProduceResults","f":1744034075009898,"d_finished":5259,"c":23,"l":1744034075069457,"d":5259},{"a":1744034075069461,"name":"Finish","f":1744034075069461,"d_finished":0,"c":0,"l":1744034075069572,"d":111},{"name":"task_result","f":1744034075013647,"d_finished":4461,"c":15,"l":1744034075025247,"d":4461}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;) 2025-04-07T13:54:35.070075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:35.003528Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=0;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13880;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13880;selected_rows=0; 2025-04-07T13:54:35.070105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:35.070163Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=context.h:77;profile=; 2025-04-07T13:54:35.070247Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:296:2314];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;; 2025-04-07T13:54:35.070532Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-04-07T13:54:35.100749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnExecute;tx_id=115;fline=tx_controller.cpp:360;event=start; 2025-04-07T13:54:35.100878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=tx_controller.cpp:371;event=start;tx_info=115:TX_KIND_BACKUP;min=0;max=18446744073709551615;plan=0;src=[1:239:2257];cookie=0; 2025-04-07T13:54:35.100925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:239:2257]; 2025-04-07T13:54:35.100968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=115; >> Normalizers::PortionsNormalizer >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneOverload >> Normalizers::ColumnChunkNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-04-07T13:54:23.979889Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:24.103954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:24.144194Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:24.144319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:24.144607Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:24.154992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:24.155416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:24.155684Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:24.155831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:24.155935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:24.156047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:24.156165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:24.156306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:24.156436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:24.156538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:24.156688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:24.156797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:24.194991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:24.205487Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:24.205669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:24.205722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:24.205934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:24.206121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:24.206192Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:24.206235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:24.206325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:24.206383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:24.206429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:24.206489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:24.206672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:24.206746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:24.206784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:24.206825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:24.206926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:24.207001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:24.207041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:24.207075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:24.207173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:24.207212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:24.207246Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:24.207302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-07T13:54:24.207349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:24.207379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:24.207540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:54:24.207617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-07T13:54:24.207713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-07T13:54:24.207813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T13:54:24.207974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:24.208027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:24.208057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:24.208271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:24.208321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:24.208349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:24.208497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:24.208537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:24.208566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:24.212910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:24.213012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:24.213069Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:24.213202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 3:54:35.155701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:35.155800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.155848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T13:54:35.155883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:35.156143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.156409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.156459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:35.156605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T13:54:35.156920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T13:54:35.157123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T13:54:35.157300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.157427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.157543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2025-04-07T13:54:35.157775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.157935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.158106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:35.158148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] finished for tablet 9437184 2025-04-07T13:54:35.158457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:35.158963Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:1022:2893] and sent to [1:1021:2892] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.02},{"events":["l_ack","l_processing","l_Finish"],"t":0.021}],"full":{"a":1744034075137182,"name":"_full_task","f":1744034075137182,"d_finished":0,"c":0,"l":1744034075158500,"d":21318},"events":[{"name":"bootstrap","f":1744034075137391,"d_finished":3741,"c":1,"l":1744034075141132,"d":3741},{"a":1744034075157753,"name":"ack","f":1744034075156108,"d_finished":1472,"c":1,"l":1744034075157580,"d":2219},{"a":1744034075157740,"name":"processing","f":1744034075141202,"d_finished":6506,"c":10,"l":1744034075157582,"d":7266},{"name":"ProduceResults","f":1744034075139039,"d_finished":4175,"c":13,"l":1744034075158134,"d":4175},{"a":1744034075158137,"name":"Finish","f":1744034075158137,"d_finished":0,"c":0,"l":1744034075158500,"d":363},{"name":"task_result","f":1744034075141218,"d_finished":4894,"c":9,"l":1744034075155929,"d":4894}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T13:54:35.159064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:35.136620Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T13:54:35.159111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:35.159191Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:35.159362Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-04-07T13:54:01.223161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569172418248029:2190];send_to=[0:7307199536658146131:7762515]; 
2025-04-07T13:54:01.223328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159d/r3tmp/tmpT0nkr8/pdisk_1.dat 2025-04-07T13:54:02.029872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:02.059933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:02.060031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:02.072578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13621, node 1 2025-04-07T13:54:02.313490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:02.313524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:02.313531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:02.313596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:02.815599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:02.830795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:02.837716Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:02.837779Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Connect to grpc://localhost:5999 2025-04-07T13:54:02.839942Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-07T13:54:02.871853Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-07T13:54:02.872224Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159d/r3tmp/tmpz0Iqp9/pdisk_1.dat 2025-04-07T13:54:05.904933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:05.927911Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:05.931866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:05.931933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:05.936601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12786, node 2 2025-04-07T13:54:06.073978Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:06.073997Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:06.074007Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:06.074806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:06.313580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:06.321442Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:06.325616Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:06.325694Z node 2 :GRPC_CLIENT DEBUG: [5160000d1dd0] Connect to grpc://localhost:5289 2025-04-07T13:54:06.331494Z node 2 :GRPC_CLIENT DEBUG: [5160000d1dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-07T13:54:06.341016Z node 2 :GRPC_CLIENT DEBUG: [5160000d1dd0] Status 14 Service Unavailable 2025-04-07T13:54:06.343366Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:06.343391Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:06.343535Z node 2 :GRPC_CLIENT DEBUG: [5160000d1dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-07T13:54:06.349622Z node 2 :GRPC_CLIENT DEBUG: [5160000d1dd0] Status 14 Service Unavailable 2025-04-07T13:54:06.351988Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159d/r3tmp/tmpQA0zVC/pdisk_1.dat 2025-04-07T13:54:10.545741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:10.702952Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:10.703022Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:10.706690Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:10.724082Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4738, node 3 2025-04-07T13:54:10.889429Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:10.889451Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:10.889458Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:10.889559Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18495 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:11.375000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:11.392467Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:11.394381Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:11.394430Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Connect to grpc://localhost:3719 2025-04-07T13:54:11.395136Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-07T13:54:11.403374Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Status 14 Service Unavailable 2025-04-07T13:54:11.403710Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:11.403736Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:11.403868Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-07T13:54:11.405794Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Status 14 Service Unavailable 2025-04-07T13:54:11.406050Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:12.459459Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-07T13:54:12.459492Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:12.459619Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-07T13:54:12.465363Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Status 14 Service Unavailable 2025-04-07T13:54:12.466123Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:14.464960Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-07T13:54:14.464995Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:14.465171Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 
NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-07T13:54:14.472524Z node 3 :GRPC_CLIENT DEBUG: [516000044dd0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-07T13:54:14.472916Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-07T13:54:24.242183Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569272181178038:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:24.242228Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159d/r3tmp/tmpj2aezJ/pdisk_1.dat 2025-04-07T13:54:24.398105Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:24.429304Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:24.429389Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:24.432010Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64468, node 4 2025-04-07T13:54:24.565970Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:24.565988Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:24.565997Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:24.566065Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:24.883903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:24.907135Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:24.914724Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:24.914798Z node 4 :GRPC_CLIENT DEBUG: [5160000fcfd0] Connect to grpc://localhost:15092 2025-04-07T13:54:24.915679Z node 4 :GRPC_CLIENT DEBUG: [5160000fcfd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-07T13:54:24.944009Z node 4 :GRPC_CLIENT DEBUG: [5160000fcfd0] Status 14 Service Unavailable 2025-04-07T13:54:24.944152Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-07T13:54:24.944183Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-07T13:54:24.944351Z node 4 :GRPC_CLIENT DEBUG: [5160000fcfd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-07T13:54:24.946502Z node 4 :GRPC_CLIENT DEBUG: [5160000fcfd0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-07T13:54:24.946676Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159d/r3tmp/tmpEDNGq3/pdisk_1.dat 2025-04-07T13:54:30.304900Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:30.309150Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:30.336817Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:30.336925Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:30.338103Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16634, node 5 2025-04-07T13:54:30.584961Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:30.584979Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:30.584987Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:30.585086Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1933 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:30.954312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:30.973360Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:30.975160Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-04-07T13:54:34.247104Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:34.389666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:34.412233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:34.412300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:34.412555Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:34.420001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:34.420237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:34.420470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:34.420589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:34.424846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:34.425060Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:34.425199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:34.425318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:34.425479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:34.425581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.425690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:34.425785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:34.457624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:34.463611Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:34.463867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:34.463914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-07T13:54:34.464204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.464395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:34.464464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:34.464512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-07T13:54:34.464601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:34.464704Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:34.464752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:34.464784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:34.464951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.465020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:34.465064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:34.465091Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:34.465188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:34.465246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:34.465283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:34.465329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:34.465408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:34.465446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:34.465471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:34.465516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:34.465548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:34.465587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:34.465764Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-07T13:54:34.465835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T13:54:34.465942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=59; 2025-04-07T13:54:34.466019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-07T13:54:34.466172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:34.466228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:34.466259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:34.466459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:34.466506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.466543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.466691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:34.466738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:34.466772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:34.466969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:34.467017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:34.467051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;las ... 
=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:54:35.853396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.853442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:35.853475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:35.853598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:35.853682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.853718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:35.853743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:35.853994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:35.854027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:35.854084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-07T13:54:35.854138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-07T13:54:35.854187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:35.854280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.854331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-07T13:54:35.854363Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:35.854541Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.854695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.854744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:35.854869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-07T13:54:35.854952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-07T13:54:35.855059Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:333:2338] send ScanData to [1:331:2337] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2025-04-07T13:54:35.855192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.855293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.855385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.856252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:35.856426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.856530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:35.856575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:333:2338] finished for tablet 9437184 2025-04-07T13:54:35.856648Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:333:2338] send ScanData to [1:331:2337] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:35.857091Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:333:2338] and sent to [1:331:2337] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.08},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.082}],"full":{"a":1744034075774299,"name":"_full_task","f":1744034075774299,"d_finished":0,"c":0,"l":1744034075856716,"d":82417},"events":[{"name":"bootstrap","f":1744034075774521,"d_finished":3110,"c":1,"l":1744034075777631,"d":3110},{"a":1744034075856208,"name":"ack","f":1744034075854510,"d_finished":920,"c":1,"l":1744034075855430,"d":1428},{"a":1744034075856189,"name":"processing","f":1744034075777719,"d_finished":68879,"c":9,"l":1744034075855433,"d":69406},{"name":"ProduceResults","f":1744034075776445,"d_finished":2700,"c":12,"l":1744034075856561,"d":2700},{"a":1744034075856563,"name":"Finish","f":1744034075856563,"d_finished":0,"c":0,"l":1744034075856716,"d":153},{"name":"task_result","f":1744034075777740,"d_finished":67806,"c":8,"l":1744034075854408,"d":67806}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2025-04-07T13:54:35.857212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:35.773702Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=258720;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=258720;selected_rows=0; 2025-04-07T13:54:35.857254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:35.857315Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:35.857428Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:333:2338];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> TColumnShardTestReadWrite::Write [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-04-07T13:52:20.757914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568737556549453:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.757967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001b2d/r3tmp/tmpvYdnQs/pdisk_1.dat 2025-04-07T13:52:20.953362Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:21.141252Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11523, node 1 2025-04-07T13:52:21.159728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:21.163498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:21.170911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:21.244077Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b2d/r3tmp/yandexCkAmsB.tmp 2025-04-07T13:52:21.244105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b2d/r3tmp/yandexCkAmsB.tmp 2025-04-07T13:52:21.244257Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b2d/r3tmp/yandexCkAmsB.tmp 2025-04-07T13:52:21.244362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:21.287451Z INFO: TTestServer started on Port 5574 GrpcPort 11523 TClient is connected to server localhost:5574 PQClient connected to localhost:11523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:21.617319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.652829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:21.672890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:52:21.685373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.877542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:52:24.281807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754736419215:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.281927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.282408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754736419227:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.286106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:24.312189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568754736419229:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.571573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.612841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.614130Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568754736419319:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.616884Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTU4MjY0ZWQtMzc2NzU3ZjctYTk3ODMxODQtZWEyZjAzMg==, ActorId: [1:7490568754736419212:2308], ActorState: ExecuteState, TraceId: 01jr88c4p83fnm7qvbj941mb7v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.620583Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.701951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568759031386872:2621] 2025-04-07T13:52:25.758853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568737556549453:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:25.768053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.254561Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.273117Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.274398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568784801190960:2798], Recipient [1:7490568741851516953:2203]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.274436Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.274449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.274493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568784801190956:2795], Recipient [1:7490568741851516953:2203]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.274505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.338181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.338539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.338719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.338740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.338760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T13:52:31.338780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T13:52:31.338810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T13:52:31.338937Z node 1 :FLAT_TX ... 186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:34.922091Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:34.922115Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_15989618919664239557_v1 2025-04-07T13:54:34.922130Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_15989618919664239557_v1 2025-04-07T13:54:34.989163Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:7490569228120249641:2341], Recipient [5:7490569198055477892:2142]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 11 Generation: 1 Round: 1 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 602 Memory: 119120 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1744034054744 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T13:54:34.989214Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T13:54:34.989264Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0602 2025-04-07T13:54:34.989389Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T13:54:34.989422Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099994s, queue# 1 2025-04-07T13:54:35.021151Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569305429662897:2809], Partition 1, Sender [0:0:0], Recipient [5:7490569305429662974:2818], Cookie: 0 2025-04-07T13:54:35.021239Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569305429662974:2818]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021266Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021311Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, 
State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.021393Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.021421Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.021454Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.021633Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569253890053855:2438], Partition 0, Sender [0:0:0], Recipient [5:7490569253890053916:2442], Cookie: 0 2025-04-07T13:54:35.021688Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569253890053916:2442]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021706Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021745Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.021794Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.021824Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.021847Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.021897Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569305429662893:2808], Partition 2, Sender [0:0:0], Recipient [5:7490569305429662972:2816], Cookie: 0 2025-04-07T13:54:35.021935Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569305429662972:2816]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021949Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.021976Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.022011Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.022030Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.022050Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:35.092203Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7490569198055477892:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:35.092257Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:35.092279Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T13:54:35.092346Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T13:54:35.092364Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T13:54:35.092425Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 11 shard idx 72057594046644480:3 data size 0 row count 0 2025-04-07T13:54:35.092481Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], pathId map=Topics, is column=0, is olap=0 2025-04-07T13:54:35.092516Z node 5 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 11: RowCount 0, DataSize 0 2025-04-07T13:54:35.092532Z node 5 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037890, followerId 0 2025-04-07T13:54:35.092595Z node 5 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:3 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-07T13:54:35.092716Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T13:54:35.092821Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7490569198055477892:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:35.092840Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T13:54:35.092855Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T13:54:35.124806Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569305429662897:2809], Partition 1, Sender [0:0:0], Recipient [5:7490569305429662974:2818], Cookie: 0 2025-04-07T13:54:35.124882Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569305429662974:2818]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.124903Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.124946Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.125022Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.125047Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.125074Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:35.125139Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569253890053855:2438], Partition 0, Sender [0:0:0], Recipient [5:7490569253890053916:2442], Cookie: 0 2025-04-07T13:54:35.125174Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569253890053916:2442]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.125187Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.125210Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.125238Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.125252Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.125268Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.125308Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569305429662893:2808], Partition 2, Sender [0:0:0], Recipient [5:7490569305429662972:2816], Cookie: 0 2025-04-07T13:54:35.125340Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569305429662972:2816]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.125353Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.125376Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.125401Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.125418Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.125434Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8328;columns=19; 2025-04-07T13:54:33.083297Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:33.201248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:33.224696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:33.224778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:33.225040Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:33.233510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:33.233759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:33.234013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:33.234157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:33.234287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:33.234390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:33.234471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:33.234556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:33.234677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:33.234786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.234911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-04-07T13:54:33.235002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:33.257355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.265656Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:33.265823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:33.265894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:33.266108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.266260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:33.266342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:33.266389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:33.266484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:33.266549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:33.266592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:33.266620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:33.266803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.266875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:33.266924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:33.266958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:33.267062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 
2025-04-07T13:54:33.267121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:33.267166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:33.267196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:33.267258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:33.267292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:33.267321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:33.267378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:33.267420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:33.267466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:33.267653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-07T13:54:33.267740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-07T13:54:33.267824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-07T13:54:33.267921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-07T13:54:33.268084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:33.268137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:33.268175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:33.268386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:33.268434Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.268478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.268650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:33.268776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:33.268814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:33.269003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:33.269062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:33.269099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:33.269227Z ... ;size=0;details={columns=(column_ids=19;column_names=jsondoc;);;};;scan_step_idx=8; 2025-04-07T13:54:36.862563Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:36.862593Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=fetching.cpp:15;event=apply; 2025-04-07T13:54:36.862624Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:31;event=fetched;interval_idx=0; 2025-04-07T13:54:36.862664Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=76; 2025-04-07T13:54:36.863004Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:131;event=DoExecute;interval_idx=0; 2025-04-07T13:54:36.867885Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:54:36.868029Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.868073Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:36.868107Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:36.868328Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:36.868377Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:36.868411Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-04-07T13:54:36.868453Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-04-07T13:54:36.868488Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:36.868568Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.868598Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-07T13:54:36.868631Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:36.869110Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:36.869252Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:1;schema=res_min: binary res_max: binary res_some: binary res_count: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.869292Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:36.869382Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-04-07T13:54:36.869444Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=26;num_rows=1;batch_columns=res_min,res_max,res_some,res_count; 2025-04-07T13:54:36.869560Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:431:2449] send ScanData to [2:430:2448] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 26 rows: 1 page faults: 0 finished: 0 pageFault: 0 arrow schema: res_min: binary res_max: binary res_some: binary res_count: uint64 2025-04-07T13:54:36.869670Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.869764Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.869841Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.870192Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:36.870286Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.870364Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-07T13:54:36.870401Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:431:2449] finished for tablet 9437184 2025-04-07T13:54:36.870467Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:431:2449] send ScanData to [2:430:2448] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:36.870838Z node 2 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [2:431:2449] and sent to [2:430:2448] packs: 0 txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1744034076852077,"name":"_full_task","f":1744034076852077,"d_finished":0,"c":0,"l":1744034076870501,"d":18424},"events":[{"name":"bootstrap","f":1744034076852170,"d_finished":1844,"c":1,"l":1744034076854014,"d":1844},{"a":1744034076870174,"name":"ack","f":1744034076869073,"d_finished":788,"c":1,"l":1744034076869861,"d":1115},{"a":1744034076870161,"name":"processing","f":1744034076855101,"d_finished":8118,"c":10,"l":1744034076869863,"d":8458},{"name":"ProduceResults","f":1744034076853136,"d_finished":2194,"c":13,"l":1744034076870383,"d":2194},{"a":1744034076870385,"name":"Finish","f":1744034076870385,"d_finished":0,"c":0,"l":1744034076870501,"d":116},{"name":"task_result","f":1744034076855114,"d_finished":7211,"c":9,"l":1744034076868692,"d":7211}],"id":"9437184::76"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;) 2025-04-07T13:54:36.870916Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:36.851835Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-07T13:54:36.870949Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:36.871013Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:36.871090Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> 
AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> Normalizers::PortionsNormalizer [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-04-07T13:54:35.886786Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.019097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.041980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.042062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.042320Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.051409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:36.051570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.051762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.051834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.051903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.051966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.052038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.052112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.052240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.052301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.052394Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.052454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.077991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.081163Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.081377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:36.081428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:36.081630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.081781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.081846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.081882Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:36.081964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.082016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.082056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.082100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.082287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.082352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.082395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.082421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.082517Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.082582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.082638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.082691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.082766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.082802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.082833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.082879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.082917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.082946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.083133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-07T13:54:36.083216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-07T13:54:36.083335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-04-07T13:54:36.083429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-07T13:54:36.083610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:36.083671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:36.083705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:36.083928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T13:54:36.083977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.084375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.084582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:36.084628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:36.084656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:36.084899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:36.084952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:36.084982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:36.087838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
AN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:36.900281Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T13:54:36.900391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ASSEMBLER::SPEC;duration=0.000000s;size=0;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-07T13:54:36.900758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ASSEMBLER::PK;duration=0.000000s;size=0;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-07T13:54:36.901249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=SNAPSHOT;duration=0.000000s;size=0;details={};;scan_step_idx=4; 2025-04-07T13:54:36.901467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ALLOCATE_MEMORY::Filter;duration=0.000000s;size=0;details={stage=Filter;};;scan_step_idx=5; 2025-04-07T13:54:36.901545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ASSEMBLER::EF;duration=0.000000s;size=0;details={columns=(column_ids=9;column_names=saved_at;);;};;scan_step_idx=6; 2025-04-07T13:54:36.901677Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=PROGRAM;duration=0.000000s;size=0;details={};;scan_step_idx=7; 2025-04-07T13:54:36.902142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:36.902179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=fetching.cpp:15;event=apply; 2025-04-07T13:54:36.902232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:31;event=fetched;interval_idx=0; 2025-04-07T13:54:36.902267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=2; 2025-04-07T13:54:36.902334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:54:36.902445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.902481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:36.902531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:36.902602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:36.902676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.902705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:36.902728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:36.902771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:36.902804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:36.902836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-07T13:54:36.902871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-04-07T13:54:36.902953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:36.903045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.903191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.903481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:36.903635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.903739Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-07T13:54:36.903775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:279:2297] finished for tablet 9437184 2025-04-07T13:54:36.903857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:279:2297] send ScanData to [1:278:2296] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:36.904435Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:279:2297] and sent to [1:278:2296] packs: 0 txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing"],"t":0.002},{"events":["f_task_result"],"t":0.003},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.007}],"full":{"a":1744034076896156,"name":"_full_task","f":1744034076896156,"d_finished":0,"c":0,"l":1744034076903902,"d":7746},"events":[{"name":"bootstrap","f":1744034076896371,"d_finished":1923,"c":1,"l":1744034076898294,"d":1923},{"a":1744034076903460,"name":"ack","f":1744034076903460,"d_finished":0,"c":0,"l":1744034076903902,"d":442},{"a":1744034076903448,"name":"processing","f":1744034076899152,"d_finished":1977,"c":8,"l":1744034076903360,"d":2431},{"name":"ProduceResults","f":1744034076897567,"d_finished":1449,"c":10,"l":1744034076903760,"d":1449},{"a":1744034076903763,"name":"Finish","f":1744034076903763,"d_finished":0,"c":0,"l":1744034076903902,"d":139},{"name":"task_result","f":1744034076899162,"d_finished":1899,"c":8,"l":1744034076903358,"d":1899}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;) 2025-04-07T13:54:36.904542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:36.895724Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T13:54:36.904633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:36.904732Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:36.904848Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-04-07T13:54:32.957836Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:33.120071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:33.143439Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:33.143533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:33.143821Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:33.153265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:33.153499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:33.153769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:33.153868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:33.153961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:33.154064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:33.154261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:33.154373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:33.154510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:33.154607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.154698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:33.154794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:33.179633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.182462Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:33.182630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:33.182680Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:33.182891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.183059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:33.183134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:33.183191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:33.183294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:33.183356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:33.183397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:33.183442Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:33.183643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.183710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:33.183757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:33.183787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:33.183877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:33.183930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:33.183967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:33.184013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:33.184132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:33.184173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:33.184205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:33.184249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:33.184324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:33.184361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:33.184551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T13:54:33.184638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-07T13:54:33.184776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-04-07T13:54:33.184887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T13:54:33.185053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:33.185113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:33.185147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:33.185371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:33.185427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.185460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.185618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:33.185664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:33.185690Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:33.185893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:33.185938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:33.185966Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:33.186099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... ;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-07T13:54:36.985620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::LoginEmptyTicketBad |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |86.9%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] Test command err: 2025-04-07T13:54:36.273674Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.376424Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.398774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.398840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.399094Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.406680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-07T13:54:36.406943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-04-07T13:54:36.407039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.407206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.407356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.407449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.407552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.407665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.407799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.407905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.407992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.408083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.408161Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.408273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.430394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.439295Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.439638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-07T13:54:36.439690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.439966Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:283;tasks_for_remove=0; 2025-04-07T13:54:36.440095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.440168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.440220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.440440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-07T13:54:36.440519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-07T13:54:36.440620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-07T13:54:36.440747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-07T13:54:36.440845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.440897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-07T13:54:36.440934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-07T13:54:36.441087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-04-07T13:54:36.441167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.441209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.441240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-07T13:54:36.441329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.441404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.441473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.441514Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.441675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.441735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.441771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.441833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.441957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.442009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.442047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.442076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.442141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.442180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.442205Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.442572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.442629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.442659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.442813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-04-07T13:54:36.442918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=62; 2025-04-07T13:54:36.442981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-04-07T13:54:36.443048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSche ... =EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:54:37.425348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-07T13:54:37.425385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12365; 2025-04-07T13:54:37.425516Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:54:37.425603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:271:2285];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:54:37.425658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:271:2285];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:54:37.425704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:54:37.425759Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:54:37.425802Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:54:37.425879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:37.425961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T13:54:37.426028Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:54:37.426077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:37.426109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:37.426161Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:54:37.426221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:54:37.426270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:54:37.426705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:37.426965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:54:37.427040Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:54:37.427067Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:54:37.427091Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:54:37.427131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:54:37.427215Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:37.427270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T13:54:37.427328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:54:37.427383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:37.427424Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:54:37.427465Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 
2025-04-07T13:54:37.427530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:54:37.427591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:54:37.492442Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-07T13:54:37.492554Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-07T13:54:37.492769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "key1" } Columns { Name: "key2" } Columns { Name: "field" } } } ; 2025-04-07T13:54:37.492852Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[key1;key2;field;];};]; 2025-04-07T13:54:37.494575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:271:2285];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:335:2341];trace_detailed=; 2025-04-07T13:54:37.495252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:355;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-07T13:54:37.495536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-07T13:54:37.495845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:37.496028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:37.496170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:37.496214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:335:2341] finished for tablet 9437184 2025-04-07T13:54:37.496299Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:335:2341] send ScanData to [1:333:2340] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:37.496749Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:335:2341] and sent to [1:333:2340] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744034077494474,"name":"_full_task","f":1744034077494474,"d_finished":0,"c":0,"l":1744034077496366,"d":1892},"events":[{"name":"bootstrap","f":1744034077494697,"d_finished":1011,"c":1,"l":1744034077495708,"d":1011},{"a":1744034077495823,"name":"ack","f":1744034077495823,"d_finished":0,"c":0,"l":1744034077496366,"d":543},{"a":1744034077495803,"name":"processing","f":1744034077495803,"d_finished":0,"c":0,"l":1744034077496366,"d":563},{"name":"ProduceResults","f":1744034077495681,"d_finished":347,"c":2,"l":1744034077496200,"d":347},{"a":1744034077496203,"name":"Finish","f":1744034077496203,"d_finished":0,"c":0,"l":1744034077496366,"d":163}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2025-04-07T13:54:37.496836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:37.492921Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:54:37.496885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:37.496929Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:54:37.497032Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:335:2341];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-04-07T13:52:20.443390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568736825436590:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.444011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:20.610208Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001b4c/r3tmp/tmp5u0vfo/pdisk_1.dat 2025-04-07T13:52:20.902493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:20.902610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:20.905616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8312, node 1 2025-04-07T13:52:21.073372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:52:21.141482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b4c/r3tmp/yandexgVUa3P.tmp 2025-04-07T13:52:21.141509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b4c/r3tmp/yandexgVUa3P.tmp 2025-04-07T13:52:21.144075Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b4c/r3tmp/yandexgVUa3P.tmp 2025-04-07T13:52:21.144222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:21.212640Z INFO: TTestServer started on Port 24241 GrpcPort 8312 TClient is connected to server localhost:24241 PQClient connected to localhost:8312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:21.619290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.674188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:52:21.862913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:52:24.288254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754005306418:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.288689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568754005306407:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.288769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:24.292233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:52:24.312023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568754005306421:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:52:24.591796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.606698Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568754005306495:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:24.608433Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcyMWQ2YTEtYjU4OTQzMzYtN2ZlYWRjOTUtYjJhOGUwYWU=, ActorId: [1:7490568754005306404:2308], ActorState: ExecuteState, TraceId: 01jr88c4pcde4yb7tk73a0d8xf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:24.611059Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:24.633676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:24.761769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490568758300274065:2623] 2025-04-07T13:52:25.432839Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568736825436590:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:25.432933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T13:52:31.078745Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T13:52:31.089858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7490568736825436841:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:52:31.089913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T13:52:31.089968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7490568736825436841:2194], Recipient [1:7490568736825436841:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:52:31.089987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T13:52:31.128415Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:52:31.129612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490568784070078150:2798], Recipient [1:7490568736825436841:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.129643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:52:31.129665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T13:52:31.129703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490568784070078146:2795], Recipient [1:7490568736825436841:2194]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T13:52:31.129716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T13:52:31.187219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:52:31.187763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:52:31.188053Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T13:52:31.188095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T13:52:31.188127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathI ... 037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.556683Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.556703Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.556712Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569272799099410:2851], Partition 1, Sender [0:0:0], Recipient [5:7490569272799099508:2866], Cookie: 0 2025-04-07T13:54:35.556752Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569272799099409:2850], Partition 2, Sender [0:0:0], Recipient [5:7490569272799099505:2864], Cookie: 0 2025-04-07T13:54:35.556763Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569272799099508:2866]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.556783Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.556792Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569272799099505:2864]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.556814Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.556832Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.556846Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.556891Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.556903Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.556909Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.556925Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.556926Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.556949Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:35.556975Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569216964522935:2434], Partition 0, Sender [0:0:0], Recipient [5:7490569216964522998:2438], Cookie: 0 2025-04-07T13:54:35.557007Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569216964522998:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.557025Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.557049Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.557080Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.557095Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.557110Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.647417Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:7490569186899751502:2350], Recipient [5:7490569161129946990:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037891 TableLocalId: 12 Generation: 1 Round: 2 TableStats { DataSize: 640 RowCount: 2 IndexSize: 0 InMemSize: 640 LastAccessTime: 1744034074859 LastUpdateTime: 1744034046177 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 18 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 993 Memory: 132944 } ShardState: 2 UserTablePartOwners: 72075186224037891 NodeId: 5 StartTime: 1744034045582 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T13:54:35.647477Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T13:54:35.647514Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 640 rowCount 2 cpuUsage 0.0993 2025-04-07T13:54:35.647629Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] raw table stats: DataSize: 640 RowCount: 2 IndexSize: 0 InMemSize: 640 LastAccessTime: 1744034074859 LastUpdateTime: 1744034046177 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 18 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T13:54:35.647657Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 2025-04-07T13:54:35.658212Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569272799099410:2851], Partition 1, 
Sender [0:0:0], Recipient [5:7490569272799099508:2866], Cookie: 0 2025-04-07T13:54:35.658302Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569272799099508:2866]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658340Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658387Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.658467Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.658491Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.658523Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.658581Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569298568903789:2989], Partition 3, Sender [0:0:0], Recipient [5:7490569298568903905:3010], Cookie: 0 2025-04-07T13:54:35.658619Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569298568903905:3010]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658635Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658661Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.658693Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.658711Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.658730Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.658775Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569298568903788:2988], Partition 4, Sender [0:0:0], Recipient [5:7490569298568903903:3008], Cookie: 0 2025-04-07T13:54:35.658812Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569298568903903:3008]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658826Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.658850Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.658883Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.658897Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.658914Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-07T13:54:35.658962Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569272799099409:2850], Partition 2, Sender [0:0:0], Recipient [5:7490569272799099505:2864], Cookie: 0 2025-04-07T13:54:35.658998Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569272799099505:2864]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.659011Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.659036Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.659068Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.659082Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.659099Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-07T13:54:35.659144Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7490569216964522935:2434], Partition 0, Sender [0:0:0], Recipient [5:7490569216964522998:2438], Cookie: 0 2025-04-07T13:54:35.659176Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7490569216964522998:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.659188Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-07T13:54:35.659216Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-07T13:54:35.659248Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-07T13:54:35.659262Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-07T13:54:35.659278Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> Normalizers::SchemaVersionsNormalizer >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-04-07T13:54:37.923815Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:38.013150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:38.039317Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:38.039391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:38.039607Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:38.047165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:38.047348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:38.047548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:38.047645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:38.047739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:38.047830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:38.047952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:38.048051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:38.048157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:38.048240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.048364Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:38.048467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:38.069717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:38.073778Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:38.073926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:38.073970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:38.074168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.074333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:38.074397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:38.074434Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:38.074510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:38.074570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:38.074608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:38.074637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:38.074787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.074861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:38.074900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:38.074925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:38.075021Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:38.075072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:38.075112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:38.075142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:38.075200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:38.075231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:38.075261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:38.075303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:38.075336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:38.075362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:38.075547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-07T13:54:38.075624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-07T13:54:38.075731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-07T13:54:38.075834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-07T13:54:38.075983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:38.076050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:38.076100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:38.076291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T13:54:38.076336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.076384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.076500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:38.076533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:38.076559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:38.076787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:38.076830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:38.076859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:38.076959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
=0; 2025-04-07T13:54:38.853745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.853775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:38.853816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:38.853878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:38.853959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.853990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:38.854013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:38.854170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:38.854202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:38.854237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-07T13:54:38.854274Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-04-07T13:54:38.854309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:38.854377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.854405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-07T13:54:38.854432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:38.854633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:38.854745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.854778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:38.854881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-04-07T13:54:38.854940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=59;num_rows=10;batch_columns=message; 2025-04-07T13:54:38.855031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:297:2315] send ScanData to [1:296:2314] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 59 rows: 10 page faults: 0 finished: 0 pageFault: 0 arrow schema: message: string 2025-04-07T13:54:38.855132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.855242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.855321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.855431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:38.855526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.855617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-07T13:54:38.855659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:297:2315] finished for tablet 9437184 2025-04-07T13:54:38.855725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:297:2315] send ScanData to [1:296:2314] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:38.856056Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:297:2315] and sent to [1:296:2314] packs: 0 txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":1744034078845959,"name":"_full_task","f":1744034078845959,"d_finished":0,"c":0,"l":1744034078855756,"d":9797},"events":[{"name":"bootstrap","f":1744034078846084,"d_finished":1745,"c":1,"l":1744034078847829,"d":1745},{"a":1744034078855416,"name":"ack","f":1744034078854613,"d_finished":726,"c":1,"l":1744034078855339,"d":1066},{"a":1744034078855408,"name":"processing","f":1744034078848772,"d_finished":4193,"c":9,"l":1744034078855340,"d":4541},{"name":"ProduceResults","f":1744034078847056,"d_finished":2177,"c":12,"l":1744034078855646,"d":2177},{"a":1744034078855649,"name":"Finish","f":1744034078855649,"d_finished":0,"c":0,"l":1744034078855756,"d":107},{"name":"task_result","f":1744034078848787,"d_finished":3352,"c":8,"l":1744034078854474,"d":3352}],"id":"9437184::6"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;) 2025-04-07T13:54:38.856123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:38.845653Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T13:54:38.856150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:38.856203Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:38.856268Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:297:2315];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> EvWrite::WriteInTransaction [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> Initializer::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2025-04-07T13:54:38.588078Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:38.693689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 
2025-04-07T13:54:38.715695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:38.715771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:38.716029Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:38.724489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:38.724702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:38.724930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:38.725043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:38.725146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:38.725233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:38.725351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:38.725469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:38.725594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:38.725694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.725779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:38.725864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:38.752641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:38.759335Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at 
tablet 9437184 2025-04-07T13:54:38.759652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:38.759725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:38.759910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.760084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:38.760168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:38.760230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:38.760433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:38.760497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:38.760542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:38.760581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:38.760811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.760897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:38.760940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:38.760988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:38.761087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:38.761156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:38.761204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:38.761238Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:38.761322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:38.761380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:38.761407Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:38.761449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:38.761480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:38.761508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:38.761658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-07T13:54:38.761781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-07T13:54:38.761878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-07T13:54:38.761948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-07T13:54:38.762089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:38.762169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:38.762207Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:38.762397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:38.762446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.762497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.762667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-07T13:54:38.762705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:38.762732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:38.762922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:38.762961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:38.762990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:38.763107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.501481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:39.501520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:39.501615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:39.501694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.501719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:39.501754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:39.501881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:39.501926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:39.501982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-07T13:54:39.502036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-04-07T13:54:39.502088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:39.502175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.502205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-04-07T13:54:39.502244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:39.502435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:39.502572Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.502613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:39.502715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-04-07T13:54:39.502774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-04-07T13:54:39.502876Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:270:2288] send ScanData to [1:267:2285] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 229376 rows: 2048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2025-04-07T13:54:39.503016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.503128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.503257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.503512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:39.503636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.503733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:39.503766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:270:2288] finished for tablet 9437184 2025-04-07T13:54:39.503849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:270:2288] send ScanData to [1:267:2285] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:39.504280Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:270:2288] and sent to [1:267:2285] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.019},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.02}],"full":{"a":1744034079483076,"name":"_full_task","f":1744034079483076,"d_finished":0,"c":0,"l":1744034079503904,"d":20828},"events":[{"name":"bootstrap","f":1744034079483499,"d_finished":2463,"c":1,"l":1744034079485962,"d":2463},{"a":1744034079503490,"name":"ack","f":1744034079502398,"d_finished":892,"c":1,"l":1744034079503290,"d":1306},{"a":1744034079503478,"name":"processing","f":1744034079487484,"d_finished":12206,"c":9,"l":1744034079503292,"d":12632},{"name":"ProduceResults","f":1744034079484774,"d_finished":2369,"c":12,"l":1744034079503751,"d":2369},{"a":1744034079503754,"name":"Finish","f":1744034079503754,"d_finished":0,"c":0,"l":1744034079503904,"d":150},{"name":"task_result","f":1744034079487504,"d_finished":10979,"c":8,"l":1744034079502293,"d":10979}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2025-04-07T13:54:39.504350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:39.482663Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=19232;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=19232;selected_rows=0; 2025-04-07T13:54:39.504409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:39.504478Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:39.504575Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> EvWrite::AbortInTransaction >> Normalizers::SchemaVersionsNormalizer [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-04-07T13:54:36.113856Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.210522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.234773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.234860Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.235147Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.253298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:36.253522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.253786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.253925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.254038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.254162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.254286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.254421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.254536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.254644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.254751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.254867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.279030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.281464Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.281626Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:36.281695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:36.281900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.282061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.282142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.282192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:36.282294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.282364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.282409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.282455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.282649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.282717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.282757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.282788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.282897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.282966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.283013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.283059Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.283139Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.283178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.283213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.283278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.283325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.283355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.283536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-07T13:54:36.283633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-07T13:54:36.283743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-04-07T13:54:36.283848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T13:54:36.284007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:36.284060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:36.284098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:36.284325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:36.284397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.284431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.284574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:36.284614Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:36.284647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:36.284904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:36.284950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:36.284982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:36.285129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... ;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-07T13:54:39.529292Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-04-07T13:54:38.865442Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:38.966920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:38.987156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:38.987227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:38.987458Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 
2025-04-07T13:54:38.994984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-04-07T13:54:38.995216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:38.995375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:38.995521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:38.995616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:38.995699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:38.995818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:38.995927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:38.996039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:38.996135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:38.996228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.996312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:38.996407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:39.019488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:39.030371Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:39.030741Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-04-07T13:54:39.030811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-07T13:54:39.031168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:39.031250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-07T13:54:39.031291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-07T13:54:39.031455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.031559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:39.031609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:39.031642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-07T13:54:39.031725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:39.031777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:39.031820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:39.031876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:39.032049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.032105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:39.032150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:39.032181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:39.032272Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:39.032322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:39.032381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:39.032413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:39.032474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:39.032523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:39.032557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:39.032607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:39.032683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:39.032714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:39.032926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=80; 2025-04-07T13:54:39.033012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T13:54:39.033105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-07T13:54:39.033220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-07T13:54:39.033416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:39.033465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:39.033502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:39.033701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:39.033750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.033783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.033934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:39.033990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;flin ... 1;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:54:40.084938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.084984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:40.085015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:40.085131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:40.085200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.085226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:40.085259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:40.085465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:40.085511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:40.085552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-07T13:54:40.085603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-07T13:54:40.085645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:40.085791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.085831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-07T13:54:40.085861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:40.086051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:40.086227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.086271Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:40.086389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-07T13:54:40.086474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-07T13:54:40.086590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2025-04-07T13:54:40.086755Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.086878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.086980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.087928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:40.088081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.088209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:40.088252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] finished for tablet 9437184 2025-04-07T13:54:40.088335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:40.088875Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:325:2331] and sent to [1:323:2330] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.055},{"events":["f_ack"],"t":0.056},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.058}],"full":{"a":1744034080029907,"name":"_full_task","f":1744034080029907,"d_finished":0,"c":0,"l":1744034080088404,"d":58497},"events":[{"name":"bootstrap","f":1744034080030075,"d_finished":2165,"c":1,"l":1744034080032240,"d":2165},{"a":1744034080087904,"name":"ack","f":1744034080086025,"d_finished":979,"c":1,"l":1744034080087004,"d":1479},{"a":1744034080087879,"name":"processing","f":1744034080033350,"d_finished":47074,"c":9,"l":1744034080087006,"d":47599},{"name":"ProduceResults","f":1744034080031334,"d_finished":2535,"c":12,"l":1744034080088235,"d":2535},{"a":1744034080088237,"name":"Finish","f":1744034080088237,"d_finished":0,"c":0,"l":1744034080088404,"d":167},{"name":"task_result","f":1744034080033364,"d_finished":45980,"c":8,"l":1744034080085903,"d":45980}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2025-04-07T13:54:40.089031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:40.029478Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=256192;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=256192;selected_rows=0; 2025-04-07T13:54:40.089075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:40.089138Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:40.089238Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-04-07T13:53:21.018669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:53:21.019096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:21.019345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a35/r3tmp/tmpu8B7GJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28273, node 1 TClient is connected to server localhost:12707 2025-04-07T13:53:22.164431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:22.295556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:22.360186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:22.360242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:22.360295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:22.360463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:22.430386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:22.430513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:22.443158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:32.755669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:643:2535], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.755855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:654:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.756002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:32.777755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:53:32.956356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:657:2543], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:53:34.116901Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:739:2593], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:34.141535Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUxMDk4OTktOWVkNDgwMzUtYWJjYzhlZWItZDJhNjM0YmE=, ActorId: [1:639:2532], ActorState: ExecuteState, TraceId: 01jr88e7j43tawe1mhh5ny3e1g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-04-07T13:53:34.309854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-07T13:53:38.794856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:39.439986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:53:40.520624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-04-07T13:53:51.812256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr88et2k6z051wk0hyhwwqka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMzOTk1ZC04NmNmN2U5Yy03NDRmYzBjMS0yZTMzNWNhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-07T13:54:06.113471Z node 1 :TX_PROXY ERROR: Access denied for root@builtin with access RemoveSchema to path Root/.metadata/test REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin to path Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-07T13:54:16.870303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr88fjgcbftw3b724mxbxmjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThlMDY0OGYtY2MxYjYyYTItMmRjZDQ4ODItNjA2ZTA3Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:943 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-04-07T13:54:38.847226Z node 1 :TX_PROXY ERROR: Access denied for root@builtin with access RemoveSchema to path Root/.metadata/initialization/migrations REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin to path Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> EvWrite::AbortInTransaction [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-04-07T13:54:40.569668Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:40.660444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:40.682887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:40.682968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:40.683221Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:40.692521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:40.692721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:40.692965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:40.693096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:40.693178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:40.693233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:40.693307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:40.693387Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:40.693475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:40.693566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.693638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:40.693718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:40.714383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:40.725340Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:40.725730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:40.725805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:40.725984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.726131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:40.726199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:40.726253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:40.726360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:40.726431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:40.726470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:40.726505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:40.726707Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.726780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:40.726816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:40.726869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:40.726968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:40.727044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:40.727083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:40.727113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:40.727177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:40.727213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:40.727238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:40.727282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:40.727316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:40.727344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:40.727515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T13:54:40.727639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-07T13:54:40.727719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-07T13:54:40.727804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T13:54:40.727956Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:40.728028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:40.728071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:40.728292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:40.728340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.728386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.728525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:40.728578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:40.728628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:40.729035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:40.729086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:40.729120Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:40.729257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
X_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;fline=tx_controller.cpp:340;event=registered; 2025-04-07T13:54:41.313495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;this=88923003807456;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_this=89197881026880;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-07T13:54:41.313651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;this=88923003807456;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_this=89197881026880;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=tx_controller.cpp:371;event=start;tx_info=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;; 2025-04-07T13:54:41.313752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;this=88923003807456;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_this=89197881026880;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2025-04-07T13:54:41.313809Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;this=88923003807456;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=00:0;;int_this=89197881026880;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-07T13:54:41.314165Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-07T13:54:41.314250Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-07T13:54:41.314398Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:54:41.314696Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-07T13:54:41.320615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:557;event=OnTieringModified;new_count_tierings=0; 2025-04-07T13:54:41.320826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:262;method=RegisterTable;path_id=1; 2025-04-07T13:54:41.320884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=1; 2025-04-07T13:54:41.327588Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T13:54:41.355915Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 
2025-04-07T13:54:41.356106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=229592;columns=2; 2025-04-07T13:54:41.359376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:54:41.359450Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:54:41.373882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:135:2167];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-04-07T13:54:41.382642Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-07T13:54:41.382985Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-07T13:54:41.398808Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-07T13:54:41.399022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:41.399529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:111;event=abort;tx_id=222;problem=finished; 2025-04-07T13:54:41.399614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:128;event=abort;tx_id=222;problem=finished; 2025-04-07T13:54:41.399891Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-07T13:54:41.399947Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-04-07T13:54:41.399996Z node 1 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 10 last planned step 10 at tablet 9437184 2025-04-07T13:54:41.400048Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-07T13:54:41.400463Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {10:max} readable: {10:max} at tablet 9437184 2025-04-07T13:54:41.400593Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T13:54:41.403050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "key" } Columns { Name: "field" } } } ; 2025-04-07T13:54:41.403190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[key;field;];};]; 2025-04-07T13:54:41.404330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:252:2270];trace_detailed=; 2025-04-07T13:54:41.406255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:355;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-04-07T13:54:41.406466Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-04-07T13:54:41.406860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:41.407013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:41.407165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T13:54:41.407214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:252:2270] finished for tablet 9437184 2025-04-07T13:54:41.407307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:252:2270] send ScanData to [1:246:2264] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:41.407734Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:252:2270] and sent to [1:246:2264] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1744034081404244,"name":"_full_task","f":1744034081404244,"d_finished":0,"c":0,"l":1744034081407361,"d":3117},"events":[{"name":"bootstrap","f":1744034081404514,"d_finished":2136,"c":1,"l":1744034081406650,"d":2136},{"a":1744034081406828,"name":"ack","f":1744034081406828,"d_finished":0,"c":0,"l":1744034081407361,"d":533},{"a":1744034081406809,"name":"processing","f":1744034081406809,"d_finished":0,"c":0,"l":1744034081407361,"d":552},{"name":"ProduceResults","f":1744034081406623,"d_finished":320,"c":2,"l":1744034081407197,"d":320},{"a":1744034081407201,"name":"Finish","f":1744034081407201,"d_finished":0,"c":0,"l":1744034081407361,"d":160}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2025-04-07T13:54:41.407855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:41.403294Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T13:54:41.407911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:41.407961Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T13:54:41.408065Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:252:2270];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> TNebiusAccessServiceTest::Authenticate >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> TNebiusAccessServiceTest::Authenticate [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] >> TNebiusAccessServiceTest::PassRequestId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-04-07T13:54:33.201816Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:33.323040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:33.346572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 
2025-04-07T13:54:33.346634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:33.346888Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:33.352780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-07T13:54:33.353027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.353195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:33.353353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:33.353452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:33.353549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:33.353655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:33.353759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:33.353881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:33.353989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:33.354072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.354157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:33.354235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:33.378880Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.383099Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:33.383400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-07T13:54:33.383451Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.383780Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:283;tasks_for_remove=0; 2025-04-07T13:54:33.383906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.383969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-07T13:54:33.384007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-07T13:54:33.384135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.384217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:33.384274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:33.384333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-07T13:54:33.384429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:33.384482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:33.384517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:33.384550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:33.384740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.384798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:33.384841Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:33.384869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:33.384952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:33.385002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:33.385039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:33.385064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:33.385127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:33.385190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:33.385230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:33.385296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:33.385333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:33.385365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:33.385532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T13:54:33.385657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=67; 2025-04-07T13:54:33.385763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-07T13:54:33.385867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-07T13:54:33.386066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:33.386121Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:33.386153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:33.386338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:33.386390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.386428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.386584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CL ... 37184;ScanId=0;TxId=111;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T13:54:42.163984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.164029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:42.164071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:42.164190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:42.164265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.164292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T13:54:42.164370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:42.164607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T13:54:42.164652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T13:54:42.164752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-07T13:54:42.164815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-07T13:54:42.164868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:42.164964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.164997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-07T13:54:42.165033Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:42.165236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:42.165412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.165459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:42.165613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-07T13:54:42.165714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-07T13:54:42.165828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:360:2364] send ScanData to [1:358:2363] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2025-04-07T13:54:42.165981Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.166110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.166239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.167145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:42.167298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.167429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T13:54:42.167476Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:360:2364] finished for tablet 9437184 2025-04-07T13:54:42.167584Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:360:2364] send ScanData to [1:358:2363] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:42.168088Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:360:2364] and sent to [1:358:2363] packs: 0 txId: 111 scanId: 0 
gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.082},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.084}],"full":{"a":1744034082082867,"name":"_full_task","f":1744034082082867,"d_finished":0,"c":0,"l":1744034082167629,"d":84762},"events":[{"name":"bootstrap","f":1744034082083089,"d_finished":2769,"c":1,"l":1744034082085858,"d":2769},{"a":1744034082167110,"name":"ack","f":1744034082165208,"d_finished":1060,"c":1,"l":1744034082166268,"d":1579},{"a":1744034082167087,"name":"processing","f":1744034082087038,"d_finished":73127,"c":9,"l":1744034082166271,"d":73669},{"name":"ProduceResults","f":1744034082084725,"d_finished":2804,"c":12,"l":1744034082167457,"d":2804},{"a":1744034082167460,"name":"Finish","f":1744034082167460,"d_finished":0,"c":0,"l":1744034082167629,"d":169},{"name":"task_result","f":1744034082087062,"d_finished":71923,"c":8,"l":1744034082165082,"d":71923}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2025-04-07T13:54:42.168223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:42.082317Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=258720;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=258720;selected_rows=0; 2025-04-07T13:54:42.168288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:42.168373Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:42.168499Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:360:2364];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-07T13:54:43.652919Z node 2 :GRPC_CLIENT DEBUG: [51600000c0d0]{reqId} Connect to grpc://localhost:29783 2025-04-07T13:54:43.655498Z node 2 :GRPC_CLIENT DEBUG: [51600000c0d0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-07T13:54:43.665480Z node 2 :GRPC_CLIENT DEBUG: [51600000c0d0]{reqId} 
Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-04-07T13:54:43.597284Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Connect to grpc://localhost:5827 2025-04-07T13:54:43.609348Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-07T13:54:43.625393Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-04-07T13:54:43.626051Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-04-07T13:54:43.627850Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Status 7 Permission Denied 2025-04-07T13:54:43.628360Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-07T13:54:43.629699Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Status 7 Permission Denied 2025-04-07T13:54:43.630136Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-04-07T13:54:43.631349Z node 3 :GRPC_CLIENT DEBUG: [51600000d2d0] Status 7 Permission Denied |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-07T13:54:43.325271Z node 1 :GRPC_CLIENT DEBUG: [5160000099d0] Connect to grpc://localhost:24441 2025-04-07T13:54:43.418369Z node 1 :GRPC_CLIENT DEBUG: [5160000099d0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-04-07T13:54:43.444200Z node 1 :GRPC_CLIENT DEBUG: [5160000099d0] Status 7 Permission Denied 2025-04-07T13:54:43.444901Z node 1 :GRPC_CLIENT DEBUG: [5160000099d0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-04-07T13:54:43.447204Z node 1 :GRPC_CLIENT DEBUG: [5160000099d0] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-04-07T13:53:57.052059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569152774243094:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:57.052128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001659/r3tmp/tmpEaQ0vz/pdisk_1.dat 2025-04-07T13:53:57.784525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:57.784615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-07T13:53:57.808997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:57.841230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4993, node 1 2025-04-07T13:53:57.977277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:57.977297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:57.977303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:57.977410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:58.323693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:53:58.358503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:58.398471Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:53:58.412461Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****HeyA (75CBFF90) () has now valid token of user1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001659/r3tmp/tmps0injh/pdisk_1.dat 2025-04-07T13:54:01.040031Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:01.041121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 9072, node 2 2025-04-07T13:54:01.136258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:01.136342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:01.141418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:01.157204Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:01.157225Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:01.157230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:01.157309Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:01.409980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:01.459720Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:01.468817Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****61QA (89490CEB) () has now valid token of user1 2025-04-07T13:54:05.022819Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569185430786019:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:05.022862Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001659/r3tmp/tmpeqRhjU/pdisk_1.dat 2025-04-07T13:54:05.250079Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5075, node 3 2025-04-07T13:54:05.328312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:05.328393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:05.329694Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:05.397101Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:05.397120Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:05.397131Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:05.397232Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:05.615335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:05.633352Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:06.020940Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:06.030146Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****LflQ (8C2989FA) () has now valid token of user1 2025-04-07T13:54:06.037619Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:10.029442Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569185430786019:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:10.029528Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:11.054895Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LflQ (8C2989FA) 2025-04-07T13:54:11.055063Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****LflQ (8C2989FA) () has now valid token of user1 2025-04-07T13:54:15.068826Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LflQ (8C2989FA) 2025-04-07T13:54:15.069005Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****LflQ (8C2989FA) () has now valid token of user1 2025-04-07T13:54:16.041268Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:20.079769Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LflQ (8C2989FA) 2025-04-07T13:54:20.079884Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****LflQ (8C2989FA) () has now valid token of user1 2025-04-07T13:54:20.222166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:54:20.222199Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:23.084052Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LflQ (8C2989FA) 2025-04-07T13:54:23.084206Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****LflQ (8C2989FA) () has now valid token of user1 2025-04-07T13:54:26.827042Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490569280125649698:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:26.827110Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001659/r3tmp/tmp5MDOaD/pdisk_1.dat 2025-04-07T13:54:27.062376Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4978, node 4 2025-04-07T13:54:27.163812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:27.163936Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:27.205183Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:27.241322Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:27.241345Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:27.241351Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:27.241449Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19227 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:27.582149Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:54:27.593208Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:54:27.659353Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:27.674739Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****mcHg (A9571B49) () has now valid token of user1 2025-04-07T13:54:27.675250Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:31.831371Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569280125649698:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:31.831466Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:31.867235Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****mcHg (A9571B49) 2025-04-07T13:54:31.867324Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****mcHg (A9571B49) () has now permanent error message 'User not found' 2025-04-07T13:54:35.884842Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****mcHg (A9571B49) 2025-04-07T13:54:38.388690Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569331021409559:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:38.388809Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001659/r3tmp/tmpMZ2HY4/pdisk_1.dat 2025-04-07T13:54:38.590310Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:38.610580Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:38.610693Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:38.614638Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27826, node 5 2025-04-07T13:54:38.805772Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:38.805807Z 
node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:38.805817Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:38.805944Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:54:39.113370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:39.215479Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T13:54:39.242551Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |87.0%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-04-07T13:54:36.607042Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.706660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.738097Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.738173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.738456Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.746440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:36.746629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.746859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.746947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.747051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.747132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.747201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.747264Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.747330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.747388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.747454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.747533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.765008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.767828Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.767957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:36.768013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:36.768257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.768404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.768463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.768498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:36.768588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.768648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.768702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.768753Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.768895Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.768953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.768991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.769023Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.769137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.769190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.769229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.769259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.769324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.769357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.769383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.769425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.769474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.769510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.769667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-07T13:54:36.769760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-07T13:54:36.769853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-07T13:54:36.769943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T13:54:36.770089Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:36.770136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:36.770166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:36.770366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:36.770415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.770441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.770562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:36.770597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:36.770620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:36.770825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:36.770863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:36.770900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:36.771032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
al_id=26; 2025-04-07T13:54:45.482167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T13:54:45.482265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.482301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T13:54:45.482336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T13:54:45.482644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:45.482884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.482939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T13:54:45.483105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T13:54:45.483206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T13:54:45.483359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T13:54:45.483537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.483678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.483837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2025-04-07T13:54:45.484091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T13:54:45.484247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.484427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T13:54:45.484472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] finished for tablet 9437184 2025-04-07T13:54:45.484555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T13:54:45.485263Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:1022:2893] and sent to [1:1021:2892] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1744034085468321,"name":"_full_task","f":1744034085468321,"d_finished":0,"c":0,"l":1744034085484619,"d":16298},"events":[{"name":"bootstrap","f":1744034085468562,"d_finished":3220,"c":1,"l":1744034085471782,"d":3220},{"a":1744034085484068,"name":"ack","f":1744034085482616,"d_finished":1251,"c":1,"l":1744034085483867,"d":1802},{"a":1744034085484046,"name":"processing","f":1744034085473261,"d_finished":6573,"c":10,"l":1744034085483870,"d":7146},{"name":"ProduceResults","f":1744034085470352,"d_finished":3261,"c":13,"l":1744034085484452,"d":3261},{"a":1744034085484456,"name":"Finish","f":1744034085484456,"d_finished":0,"c":0,"l":1744034085484619,"d":163},{"name":"task_result","f":1744034085473281,"d_finished":5177,"c":9,"l":1744034085482396,"d":5177}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T13:54:45.485346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T13:54:45.467805Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T13:54:45.485392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T13:54:45.485475Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T13:54:45.485589Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:53:50.873340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:53:50.873436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:50.873475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:53:50.873515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:53:50.873575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:53:50.873612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:53:50.873675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:53:50.874005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:50.954459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:53:50.954545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:50.966793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:50.966928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:53:50.967097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:53:50.973512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:53:50.973762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:53:50.974491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:50.974714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:53:50.977060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:50.978437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:50.978506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:50.978657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:53:50.978726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:50.978784Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:53:50.978986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:53:50.989328Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:53:51.178442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:53:51.178823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.179248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:53:51.179570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:53:51.179669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.183240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.183422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:53:51.183646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.183713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:53:51.183759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:53:51.183795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:53:51.186296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.186378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:53:51.186425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:53:51.188703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.188759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.188805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:51.188864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.193035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:53:51.195395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:53:51.195637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:53:51.196952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:53:51.197136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:53:51.197189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:51.197467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:53:51.197529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:53:51.197756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:51.197873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:53:51.200519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:53:51.200570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:53:51.200794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:53:51.200848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:53:51.201277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:53:51.201346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:53:51.201487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:53:51.201526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.201576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:53:51.201632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:53:51.201683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:53:51.201736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-07T13:53:51.201811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:53:51.201853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:53:51.201887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:53:51.204165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:51.204320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:53:51.204363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:53:51.204417Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:53:51.204465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:53:51.204577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... canner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T13:54:44.609694Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13455 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B5CC0BDF-01C7-461A-A2CC-916E02A860D1 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-04-07T13:54:44.616933Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3447:5413], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-04-07T13:54:44.617268Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T13:54:44.617457Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13455 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CDFCD721-9B1A-4EA4-874C-E0EE2DCAA718 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-04-07T13:54:44.628893Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# 
[1:3447:5413], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-04-07T13:54:44.629229Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T13:54:44.629350Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13455 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F92F01E3-0E7D-4EF0-9D8D-169511A2C054 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-04-07T13:54:44.641251Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3447:5413], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-07T13:54:44.641362Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3447:5413], success# 1, error# , multipart# 1, uploadId# 1 2025-04-07T13:54:44.654697Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3447:5413], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221
f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13455 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0DB8B734-C441-4A1B-9F79-66619E4237DB amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-07T13:54:44.684975Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3447:5413], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-04-07T13:54:44.685460Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:54:44.704187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:44.704277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:54:44.704505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:44.704627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:44.707404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, 
operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:44.707498Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:44.707546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:54:44.707596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:54:44.707843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:44.720041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:44.720456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:44.720539Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:54:44.720688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:54:44.720730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:44.720785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:54:44.720877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T13:54:44.720933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:44.720975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:54:44.721008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:54:44.721163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:54:44.725425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:54:44.725503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3432:5399] TestWaitNotification: OK eventTxId 102 >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [FAIL] |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-04-07T13:52:15.177759Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1744033935177720 2025-04-07T13:52:15.721642Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568717819509715:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:15.721690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:15.926184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568716471643701:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:16.114294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:52:16.137468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:52:16.137693Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00182d/r3tmp/tmpk7zLXe/pdisk_1.dat 2025-04-07T13:52:16.410643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.410751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.411789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:52:16.411879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:52:16.415377Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:52:16.415582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.417789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:52:16.451944Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23617, node 1 2025-04-07T13:52:16.468898Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:16.468921Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:52:16.533442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00182d/r3tmp/yandexaKcCJm.tmp 2025-04-07T13:52:16.533461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00182d/r3tmp/yandexaKcCJm.tmp 2025-04-07T13:52:16.533603Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00182d/r3tmp/yandexaKcCJm.tmp 2025-04-07T13:52:16.533700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:52:16.575042Z INFO: TTestServer started on Port 17952 GrpcPort 23617 TClient is connected to server localhost:17952 PQClient connected to localhost:23617 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:52:16.879001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:52:16.974475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T13:52:19.876903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568733651513083:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.877037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568733651513108:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.877194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.879984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568734999379810:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.880110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.880460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568734999379843:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:52:19.904946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:52:20.026780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568733651513113:2286], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:20.026479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568734999379845:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:52:20.278672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.283404Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568737946480444:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.284733Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDY5MjAyMjctODJkNzU5NTMtOTQ1MmUzZGYtMTAxZDQ0NzY=, ActorId: [2:7490568733651513081:2281], ActorState: ExecuteState, TraceId: 01jr88c0ax2jrhd0q52ryhb8fk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.286083Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.282198Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568739294347246:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:52:20.283953Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzFkNDM2ZjItOWQ1ZTA0ZTgtYTBkYTA5ZDAtODllZGMyZTc=, ActorId: [1:7490568734999379808:2303], ActorState: ExecuteState, TraceId: 01jr88c08vec6g726sc48ydjqr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:52:20.286073Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:52:20.551295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.726407Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568717819509715:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.726477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:52:20.833455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:52:20.928744Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568716471643701:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:52:20.928801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:23617", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-07T13:52:21.234985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr88c1hydq7qzxhbbynh4y4d, Database: , DatabaseId: /Root, SessionId: ydb: ... 
:PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:54:23.793051Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 2025-04-07T13:54:23.796819Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:54:23.796886Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034063796 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:54:23.796996Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. Init response: last_seq_no: 2 session_id: "src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-04-07T13:54:24.793389Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490569267423468983:3343] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-04-07T13:54:24.823947Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490569267423468983:3343] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-04-07T13:54:24.823983Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490569267423468983:3343] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-07T13:54:27.312529Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:32.312981Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:37.313377Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:42.165674Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2025-04-07T13:54:42.165931Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-07T13:54:42.165082Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-07T13:54:42.165144Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-04-07T13:54:42.166727Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-04-07T13:54:42.166823Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 2025-04-07T13:54:42.320957Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:43.865067Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent >>> Ready to answer: ok 2025-04-07T13:54:43.865645Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: try to update token 2025-04-07T13:54:43.865699Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-04-07T13:54:43.868157Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:54:43.868210Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:54:43.868304Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:54:43.867375Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:54:43.867702Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T13:54:43.868649Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-07T13:54:43.873083Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:54:43.873131Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:54:43.873194Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-07T13:54:43.873434Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-04-07T13:54:43.893064Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-04-07T13:54:43.893747Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1744034083891 2025-04-07T13:54:43.893941Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:54:43.905159Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:54:43.905224Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:54:43.905287Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T13:54:43.905500Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 0 CachedBlobs 0 2025-04-07T13:54:43.905533Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:54:43.912922Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-07T13:54:43.920841Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:54:43.921088Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 12000000 } min_queue_wait_time { nanos: 18000000 } max_queue_wait_time { nanos: 18000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-07T13:54:43.921124Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-04-07T13:54:43.921148Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-04-07T13:54:43.932786Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 grpc read done: success: 0 data: 2025-04-07T13:54:43.932827Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 grpc read failed 2025-04-07T13:54:43.932849Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 grpc closed 2025-04-07T13:54:43.932869Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0 is DEAD 2025-04-07T13:54:43.933382Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:54:43.935784Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:43.935839Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7490569267423469015:3343] destroyed 2025-04-07T13:54:43.935893Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:54:43.943520Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-04-07T13:54:43.943747Z :ERROR: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-04-07T13:54:43.943810Z :ERROR: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-04-07T13:54:43.943867Z :INFO: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session will now close 2025-04-07T13:54:43.943996Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: aborting 2025-04-07T13:54:43.967595Z :DEBUG: [/Root] TraceId [] SessionId [src_id|6fad2c44-72bfcdd8-893acd2b-13255626_0] MessageGroupId [src_id] Write session: destroy 2025-04-07T13:54:44.367891Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:54:44.409006Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:44.473227Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:44.551420Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:44.637553Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:44.862904Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:54:45.159407Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7490569357617783355:3530], TxId: 281474976710889, task: 1, CA Id [3:7490569357617783353:3530]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 |87.1%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.1%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |87.1%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] Test command err: 2025-04-07T13:54:36.251927Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.355745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.378227Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.378300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.378575Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.389064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:36.389283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.389506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.389620Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.389733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.389868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.389972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.390087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.390185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.390282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.390375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.390474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.412138Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.416723Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.416865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:36.416908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:36.417104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.417248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.417309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.417346Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:36.417432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.417493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.417531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.417561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.417703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.417766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.417813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.417840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.417957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.418017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.418054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.418086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.418164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.418216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.418248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.418292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.418323Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.418346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.418493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-07T13:54:36.418585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-07T13:54:36.418679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-07T13:54:36.418776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T13:54:36.418908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:36.418958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:36.418993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:36.419192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:36.419245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.419273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.419400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:36.419439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:36.419465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:36.419698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:36.419746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:36.419774Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:36.419911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... =8;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:46.866171Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-07T13:54:46.866480Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-07T13:54:46.885422Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-07T13:54:46.885605Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:46.887646Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-07T13:54:46.887956Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-07T13:54:46.905412Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-07T13:54:46.905595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:46.930125Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-07T13:54:46.930489Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-07T13:54:46.945817Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-07T13:54:46.945994Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:46.947738Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-07T13:54:46.948053Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-07T13:54:46.962212Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-07T13:54:46.962380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:46.964053Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-07T13:54:46.964355Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-07T13:54:46.978553Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-07T13:54:46.978728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 
2025-04-07T13:54:47.010397Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-07T13:54:47.010694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-07T13:54:47.029430Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-07T13:54:47.029616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.031349Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-07T13:54:47.031669Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-07T13:54:47.049786Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-07T13:54:47.049966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.051938Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-07T13:54:47.052237Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-07T13:54:47.066845Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-07T13:54:47.067023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.087755Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-07T13:54:47.088078Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-07T13:54:47.101949Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-07T13:54:47.102106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.103838Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-07T13:54:47.104145Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-07T13:54:47.118511Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-07T13:54:47.118672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.120476Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-07T13:54:47.120796Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-07T13:54:47.135068Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-07T13:54:47.135241Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.162620Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-07T13:54:47.162978Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-07T13:54:47.177425Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-07T13:54:47.177589Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.179353Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-07T13:54:47.179652Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-07T13:54:47.201656Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-07T13:54:47.201841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.203511Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-07T13:54:47.203820Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-07T13:54:47.221559Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-07T13:54:47.221739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.240786Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-07T13:54:47.241128Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-07T13:54:47.258019Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-07T13:54:47.258197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.260052Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-07T13:54:47.260352Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-07T13:54:47.277445Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-07T13:54:47.277631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T13:54:47.279351Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-07T13:54:47.279633Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save 
Batch GenStep: 2:22 Blob count: 1 2025-04-07T13:54:47.301474Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-07T13:54:47.301654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:54:47.340837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:54:47.340940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:54:47.842525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-04-07T13:54:47.961611Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 23 at tablet 9437184 2025-04-07T13:54:47.961951Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:23 Blob count: 1 2025-04-07T13:54:47.981565Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:23 Blob count: 1 2025-04-07T13:54:47.981767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=26;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> ExternalIndex::Simple |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false |87.2%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |87.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] |87.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain >> TSchemeShardAuditSettings::CreateSubdomain >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TSchemeShardAuditSettings::AlterSubdomain |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_OneTx |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:51.453901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:51.454008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:51.454062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:51.454091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2025-04-07T13:54:51.454128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:51.454156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:51.454204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:51.454494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:51.517503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:51.517557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:51.529382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:51.529535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:51.529700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:51.537993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:51.538181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:51.538881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.539094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:51.541085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.542323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:51.542390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.542502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:51.542542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:51.542595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:51.542745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.548917Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:51.684317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:51.684575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.684886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:51.685129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-04-07T13:54:51.685203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.694882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.695041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:51.695221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.695322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:51.695382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:51.695423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:51.706278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.706381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:51.706430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:51.709011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.709080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.709164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.709231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.713451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:51.715648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:51.715855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:51.716981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.717148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:51.717215Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.717495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:51.717546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.717707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.717779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:51.720401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:51.720476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:51.720744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.720785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:51.721019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.721058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:51.721152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:51.721185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.721229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:51.721293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.721333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:51.721363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:51.721427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:51.721478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:51.721508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:51.723230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.723342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.723381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:51.723417Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:51.723463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.723562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 94046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.116427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.116465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.116492Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-07T13:54:52.116519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:52.117146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.117208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.117230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.117259Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-07T13:54:52.117282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:54:52.117345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-07T13:54:52.119229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-07T13:54:52.119348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-07T13:54:52.120121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.120207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:52.120248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-07T13:54:52.120308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.120336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-07T13:54:52.120379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-04-07T13:54:52.121253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.121387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.123162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.123203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:52.123283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-04-07T13:54:52.123429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.123486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-04-07T13:54:52.125896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:52.125964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.126090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-07T13:54:52.126189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.126218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-07T13:54:52.126257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-07T13:54:52.126506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.126554Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-07T13:54:52.126626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-04-07T13:54:52.127239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.127315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.127339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 
2025-04-07T13:54:52.127362Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-07T13:54:52.127392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:52.128062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.128131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.128167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.128192Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-07T13:54:52.128230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:54:52.128288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-07T13:54:52.132274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.132328Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-04-07T13:54:52.132401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-07T13:54:52.132428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:54:52.132478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-04-07T13:54:52.132518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:54:52.132567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-07T13:54:52.132598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-07T13:54:52.132657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-07T13:54:52.133152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:52.133189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T13:54:52.133235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-07T13:54:52.133456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:52.133488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T13:54:52.133534Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.134067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.135276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.137177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:54:52.137251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-07T13:54:52.137600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-07T13:54:52.137632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-07T13:54:52.138103Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-07T13:54:52.138196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-07T13:54:52.138228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:652:2644] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:51.501055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:51.501162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:51.501205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:51.501235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:51.501277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:51.501303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:51.501353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:51.501761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:51.574519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:51.574599Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-07T13:54:51.586647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:51.586813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:51.586958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:51.597393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:51.597576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:51.598166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.598378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:51.603242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.604504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:51.604581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.604735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:51.604776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:51.604811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:51.604976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.617610Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:51.782061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:51.782281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.782513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:51.782729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:51.782773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.784682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.784820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:51.784969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.785055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:51.785115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:51.785149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:51.791488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.791542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:51.791568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:51.793258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.793309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.793367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.793427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.797004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:51.801524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:51.801745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:51.802745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.802891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:51.802960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.803209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:51.803260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.803424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.803514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:51.805383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:51.805449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:51.805622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.805668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:51.805904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.805945Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:51.806025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:51.806061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.806122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:51.806165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.806211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:51.806245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:51.806297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:51.806347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:51.806388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:51.808239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.808334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.808371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:51.808410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:51.808471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.808574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
etId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 112 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:52.198169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.198240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.198263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.198287Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-07T13:54:52.198311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:52.199077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.199154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.199176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.199201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-07T13:54:52.199239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:54:52.199293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-07T13:54:52.204868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-07T13:54:52.205006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-07T13:54:52.205583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.205687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:52.205726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-07T13:54:52.205761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit 
path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.205784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-07T13:54:52.205893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-04-07T13:54:52.206063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.206127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-07T13:54:52.209109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.209211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-04-07T13:54:52.210209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:52.210243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.210355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-07T13:54:52.210467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.210497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-07T13:54:52.210574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-07T13:54:52.210640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.210674Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-07T13:54:52.210746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-07T13:54:52.210774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:54:52.210807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-07T13:54:52.210859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-07T13:54:52.210901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-07T13:54:52.210924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-07T13:54:52.210983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:54:52.211007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-04-07T13:54:52.211029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2025-04-07T13:54:52.211050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-04-07T13:54:52.211734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.211811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.211843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.211881Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-07T13:54:52.211911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:52.213227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.213320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-07T13:54:52.213352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-07T13:54:52.213376Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-07T13:54:52.213402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-07T13:54:52.213485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-07T13:54:52.213745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:52.213785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T13:54:52.213847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-07T13:54:52.214198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:52.214230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T13:54:52.214280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.216126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.218473Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-07T13:54:52.218540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:54:52.218584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-07T13:54:52.218840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-07T13:54:52.218861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-07T13:54:52.219215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-07T13:54:52.219274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-07T13:54:52.219301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:652:2644] TestWaitNotification: OK eventTxId 112 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> BackupRestore::RestoreTablePartitioningSettings >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> 
BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |87.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> TPQCompatTest::ReadWriteSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:50.771049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:50.771146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:50.771187Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:50.771226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:50.771278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:50.771302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:50.771355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:50.771675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:50.886193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:50.886259Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:50.907643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:50.907802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:50.907969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:50.915303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:50.915527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:50.916181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:50.921090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:50.925851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:50.927190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:50.927250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:50.927375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:50.927419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:50.927458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:50.927615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.934745Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:51.069658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:51.069832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.070039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:51.070222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:51.070263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.081445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.081552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:51.081668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.081744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:51.081773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:51.081797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:51.085657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.085734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:51.085787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:51.087621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.087670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.087740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.087803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.091649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:51.110211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:51.110441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:51.111600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:51.111769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:51.111835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.112154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:51.112213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:51.112394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.112500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:51.114834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:51.114898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:51.115093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:51.115131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:51.115360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:51.115402Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:51.115499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:51.115532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.115585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:51.115652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:51.115692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:51.115747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:51.115813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:51.115861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:51.115895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:51.117852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.117965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:51.117998Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:51.118032Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:51.118082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:51.118192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.706242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.706269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:53.706295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-07T13:54:53.706323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:53.707371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.707560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.707594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:53.707632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-07T13:54:53.707656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-07T13:54:53.707889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-07T13:54:53.712507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-07T13:54:53.712642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-07T13:54:53.713214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:53.713327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T13:54:53.713370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-07T13:54:53.713450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:53.713475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-07T13:54:53.713503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-04-07T13:54:53.714793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:53.715782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:53.717458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.717506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:53.717608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-07T13:54:53.717767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:53.717854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-04-07T13:54:53.719686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:53.719729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:53.719853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-07T13:54:53.719961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:53.719990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-07T13:54:53.720021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-07T13:54:53.720318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.720357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-07T13:54:53.720405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-07T13:54:53.721117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.721212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.721248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:53.721276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-07T13:54:53.721309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:53.721964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.722045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:53.722068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:53.722093Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-07T13:54:53.722117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-07T13:54:53.722185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-07T13:54:53.724770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.724817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-07T13:54:53.724902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-07T13:54:53.724930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:53.724974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-07T13:54:53.725007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:53.725038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-07T13:54:53.725061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-07T13:54:53.725118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-07T13:54:53.725853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:53.725892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:53.725940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-07T13:54:53.726100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate 
queue, at schemeshard: 72057594046678944 2025-04-07T13:54:53.726132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:53.726192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:53.727012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:53.727451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:53.729633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:54:53.729707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-07T13:54:53.730941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-07T13:54:53.730976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-07T13:54:53.732347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-07T13:54:53.732439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-07T13:54:53.732484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2611:4603] TestWaitNotification: OK eventTxId 175 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] Test command err: 2025-04-07T13:48:14.407247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:14.407572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:14.407685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2084, node 1 TClient is connected to server localhost:20293 2025-04-07T13:48:42.380300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:285:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:48:42.380595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:48:42.380749Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 16928, node 2 TClient is connected to server localhost:9892 2025-04-07T13:49:52.077244Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2735:2389], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.078925Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.079698Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.084652Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1267:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.085276Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2732:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.085483Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2737:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.085654Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2741:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.086982Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2745:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.087105Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.087815Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.087885Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.088041Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.090559Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.091136Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.091212Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.091275Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.091426Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.091954Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2743:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.092048Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.092176Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:1269:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.093353Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.093441Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.093824Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.093874Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:52.094767Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2739:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:52.096207Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:52.096535Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:49:52.576957Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:52.878388Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:49:52.902336Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:49:53.539970Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 11370, node 3 TClient is connected to server localhost:31787 2025-04-07T13:49:54.019361Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:54.019439Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:54.019490Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:54.020307Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:47.997286Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2753:2389], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.000022Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.001158Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.004433Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2107:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.005024Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [19:2113:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.005885Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2750:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.006152Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2105:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.007784Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.008841Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2109:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.008965Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.009407Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T1 ... error=incorrect path status: LookupError; 2025-04-07T13:51:48.010243Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.010570Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.010624Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.010880Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:2115:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.011722Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.011837Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.012193Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.012567Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2755:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.012744Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.013455Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:48.014959Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:2111:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:48.017358Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:48.018027Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:51:48.708847Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:51:49.198585Z node 12 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:51:49.291495Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:51:50.765232Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 31449, node 12 TClient is connected to server localhost:7296 2025-04-07T13:51:52.539235Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:51:52.539358Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:51:52.539444Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:51:52.540654Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:54:15.801200Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [21:2758:2389], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.803381Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.804964Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.806763Z node 25 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [25:1972:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.807210Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:1978:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.807417Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:1980:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.808526Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [22:2755:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.808774Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [23:2760:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.809898Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.810268Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.810360Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.811964Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.812044Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.812204Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.812454Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:1976:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.812562Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.812616Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.813162Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.813254Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.814389Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [26:1974:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.814500Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.814932Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.815678Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:15.816574Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.818486Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:1541:2205], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:15.819206Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:54:15.819996Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:54:16.488362Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:16.919120Z node 21 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:54:17.029345Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:54:18.660623Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20853, node 21 TClient is connected to server localhost:13015 2025-04-07T13:54:19.513489Z node 21 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:19.513608Z node 21 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:19.513694Z node 21 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:19.514439Z node 21 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> AnalyzeColumnshard::AnalyzeRebootColumnShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:02.967322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:02.967424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:02.967461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:02.967511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:02.967561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:02.967594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:02.967644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:02.967939Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-04-07T13:54:03.036327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:03.036404Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:03.052689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:03.052812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:03.052962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:03.059535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:03.059723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:03.060289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:03.060476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:03.062366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:03.063511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:03.063563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:03.063684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:03.063735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:03.063781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:03.064028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.078400Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:03.216370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:03.216604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.216902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:03.217121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:03.217189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.221635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:03.221982Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:03.222193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.222269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:03.222310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:03.222339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:03.226590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.226658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:03.226692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:03.237695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.237761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.237796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:03.237836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:03.243716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:03.253506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:03.253707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:03.254709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:03.254846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:03.254887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:03.255131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:03.255177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:03.255322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:03.255406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:03.257765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:03.257813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:03.257998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:03.258040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:03.258311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:03.258359Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:03.258452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:03.258482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:03.258520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:03.258567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:03.258598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:03.258623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:03.258691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:03.258725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:03.258754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:03.260608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:03.265762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:03.265865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:03.265951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:03.266022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:03.266171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
3709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:52.729214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:54:52.729372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T13:54:52.729943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.730042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:52.730104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T13:54:52.730213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T13:54:52.730324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:54:52.888341Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3447:5413], attempt# 0 2025-04-07T13:54:52.963462Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3447:5413], sender# [1:3446:5412] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:62587 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 748B2C95-AEAF-406A-8E99-1B7508EEAEC3 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-07T13:54:52.981461Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3447:5413], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T13:54:52.985351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:52.985411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:54:52.985675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.985718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:54:52.986492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.986548Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:54:52.987808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:54:52.987921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:54:52.987955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:54:52.987993Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:54:52.988035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:54:52.988113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:62587 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E0A7D8B1-354A-4A94-9278-EC4E538C4BF1 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T13:54:52.993992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:54:52.995227Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3447:5413], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:62587 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B00A75C5-2676-46A1-84A0-C2A06983657E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-07T13:54:53.008641Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3447:5413], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-07T13:54:53.009080Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T13:54:53.009670Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:62587 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 95EE052B-3966-406B-BA6A-EA8F67B86CD7 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-04-07T13:54:53.016011Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3447:5413], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-04-07T13:54:53.016088Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3447:5413], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T13:54:53.016531Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T13:54:53.049344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:53.049422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:54:53.049584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:53.049691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T13:54:53.049753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:53.049790Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.049838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:54:53.049879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:54:53.050028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:53.054594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.055307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:54:53.055367Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:54:53.055494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-04-07T13:54:53.055533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:53.055582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:54:53.055662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T13:54:53.055715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:54:53.055753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:54:53.055784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:54:53.055909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:54:53.062627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:54:53.062697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3432:5399] TestWaitNotification: OK eventTxId 102 >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |87.3%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-04-07T13:50:21.676030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568228083551750:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:21.681106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:21.718686Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568228241714701:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:22.053302Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:22.061821Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00148f/r3tmp/tmpIa8z5c/pdisk_1.dat 2025-04-07T13:50:22.105804Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:22.488373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:22.488475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:22.489812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:22.489862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T13:50:22.497055Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:50:22.497164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:22.497913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:22.498357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17257, node 1 2025-04-07T13:50:22.691594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00148f/r3tmp/yandexjyfbUo.tmp 2025-04-07T13:50:22.691636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00148f/r3tmp/yandexjyfbUo.tmp 2025-04-07T13:50:22.691826Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00148f/r3tmp/yandexjyfbUo.tmp 2025-04-07T13:50:22.691935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:22.799672Z INFO: TTestServer started on Port 5737 GrpcPort 17257 TClient is connected to server localhost:5737 PQClient connected to localhost:17257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:23.244126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:23.321911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:23.794301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:26.528323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568249558389184:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:26.528444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:26.528717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568249558389219:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:26.532076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:50:26.541223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568249558389253:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:26.541304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:26.566777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-07T13:50:26.573485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568249558389221:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:50:26.672970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568228083551750:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:26.673028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:26.729027Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568228241714701:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:26.729652Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:26.902991Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568249716551423:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:26.905095Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWI0MzM2NjgtNWI3YjMyY2UtMmExNDUxODMtOWEwZjZmZWQ=, ActorId: [2:7490568249716551391:2285], ActorState: ExecuteState, TraceId: 01jr888hx9dw34g9pfqbv151zw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:26.907200Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:26.909913Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568249558389310:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:26.912906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmQyMTM0YmQtZDlhZmQxYjYtZDA0MDVlZDktMWFiZTk3NWY=, ActorId: [1:7490568249558389179:2303], ActorState: ExecuteState, TraceId: 01jr888hpe8cvs7drn54x4azgp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:26.925232Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:26.926581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:27.189848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:50:27.444293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:50:27.925368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr888jsy8ct7vdqzjqf7zfxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U5OWU1NGMtNTk3YmIzNGEtZmVhNjEwZjctMzI3YWVmNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568258148324379:3106] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:17257 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-07T13:50:33.133192Z node ... emeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:54:44.391450Z node 25 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr88gd7v3689tz3jbymsg0d8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=YjFiOWFjMjYtZDM0NzVmYjUtYTMwYzIzY2ItOTI5NmRhNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [25:7490569356424821548:3084] === CheckClustersList. 
Ok 2025-04-07T13:54:50.847190Z node 25 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [25:7490569382194625889:3419] connected; active server actors: 1 2025-04-07T13:54:50.847527Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-07T13:54:50.849061Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:54:50.851621Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] BALANCER INIT DONE for rt3.dc1--legacy--topic1: (0, 72075186224037892) 2025-04-07T13:54:50.854107Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72057594046644480, NodeId 25, Generation 2 2025-04-07T13:54:50.867225Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:54:50.868692Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:54:50.871576Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:54:50.871912Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:54:50.871943Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:54:50.871969Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:54:50.871996Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:54:50.872036Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:54:50.872075Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:54:50.872101Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:54:50.873710Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72075186224037892, NodeId 26, Generation 1 2025-04-07T13:54:50.873714Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:50.873775Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [25:7490569382194625918:2449], now have 1 active actors on pipe 2025-04-07T13:54:50.874145Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:50.874179Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [25:7490569382194625888:3418], now have 1 active actors on pipe 2025-04-07T13:54:50.874612Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:54:50.875295Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [25:7490569326360049198:2191] txId 281474976710678 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: 
"test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:54:50.884437Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:54:50.884707Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:54:50.886002Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7490569326360049198:2191] txId 281474976710678 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:54:50.886102Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:54:50.886589Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:54:50.891194Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [26:7490569383349846427:2372] 2025-04-07T13:54:50.894989Z node 26 :PERSQUEUE DEBUG: [rt3.dc1--legacy--topic1:0:Initializer] Initializing completed. 2025-04-07T13:54:50.895043Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 0 generation 1 [26:7490569383349846427:2372] 2025-04-07T13:54:50.895096Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:54:50.900303Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:54:50.900329Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:54:50.900607Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:54:52.674096Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7490569390784560728:2493] TxId: 281474976710683. Ctx: { TraceId: 01jr88gnkzfb42jj5qhq1mpat4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=Mzg2OTMwMC1kMWRiMTkyLTJiNDgzYmZhLWZiYzk1NGYw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-07T13:54:52.674438Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7490569390784560732:2493], TxId: 281474976710683, task: 2. Ctx: { SessionId : ydb://session/3?node_id=25&id=Mzg2OTMwMC1kMWRiMTkyLTJiNDgzYmZhLWZiYzk1NGYw. TraceId : 01jr88gnkzfb42jj5qhq1mpat4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. 
PoolId : default. }. Handle abort execution event from: [25:7490569390784560728:2493], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T13:54:52.836780Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:54:52.836819Z node 25 :IMPORT WARN: Table profiles were not loaded >> AnalyzeColumnshard::AnalyzeServerless |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:52.519510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:52.519609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:52.519653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:52.519683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:52.519752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:52.519777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:52.519826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:52.520162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:52.640028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:52.640095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:52.662108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:52.662251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:52.662424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:52.672577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:52.672855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:52.673545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.673771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:52.676169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.677575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:52.677646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.677786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-07T13:54:52.677844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.677887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:52.678046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.686033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:52.826614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:52.826880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.827125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:52.827375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:52.827429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.833794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.833967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:52.834169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.834245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:52.834286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:52.834324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:52.841807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.841915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:52.841976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:52.844707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.844783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.844860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-07T13:54:52.844936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:52.848960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:52.850938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:52.851124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:52.851967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:52.852085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:52.852143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:52.852376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:52.852423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:52.852592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.852940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:52.855059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:52.855112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:52.855285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:52.855328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:52.855577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:52.855635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:52.855748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:52.855783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:52.855833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2025-04-07T13:54:52.855879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:52.855909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:52.855933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:52.855992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:52.856044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:52.856080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:52.857733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:52.857821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:52.857856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:52.857887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:52.857930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:52.858034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ags: 2 } ExecLevel: 0 TxId: 175 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:56.013799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.013890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.013924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:56.013956Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-07T13:54:56.013988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:56.016866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.016949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.016973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:56.017001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-07T13:54:56.017045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-07T13:54:56.017132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-07T13:54:56.030774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-07T13:54:56.030906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-07T13:54:56.036219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:56.036358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:56.036412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-07T13:54:56.036470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 
72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:56.036513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-07T13:54:56.036651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-04-07T13:54:56.036936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:56.037013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-07T13:54:56.037770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:56.046055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-04-07T13:54:56.054485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:56.054547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:56.054687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-07T13:54:56.054824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:56.054893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-07T13:54:56.054939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-07T13:54:56.055497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:56.055547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-07T13:54:56.055638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-07T13:54:56.055667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:56.055710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-04-07T13:54:56.055744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:56.055774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-07T13:54:56.055800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-07T13:54:56.055878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-07T13:54:56.055912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-04-07T13:54:56.055965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 
2025-04-07T13:54:56.055994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-04-07T13:54:56.056640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.064907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.064982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:56.065020Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-07T13:54:56.065092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:56.066210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.066312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:56.066346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:56.066380Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-07T13:54:56.066421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-07T13:54:56.066538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-04-07T13:54:56.066954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:56.066998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:56.067079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-07T13:54:56.067351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:56.067386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:56.067438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:56.075166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 
2025-04-07T13:54:56.075349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:56.075491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:54:56.079795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-07T13:54:56.081159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-07T13:54:56.081211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-07T13:54:56.082802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-07T13:54:56.082930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-07T13:54:56.082961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2461:4453] TestWaitNotification: OK eventTxId 175 >> AnalyzeDatashard::DropTableNavigateError |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> TraverseDatashard::TraverseTwoTablesServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-04-07T13:50:16.234541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568207055026174:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:16.234609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149f/r3tmp/tmpYyEk3v/pdisk_1.dat 2025-04-07T13:50:16.577460Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:16.588294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:16.641525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:16.930613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:16.948287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:16.948390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:16.950627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:16.950707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:16.957330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-07T13:50:16.962693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:50:16.968065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14033, node 1 2025-04-07T13:50:17.178035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00149f/r3tmp/yandexLVXjjN.tmp 2025-04-07T13:50:17.178055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00149f/r3tmp/yandexLVXjjN.tmp 2025-04-07T13:50:17.178198Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00149f/r3tmp/yandexLVXjjN.tmp 2025-04-07T13:50:17.178283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:17.265388Z INFO: TTestServer started on Port 12015 GrpcPort 14033 TClient is connected to server localhost:12015 PQClient connected to localhost:14033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:17.596527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:17.685917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:50:20.838758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568224298651657:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.838945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.840827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568224298651690:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.850229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568224234896227:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.850371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.856459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:50:20.884948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568224234896241:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:20.907437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568224298651695:2290], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:50:21.075190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:21.075174Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568228593619025:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:21.076932Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjRkMzcxYjUtZTA0YjNmMGQtNGFhODgzZWItYzllZTVjNzQ=, ActorId: [2:7490568224298651654:2283], ActorState: ExecuteState, TraceId: 01jr888c2p5mh9adnfg7farrpq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:21.079498Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568224234896340:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:21.081156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MThkNjU1YS01YjFlY2E1NS04NzZhMzg0Ni02NTIwMDVkZA==, ActorId: [1:7490568224234896225:2305], ActorState: ExecuteState, TraceId: 01jr888c2j32kq4npejbzt99ve, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:21.083118Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:21.083118Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:21.238515Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568207055026174:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:21.238576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:21.351712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:50:21.617191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:50:22.745869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr888d4sesxaw0re0m91jh00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE4MGUzODMtOTQ1MWViNjMtNTQzNGE4NDYtMmE3MDkyODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568232824831429:3123] === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = /Root/PQ/rt3.dc1/demo, dc = unknown 2025-04-07T13:50:29.019458Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-07T13:50:29.171911Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490568262889603001:3416] connected; active server actors: 1 2025-04-07T13:50:29.172286Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--demo] updating configuration. Deleted partitions []. Added partit ... kie 5 consumer shared/user session shared/user_27_5_5085207832937759838_v1 is DEAD 2025-04-07T13:54:52.428566Z node 27 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-07T13:54:52.428575Z node 27 :PQ_READ_PROXY DEBUG: new session created cookie 6 2025-04-07T13:54:52.428859Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [27:7490569388789552461:2599] disconnected; active server actors: 1 2025-04-07T13:54:52.428891Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [27:7490569388789552461:2599] client user disconnected session shared/user_27_5_5085207832937759838_v1 2025-04-07T13:54:52.429228Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:52.429277Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_27_5_5085207832937759838_v1 2025-04-07T13:54:52.429327Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [27:7490569388789552464:2602] destroyed 2025-04-07T13:54:52.429403Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_5_5085207832937759838_v1 2025-04-07T13:54:52.429189Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-07T13:54:52.429371Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 read init: from# ipv6:[::1]:46352, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-07T13:54:52.429758Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 auth for : user 2025-04-07T13:54:52.429786Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2025-04-07T13:54:52.429801Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-04-07T13:54:52.429860Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly 2025-04-07T13:54:52.430429Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:54:52.430561Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-04-07T13:54:52.430598Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 Handle describe topics response 2025-04-07T13:54:52.430819Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 auth is DEAD 2025-04-07T13:54:52.430945Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 auth ok: topics# 1, initDone# 0 2025-04-07T13:54:52.431901Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_3153904913530361276_v1" } 2025-04-07T13:54:52.432645Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7490569388789552471:2603] connected; active server actors: 1 2025-04-07T13:54:52.432717Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7490569388789552471:2603] session shared/user_27_6_3153904913530361276_v1 2025-04-07T13:54:52.432776Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-04-07T13:54:52.432865Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-04-07T13:54:52.432930Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_3153904913530361276_v1" (Sender=[27:7490569388789552468:2603], Pipe=[27:7490569388789552471:2603], Partitions=[], ActiveFamilyCount=0) 2025-04-07T13:54:52.432988Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2025-04-07T13:54:52.433066Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-07T13:54:52.433140Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_3153904913530361276_v1" (Sender=[27:7490569388789552468:2603], Pipe=[27:7490569388789552471:2603], Partitions=[], ActiveFamilyCount=0) 2025-04-07T13:54:52.433232Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_3153904913530361276_v1" sender [27:7490569388789552468:2603] lock partition 0 for ReadingSession "shared/user_27_6_3153904913530361276_v1" (Sender=[27:7490569388789552468:2603], Pipe=[27:7490569388789552471:2603], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-04-07T13:54:52.433315Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-07T13:54:52.433359Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000256s 2025-04-07T13:54:52.461866Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_3153904913530361276_v1" ClientId: "user" PipeClient { RawX1: 7490569388789552471 RawX2: 4503715591490091 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-04-07T13:54:52.462091Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-04-07T13:54:52.465961Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1 2025-04-07T13:54:52.464994Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:52.465069Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7490569388789552474:2606], now have 1 active actors on pipe 2025-04-07T13:54:52.468926Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-04-07T13:54:52.468973Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-04-07T13:54:52.469034Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_3153904913530361276_v1 on pipe: [27:7490569388789552474:2606] 2025-04-07T13:54:52.472853Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_3153904913530361276_v1:1 with generation 1 2025-04-07T13:54:52.473065Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_3153904913530361276_v1 2025-04-07T13:54:52.473417Z node 28 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T13:54:52.489029Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:54:52.489134Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-04-07T13:54:52.489886Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1744034092288 CreateTimestampMS: 1744034092288 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2025-04-07T13:54:52.489962Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-07T13:54:52.490059Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 sending to client partition status 2025-04-07T13:54:52.496844Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 grpc closed 2025-04-07T13:54:52.496914Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_3153904913530361276_v1 is DEAD 2025-04-07T13:54:52.498025Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7490569388789552471:2603] disconnected; active server actors: 1 2025-04-07T13:54:52.498073Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7490569388789552471:2603] client user disconnected session shared/user_27_6_3153904913530361276_v1 2025-04-07T13:54:52.504904Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:54:52.504950Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_3153904913530361276_v1 2025-04-07T13:54:52.504993Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7490569388789552474:2606] destroyed 2025-04-07T13:54:52.505061Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_3153904913530361276_v1 2025-04-07T13:54:52.681200Z node 28 :PQ_METACACHE DEBUG: Check version rescan 2025-04-07T13:54:52.692376Z node 28 :PQ_METACACHE DEBUG: Metacache: reset 2025-04-07T13:54:53.057168Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-04-07T13:54:53.074631Z node 27 :PQ_METACACHE DEBUG: Metacache: reset 2025-04-07T13:54:53.815223Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate 2025-04-07T13:54:53.815257Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate LocalCluster !LocalCluster.empty() 2025-04-07T13:54:54.071329Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-04-07T13:54:54.084335Z node 27 :PQ_METACACHE DEBUG: Metacache: reset |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |87.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |87.3%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:54:50.246771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:54:50.246864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:50.246904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:54:50.246930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:54:50.246959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:54:50.246977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:54:50.247012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:54:50.247227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:50.311120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:54:50.311180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:50.338844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:50.339044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:54:50.339240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:54:50.349933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:54:50.350209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:50.351154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:50.351427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:54:50.353943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:50.355330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:50.355393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:50.355528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:54:50.355580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:50.355625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:54:50.355782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.363373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:54:50.509787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:54:50.510069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.510335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:54:50.510577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:54:50.510647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.521814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:50.522013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:54:50.522246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.522325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:54:50.522395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:54:50.522435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:54:50.526158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.526248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:54:50.526294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:54:50.528509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.528570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.528633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:50.528738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:54:50.532526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:54:50.536604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:54:50.536871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:54:50.538029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:54:50.538212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:54:50.538285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:50.538601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:54:50.538657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:54:50.538835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:50.538920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:54:50.547848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:50.547927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:50.548126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:50.548176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:54:50.548427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:54:50.548501Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:54:50.548609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:54:50.548643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:50.548995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:54:50.549054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:54:50.549131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:54:50.549165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:54:50.549263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:50.549320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:54:50.549359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:54:50.551534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:50.551665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:54:50.551703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:54:50.551747Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:54:50.551800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:50.551936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 0, tablet: 72057594037968897 2025-04-07T13:54:58.395426Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteOwnerTablets, msg: { Owner: 72075186233409618 TxId: 175 } 2025-04-07T13:54:58.395604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-04-07T13:54:58.395706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-04-07T13:54:58.395761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-04-07T13:54:58.395900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-07T13:54:58.396082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:58.396171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-07T13:54:58.398209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:58.398443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:54:58.398494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:54:58.398621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-07T13:54:58.398790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:54:58.398824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-07T13:54:58.398858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-07T13:54:58.399208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:58.399250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-07T13:54:58.399285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-07T13:54:58.400165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:58.400277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:58.400310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 
2025-04-07T13:54:58.400341Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-07T13:54:58.400376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:54:58.401132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:58.401206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-07T13:54:58.401244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-07T13:54:58.401285Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-07T13:54:58.401316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-04-07T13:54:58.401394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-07T13:54:58.404379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:54:58.404448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:54:58.404483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-04-07T13:54:58.404784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-07T13:54:58.404837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-07T13:54:58.404939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-07T13:54:58.404969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:58.405008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-07T13:54:58.405043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-07T13:54:58.405077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-07T13:54:58.405102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-07T13:54:58.405280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-07T13:54:58.406421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:58.407193Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 2025-04-07T13:54:58.408366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-04-07T13:54:58.408693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 Forgetting tablet 72075186233409619 2025-04-07T13:54:58.409505Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2025-04-07T13:54:58.410500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:58.412954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-04-07T13:54:58.413242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-07T13:54:58.413705Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409618 2025-04-07T13:54:58.415494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-04-07T13:54:58.415763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 Forgetting tablet 72075186233409620 2025-04-07T13:54:58.416614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:58.416657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:58.416873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-07T13:54:58.417526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-07T13:54:58.418049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:54:58.418090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-07T13:54:58.418152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:54:58.419957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-04-07T13:54:58.420002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-04-07T13:54:58.422936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-04-07T13:54:58.422986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-04-07T13:54:58.423111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2025-04-07T13:54:58.423180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:75 tabletId 72075186233409620 2025-04-07T13:54:58.423260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:54:58.423489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-07T13:54:58.456169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-07T13:54:58.456232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-07T13:54:58.458178Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-07T13:54:58.458311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-07T13:54:58.458346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6734:7741] TestWaitNotification: OK eventTxId 175 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] |87.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-04-07T13:54:56.070751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569406595936480:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:56.070800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmp1bWZsZ/pdisk_1.dat 2025-04-07T13:54:56.800481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:56.826256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:56.826312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:56.835964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13751, node 1 2025-04-07T13:54:56.930122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:56.930145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:56.930151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:56.930252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:57.215580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.229985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.237016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:54:57.242155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:54:57.242396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:54:57.242448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:54:57.249141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:57.249182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:54:57.250534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:57.257534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.264549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034097309, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:57.264585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:54:57.264886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:54:57.267216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.267414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.267470Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:54:57.267549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:54:57.267638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:54:57.267686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:54:57.273221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:54:57.273286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:54:57.273307Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:54:57.273410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:54:57.348044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.348285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.353960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, 
database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2025-04-07T13:54:57.354184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.354466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.354521Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:57.356529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:57.356584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:57.356601Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:54:57.360134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:57.360181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:57.360195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T13:54:57.360398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:54:57.364523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034097407, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:57.364564Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744034097407, at schemeshard: 72057594046644480 2025-04-07T13:54:57.364702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T13:54:57.377718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.377988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.378040Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T13:54:57.378088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:54:57.378120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:54:57.378178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T13:54:57.380150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:57.380200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 
2025-04-07T13:54:57.380214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:54:57.380557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:57.380586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:57.380604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:54:57.380653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 Backup "/Root" to "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/"Create temporary directory "/Root/~backup_20250407T135457"2025-04-07T13:54:57.422463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/~backup_20250407T135457, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.422630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.425682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/~backup_20250407T135457 2025-04-07T13:5 ... Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2025-04-07T13:54:57.769060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2025-04-07T13:54:57.769296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710663 2025-04-07T13:54:57.769310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2025-04-07T13:54:57.769318Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T13:54:57.770612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-07T13:54:57.776182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034097820, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:57.776218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710663:0, step: 1744034097820, at schemeshard: 72057594046644480 2025-04-07T13:54:57.776340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710663:0 progress is 1/1 2025-04-07T13:54:57.776467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710663:0 2025-04-07T13:54:57.776521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710663, publications: 2, subscribers: 1 2025-04-07T13:54:57.780590Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.780870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.782204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046644480, cookie: 281474976710663 2025-04-07T13:54:57.782253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710663 2025-04-07T13:54:57.782274Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 11 2025-04-07T13:54:57.782442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710663 2025-04-07T13:54:57.782456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710663 2025-04-07T13:54:57.782464Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:54:57.782509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710663, subscribers: 1 2025-04-07T13:54:57.785147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/" to "/Root"Process "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/dir"Restore empty directory "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/dir" to "/Root/dir"2025-04-07T13:54:57.849456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.849624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.853896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710664, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2025-04-07T13:54:57.854090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.854316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.854393Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:57.855836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:54:57.855877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:54:57.855889Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2025-04-07T13:54:57.856106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:54:57.856138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:54:57.856164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T13:54:57.857187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710664, at schemeshard: 72057594046644480 2025-04-07T13:54:57.860007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034097904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:57.860041Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744034097904, at schemeshard: 72057594046644480 2025-04-07T13:54:57.860130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 128 -> 240 2025-04-07T13:54:57.861923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.862117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.862183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710664:0 ProgressState 2025-04-07T13:54:57.862235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-07T13:54:57.862266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-04-07T13:54:57.862319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710664, publications: 2, subscribers: 1 2025-04-07T13:54:57.863371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:54:57.863406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:54:57.863420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2025-04-07T13:54:57.863620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T13:54:57.863645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T13:54:57.863653Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T13:54:57.863683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, 
subscribers: 1 Restore ACL "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/u6pr/002169/r3tmp/tmpC8GzGO/dir/permissions.pb"2025-04-07T13:54:57.882860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/dir, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.883048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.883066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.883138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-07T13:54:57.883232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-07T13:54:57.883248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 2, subscribers: 0 2025-04-07T13:54:57.885517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/dir, set owner:root@builtin 2025-04-07T13:54:57.885646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.885871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.886613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-07T13:54:57.886652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710665 2025-04-07T13:54:57.886665Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 4 2025-04-07T13:54:57.886918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-07T13:54:57.886935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2025-04-07T13:54:57.886948Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2025-04-07T13:54:57.886980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 Restore completed successfully >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |87.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] |87.3%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] Test command err: 2025-04-07T13:54:55.606984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569404779686212:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:55.607034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002160/r3tmp/tmpmrRLAZ/pdisk_1.dat 2025-04-07T13:54:56.409213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:56.444719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:56.444829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29772, node 1 2025-04-07T13:54:56.477206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:54:56.477813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:56.477969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:54:56.497667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:56.550323Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:54:56.550718Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:54:56.588140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:56.588751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:56.588775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:56.588823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable 
to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:54:56.588891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.633269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:56.633294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:56.633304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:56.633402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:57.169091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.183051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:57.205996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.213645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:54:57.213883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:54:57.213901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:54:57.217298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:57.221544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:57.221588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:54:57.233668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.254515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034097288, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:57.254565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:54:57.254869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:54:57.257586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:57.257754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:57.257804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:54:57.257890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:54:57.257924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:54:57.257983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:54:57.261898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:54:57.261975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:54:57.262000Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:54:57.262093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:55:00.037549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569421959556210:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:00.041933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:00.080775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:00.081251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:55:00.085479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:00.085512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:00.085545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSequence Propose, path: /Root/table/_serial_column_Key, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T13:55:00.085726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 2 2025-04-07T13:55:00.085913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:00.088791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/table 2025-04-07T13:55:00.088997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:00.089233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:00.089294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateSequence, at tablet# 72057594046644480 2025-04-07T13:55:00.089515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:55:00.091090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.091127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.091143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:00.091335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.091351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.091360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T13:55:00.091499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.091523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at 
schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.091531Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2025-04-07T13:55:0 ... 281474976710671 2025-04-07T13:55:02.270215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.270234Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2025-04-07T13:55:02.270533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2025-04-07T13:55:02.270551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.270591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 2 2025-04-07T13:55:02.270709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710671 2025-04-07T13:55:02.270735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.270745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 1 2025-04-07T13:55:02.278679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710671, at schemeshard: 72057594046644480 2025-04-07T13:55:02.293397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710671:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:55:02.293476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 2 -> 3 2025-04-07T13:55:02.297531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:55:02.314688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 3 -> 128 2025-04-07T13:55:02.346205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:02.384046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:55:02.384077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:55:02.384159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 3 -> 128 2025-04-07T13:55:02.385990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710671:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:55:02.389243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034102433, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:02.389286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose 
operationId#281474976710671:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034102433 2025-04-07T13:55:02.389393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 128 -> 129 2025-04-07T13:55:02.389461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T13:55:02.389541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 128 -> 240 2025-04-07T13:55:02.392854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:02.393294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:02.393360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:1 ProgressState 2025-04-07T13:55:02.393436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:1 progress is 1/2 2025-04-07T13:55:02.393613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:55:02.395396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976710671 2025-04-07T13:55:02.395443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.395459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 15 2025-04-07T13:55:02.395661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710671 2025-04-07T13:55:02.395678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.395688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2025-04-07T13:55:02.395786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2025-04-07T13:55:02.395801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2025-04-07T13:55:02.395809Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-04-07T13:55:02.396394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976710671 Step: 1744034102433 OrderId: 281474976710671 ExecLatency: 0 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 1081 } } 2025-04-07T13:55:02.397494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:55:02.397519Z node 
1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:55:02.397541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 129 -> 240 2025-04-07T13:55:02.400194Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:0 ProgressState 2025-04-07T13:55:02.400277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:0 progress is 2/2 2025-04-07T13:55:02.400323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:0 2025-04-07T13:55:02.400425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:1 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/u6pr/002160/r3tmp/tmpB7kVs3/table/data_00.csv"2025-04-07T13:55:02.591848Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr88gz5he5s0rjhsqf9qdks2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYyZmJkZTgtNDg0MzM1YS1hZjMwNmQxNy0yMGUxZTU2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/u6pr/002160/r3tmp/tmpB7kVs3/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/u6pr/002160/r3tmp/tmpB7kVs3/table/permissions.pb"2025-04-07T13:55:02.639305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:55:02.639519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:02.639544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:55:02.639612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710673:0 progress is 1/1 2025-04-07T13:55:02.639719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710673:0 2025-04-07T13:55:02.639737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710673, publications: 3, subscribers: 0 2025-04-07T13:55:02.641353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710673, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2025-04-07T13:55:02.641493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:02.641763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 Restore completed successfully2025-04-07T13:55:02.643566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710673 2025-04-07T13:55:02.643616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710673 2025-04-07T13:55:02.643633Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 3 2025-04-07T13:55:02.643847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710673 2025-04-07T13:55:02.643864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710673 2025-04-07T13:55:02.643874Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-07T13:55:02.643973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976710673 2025-04-07T13:55:02.643987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710673 2025-04-07T13:55:02.643995Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 16 2025-04-07T13:55:02.644026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710673, subscribers: 0 2025-04-07T13:55:02.837263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr88gzct9z667fqwtsqx72rb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM3MWM0MGYtMzY0NzY2YjctZDZkYzMwYmEtNzNiNjlmMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> TDataShardLocksTest::UseLocksCache [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-04-07T13:54:56.305070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:56.305429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:56.305733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0020f7/r3tmp/tmp4Uupgs/pdisk_1.dat 2025-04-07T13:54:56.846069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.928162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:56.983924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:56.984086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:56.998096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:57.158782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:57.241591Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2541]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:54:57.245654Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2541]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:54:57.246231Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2541] 2025-04-07T13:54:57.246556Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:57.285487Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:625:2532], Recipient [1:639:2542]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:54:57.349578Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2541]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:54:57.350017Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:625:2532], Recipient [1:639:2542]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:54:57.350508Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:639:2542] 2025-04-07T13:54:57.350747Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:54:57.366069Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:625:2532], Recipient [1:639:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:54:57.367638Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:57.367836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:54:57.369654Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:54:57.369753Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:54:57.369819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2025-04-07T13:54:57.370143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:54:57.416835Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:54:57.417058Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:54:57.417195Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2025-04-07T13:54:57.417259Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:54:57.417300Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:54:57.417351Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:54:57.417647Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2541], Recipient [1:638:2541]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:54:57.417692Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:54:57.423655Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:54:57.423824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:54:57.424141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:54:57.424234Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:54:57.424303Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:54:57.424370Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:54:57.424418Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:54:57.424459Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:54:57.424547Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:54:57.424691Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:54:57.424927Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:658:2552], Recipient [1:638:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:57.424971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:57.425040Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:631:2536], serverId# [1:658:2552], sessionId# [0:0:0] 2025-04-07T13:54:57.425104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:54:57.426527Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T13:54:57.426597Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T13:54:57.426654Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T13:54:57.426928Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:54:57.426977Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T13:54:57.427050Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: 
missing processing params 2025-04-07T13:54:57.427145Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2025-04-07T13:54:57.427175Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T13:54:57.427199Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T13:54:57.427222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:54:57.427345Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:658:2552] 2025-04-07T13:54:57.427435Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:54:57.427556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:54:57.427767Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:54:57.427829Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:54:57.427925Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:54:57.427986Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:54:57.428031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:54:57.428065Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:54:57.428101Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:54:57.428367Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:54:57.428405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:54:57.428439Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:54:57.428474Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:54:57.428536Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:54:57.428569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:54:57.428612Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:54:57.428649Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:54:57.430376Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:54:57.430606Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:639:2542], Recipient [1:639:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:54:57.430647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:54:57.431299Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T13:54:57.431376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T13:54:57.431824Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T13:54:57.431861Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:54:57.431891Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-04-07T13:54:57.431916Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T13:54:57.431941Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T13:54:57.431983Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T13:54:57.432032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T13:54:57.432418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:662:2556], Recipient [1:639:2542]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:54:57.432469 ... 8 is DelayComplete 2025-04-07T13:55:06.243563Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:55:06.243590Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:55:06.243614Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:55:06.243657Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-07T13:55:06.243678Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:55:06.243714Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-04-07T13:55:06.257549Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:55:06.257625Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-04-07T13:55:06.257693Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:899:2692], exec latency: 9 ms, propose latency: 9 ms 2025-04-07T13:55:06.257784Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:55:06.257823Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:55:06.257970Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:55:06.258028Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-04-07T13:55:06.258076Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T13:55:06.258154Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-04-07T13:55:06.258515Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:939:2749], Recipient [2:640:2542]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:55:06.258600Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:55:06.258658Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-07T13:55:06.260129Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:640:2542]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-07T13:55:06.769756Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88h2x17qj8a3f297qpfmtm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzY1MDU4OWMtM2RjM2I5MTgtMjVmYjc4MzEtMTIxODkyNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:06.775231Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:980:2775], Recipient [2:939:2749]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T13:55:06.775386Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T13:55:06.775473Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-07T13:55:06.775580Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T13:55:06.775624Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-07T13:55:06.775668Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:55:06.775700Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:55:06.775754Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-07T13:55:06.775796Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T13:55:06.775821Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:55:06.775842Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T13:55:06.775865Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-07T13:55:06.776001Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T13:55:06.776254Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-07T13:55:06.776308Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 Complete read# {[2:980:2775], 0} after executionsCount# 1 2025-04-07T13:55:06.776369Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:980:2775], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:55:06.776448Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:980:2775], 0} finished in read 2025-04-07T13:55:06.776515Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T13:55:06.776543Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T13:55:06.776583Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:55:06.776611Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:55:06.776680Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T13:55:06.776703Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:55:06.776727Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-07T13:55:06.776777Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T13:55:06.776896Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T13:55:06.778040Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:980:2775], Recipient [2:640:2542]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-07T13:55:06.778140Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-07T13:55:06.778204Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-04-07T13:55:06.778264Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:55:06.778292Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-04-07T13:55:06.778320Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T13:55:06.778347Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T13:55:06.778393Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-04-07T13:55:06.778428Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:55:06.778480Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T13:55:06.778517Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-04-07T13:55:06.778541Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-04-07T13:55:06.778636Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 
SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-07T13:55:06.778818Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-07T13:55:06.778860Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:980:2775], 1} after executionsCount# 1 2025-04-07T13:55:06.778902Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:980:2775], 1} sends rowCount# 2, bytes# 48, quota rows left# 997, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:55:06.778963Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:980:2775], 1} finished in read 2025-04-07T13:55:06.779009Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:55:06.779038Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T13:55:06.779063Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T13:55:06.779097Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-07T13:55:06.779158Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-07T13:55:06.779180Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T13:55:06.779204Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-07T13:55:06.779231Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T13:55:06.779303Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T13:55:06.779456Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:980:2775], Recipient [2:939:2749]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T13:55:06.779518Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T13:55:06.780248Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:980:2775], Recipient [2:640:2542]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-07T13:55:06.780293Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |87.3%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> Cdc::UpdateShardCount [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> BackupRestore::RestoreTableSplitBoundaries >> Cdc::UpdateRetentionPeriod ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [FAIL] Test command err: assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:433, virtual void NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails2::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Warning: Table: local/ShouldRollbackTransactionWhenCheckFails/test, pk: masterId, current generation: 11, expected/new generation: 11, operation: Register, code: 400130 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xDF5C790) NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails2::Execute_(NUnitTest::TTestContext&)+1914 (0xD6EB4EA) std::__y1::__function::__func, void ()>::operator()()+280 (0xD6F5778) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0xDF920C9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xDF632F9) NFq::NTestSuiteTCheckGenerationTest::TCurrentTest::Execute()+1204 (0xD6F4944) NUnitTest::TTestFactory::Execute()+2438 (0xDF64BC6) NUnitTest::RunMain(int, char**)+5149 (0xDF8BD0D) ??+0 (0x7F070000AD90) __libc_start_main+128 (0x7F070000AE40) _start+41 (0xB7B2029) >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> TestKinesisHttpProxy::GoodRequestPutRecords >> TestYmqHttpProxy::TestCreateQueueWithWrongBody ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-04-07T13:54:34.216025Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:34.383907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:34.407463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:34.407534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:34.407772Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:34.415395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:34.415563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:34.415784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:34.415909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:34.416010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:34.416457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:34.416582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-07T13:54:34.416698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:34.416789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:34.416906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.417000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:34.417140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:34.446734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:34.449671Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:34.449844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:34.449894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:34.450100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.450265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:34.450340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:34.450382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:34.450459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:34.450516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:34.450582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:34.450629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:34.450805Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:34.450873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:34.450913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:34.450940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:34.451038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:34.451093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:34.451134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:34.451162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:34.451248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:34.451292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:34.451318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:34.451362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:34.451395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:34.451420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:34.451594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:54:34.451668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T13:54:34.451776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T13:54:34.451850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-07T13:54:34.452017Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:34.452088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:34.452122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:34.452370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:34.452423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.452460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:34.452626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:34.455356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:34.455415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:34.455664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:34.455733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:34.455781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:34.455903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
me=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=14341; 2025-04-07T13:55:10.545509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=19730; 2025-04-07T13:55:10.546384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=799; 2025-04-07T13:55:10.548162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=796; 2025-04-07T13:55:10.548249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1799; 2025-04-07T13:55:10.548439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=74; 2025-04-07T13:55:10.548560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:55:10.548622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=140; 2025-04-07T13:55:10.548981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=316; 2025-04-07T13:55:10.549316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=283; 2025-04-07T13:55:10.550806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1423; 2025-04-07T13:55:10.552416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1547; 2025-04-07T13:55:10.552527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33; 2025-04-07T13:55:10.552615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=47; 2025-04-07T13:55:10.552675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:55:10.552712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:55:10.552752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:55:10.552837Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-07T13:55:10.552878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:55:10.552968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-04-07T13:55:10.553025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=19; 2025-04-07T13:55:10.553092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-07T13:55:10.553125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47285; 2025-04-07T13:55:10.553286Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=33;blobs=66;rows=1126788;bytes=87164812;raw_bytes=110830786; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:55:10.553466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:55:10.553521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:55:10.553562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:55:10.553631Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:55:10.553670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:10.553738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:10.553813Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T13:55:10.553870Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:55:10.553910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:10.553956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:10.553988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:10.554027Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:10.554110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:10.554157Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:10.555156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:10.555259Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T13:55:10.556150Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:55:10.559925Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:55:10.559974Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:55:10.560003Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:55:10.560079Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:10.560167Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:10.560237Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T13:55:10.560317Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:55:10.560376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:10.560437Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:10.560475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:10.560547Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:10.560627Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:10.560693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:10.561535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T13:55:10.562991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T13:55:10.566098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:55:10.566165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |87.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> TCheckpointStorageTest::ShouldRegisterCoordinator >> TStateStorageTest::ShouldDeleteNoCheckpoints >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TStateStorageTest::ShouldSaveGetOldSmallState >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> TraverseDatashard::TraverseOneTable [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams >> Cdc::DescribeStream [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TCheckpointStorageTest::ShouldCreateCheckpoint >> Cdc::UpdateRetentionPeriod [GOOD] >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> TStorageServiceTest::ShouldRegister >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> 
BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> TPersQueueTest::TestWriteStat [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> Cdc::DropColumn >> TestKinesisHttpProxy::TestRequestWithIAM >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace >> TPersQueueTest::TestWriteSessionsConflicts >> Cdc::DropColumn [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> TStorageServiceTest::ShouldRegister [GOOD] >> Cdc::SupportedTypes >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> BackupRestoreS3::RestoreIndexTableSplitBoundaries >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::DropIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-04-07T13:54:35.292221Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:35.400235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:35.423171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:35.423244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:35.423466Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:35.430876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:35.431061Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:35.431264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:35.431359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:35.431438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:35.431546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:35.431633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:35.431731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:35.431841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:35.431932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.432021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:35.432108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:35.453652Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:35.457858Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:35.457995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:35.458046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:35.458251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:35.458404Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:35.458471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:35.458519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:35.458607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:35.458672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:35.458711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:35.458745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:35.458896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:35.458962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:35.459002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:35.459027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:35.459111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:35.459166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:35.459201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:35.459229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:35.459320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:35.459359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:35.459396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-07T13:54:35.459426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:35.459448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:35.459472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:35.459615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-07T13:54:35.459702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-07T13:54:35.459796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-07T13:54:35.459867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-07T13:54:35.460002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:35.460043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:35.460072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:35.460197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:35.460236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.460253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.460356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:35.460383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:35.460400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:35.460583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:35.460622Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:35.460695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:35.460796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3716; 2025-04-07T13:55:26.969471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9062; 2025-04-07T13:55:26.970226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=698; 2025-04-07T13:55:26.971675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=666; 2025-04-07T13:55:26.971739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1456; 2025-04-07T13:55:26.971853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=65; 2025-04-07T13:55:26.971964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:55:26.972019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=126; 2025-04-07T13:55:26.972338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=280; 2025-04-07T13:55:26.972639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=260; 2025-04-07T13:55:26.974248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1511; 2025-04-07T13:55:26.976010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1697; 2025-04-07T13:55:26.976098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=32; 2025-04-07T13:55:26.976162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T13:55:26.976198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 
2025-04-07T13:55:26.976233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:55:26.976269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:55:26.976336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2025-04-07T13:55:26.976374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:55:26.976444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2025-04-07T13:55:26.976481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:55:26.976534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2025-04-07T13:55:26.976565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22113; 2025-04-07T13:55:26.976775Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=35;blobs=70;rows=1239297;bytes=98153860;raw_bytes=121802583; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:55:26.976874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:55:26.976932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:55:26.976977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:55:26.977057Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:55:26.977091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:26.977149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:26.977216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:26.977289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:26.977329Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:26.977375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:26.977405Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:26.977442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:26.977491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:26.977530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:26.978135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:26.978226Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T13:55:26.979056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:55:26.979382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:55:26.979416Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:55:26.979438Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:55:26.979475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:26.979525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:26.979574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:26.979620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:26.979658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:26.979697Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:26.979736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:26.979772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:26.979838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:26.979892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:26.980592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T13:55:26.981800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T13:55:26.984585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:55:26.984630Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; |87.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> TCheckpointStorageTest::ShouldCreateGetCheckpoints >> TStorageServiceTest::ShouldCreateCheckpoint >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> TestKinesisHttpProxy::TestPing >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::ListShards >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> Cdc::RacyActivateAndEnqueue >> KqpLimits::QSReplySize >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> Cdc::SupportedTypes [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |87.4%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> Cdc::DisableStream >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::ListShards [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> TCheckpointStorageTest::ShouldMarkCheckpointsGc >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> Cdc::AlterViaTopicService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 13702, MsgBus: 24525 2025-04-07T13:49:32.461152Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568014037601415:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:32.461192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139a/r3tmp/tmp2H3x7t/pdisk_1.dat 2025-04-07T13:49:33.000195Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:33.002437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T13:49:33.002497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:33.005275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13702, node 1 2025-04-07T13:49:33.079843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:33.079861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:33.079867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:33.079963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24525 TClient is connected to server localhost:24525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:33.686630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.701341Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:49:33.718599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:33.885984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.068355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:34.152300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:36.050002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568031217472330:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.050146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.262817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.295748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.324086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.348051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.393541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.471680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:36.527861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568031217472827:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.527953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.528166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568031217472832:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:36.531563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:36.705063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568031217472834:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:37.461272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568014037601415:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:37.461328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:37.773564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:49:38.045757Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872bv7ecjpfkjmdtmr8qq, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.061007Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872c91k39wfqb7e4kvwe8, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.112598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872d37j6teb15wns6yka0, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.128904Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872ef12610q2p65j6g14g, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.149003Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872ex35hews4zy0q789dv, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.175643Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872fncjxz3sag3861bv2x, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.203259Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872gkc9cnz04x2tehkq34, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.227721Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872h752aryarg9wz96e6a, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.246102Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872hv5k9nr80qfbec4tc5, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.264874Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 
01jr8872jf4paxc1jrsjpjy0j2, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.294500Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872k0ejp4qtrs61md0d8s, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.323079Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872m57mxv3r72c9173v7h, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.359876Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, ActorId: [1:7490568035512440445:2461], ActorState: ExecuteState, TraceId: 01jr8872n58rep9hxcn219hfdh, Create QueryResponse for error on request, msg: 2025-04-07T13:49:38.408847Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ2MmMzYjQtYjA2NzE3NjYtZmU0ODMwZWYtMjVkYjgwZTc=, Actor ... TczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gnhz1cwcpesq39149vpw, Create QueryResponse for error on request, msg: 2025-04-07T13:54:53.925730Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gpd20jpmnwm521hnk6wg, Create QueryResponse for error on request, msg: 2025-04-07T13:54:54.656911Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gq496qc8sf6z8er936zn, Create QueryResponse for error on request, msg: 2025-04-07T13:54:55.393038Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gqv92xscekwgmte7fvs5, Create QueryResponse for error on request, msg: 2025-04-07T13:54:56.418005Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88grr571tba1pnqh6knszk, Create QueryResponse for error on request, msg: 2025-04-07T13:54:56.967460Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gs9g8gsan9bpq3gg1xfj, Create QueryResponse for error on request, msg: 2025-04-07T13:54:57.454262Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gsvy9k3wea77ttdycqgf, Create QueryResponse for error on request, msg: 2025-04-07T13:54:58.369046Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gtqk0g8a25ss9v0syzh4, Create QueryResponse for error on request, msg: 2025-04-07T13:54:59.113032Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], 
ActorState: ExecuteState, TraceId: 01jr88gvf16gbrd9kpwpf7my47, Create QueryResponse for error on request, msg: 2025-04-07T13:54:59.820493Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gw4p3a2749cpg3v2ed6v, Create QueryResponse for error on request, msg: 2025-04-07T13:55:00.506894Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gwtm5bn4x3fbqxm4e3c0, Create QueryResponse for error on request, msg: 2025-04-07T13:55:01.368923Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gxnm3xfky3758xfdntwz, Create QueryResponse for error on request, msg: 2025-04-07T13:55:01.833041Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gy451wncce5c57gp6bbg, Create QueryResponse for error on request, msg: 2025-04-07T13:55:02.629048Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gyww525hkvh32nywnac1, Create QueryResponse for error on request, msg: 2025-04-07T13:55:03.113213Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gzbmevtc2gdav4ber0ap, Create QueryResponse for error on request, msg: 2025-04-07T13:55:03.644989Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88gzwn2p12h0yw92t9778h, Create QueryResponse for error on request, msg: 2025-04-07T13:55:04.154109Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h0bf0fz6srpdhz1vtp61, Create QueryResponse for error on request, msg: 2025-04-07T13:55:04.671252Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h0wh93rgg6m25zy5hgbf, Create QueryResponse for error on request, msg: 2025-04-07T13:55:05.583936Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h1rg8hvv38pacbmx0sf2, Create QueryResponse for error on request, msg: 2025-04-07T13:55:06.084029Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h27z1k2cd3jf4gfg5fy0, Create QueryResponse for error on request, msg: 2025-04-07T13:55:06.671851Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, 
TraceId: 01jr88h2tyd3acp3fgrsc9pgqf, Create QueryResponse for error on request, msg: 2025-04-07T13:55:07.165241Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h3a51g39wtv64g684rcw, Create QueryResponse for error on request, msg: 2025-04-07T13:55:07.992403Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h4425yfj8pyz1m716rsr, Create QueryResponse for error on request, msg: 2025-04-07T13:55:08.531932Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h4k6f40g11gey3c0e462, Create QueryResponse for error on request, msg: 2025-04-07T13:55:09.164947Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h58q69j32tqtgk5s6stb, Create QueryResponse for error on request, msg: 2025-04-07T13:55:09.648982Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h5qw13z05ee7m7h0n7sk, Create QueryResponse for error on request, msg: 2025-04-07T13:55:10.645861Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h6nt4ryk697zad6jx7zg, Create QueryResponse for error on request, msg: 2025-04-07T13:55:11.156681Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h75y143axp3yp5t6f0d1, Create QueryResponse for error on request, msg: 2025-04-07T13:55:11.732915Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h7rqfsb2fn5v64cj70n1, Create QueryResponse for error on request, msg: 2025-04-07T13:55:12.432960Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h8efat50e9ygah79ynfk, Create QueryResponse for error on request, msg: 2025-04-07T13:55:13.317431Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88h9a4211j59d98mawd5r6, Create QueryResponse for error on request, msg: 2025-04-07T13:55:14.053286Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88ha150jeerx0rpxpkw9hb, Create QueryResponse for error on request, msg: 2025-04-07T13:55:14.708983Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 
01jr88hanf4bbec593v2tbnqc3, Create QueryResponse for error on request, msg: 2025-04-07T13:55:15.417535Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hb9s6hnewwqatf1x7f5j, Create QueryResponse for error on request, msg: 2025-04-07T13:55:16.141231Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hc228jq22611p1f8thhd, Create QueryResponse for error on request, msg: 2025-04-07T13:55:16.924023Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hctg0p19t93gsjmay70q, Create QueryResponse for error on request, msg: 2025-04-07T13:55:17.565717Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hdeg69s1c21j4txx95ty, Create QueryResponse for error on request, msg: 2025-04-07T13:55:18.234480Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88he3b6yc9mvd2ngr0qyv4, Create QueryResponse for error on request, msg: 2025-04-07T13:55:18.908687Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88herc3a64zgv0ddb619yb, Create QueryResponse for error on request, msg: 2025-04-07T13:55:19.548919Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hfc587d6m9yx7r1e8d3e, Create QueryResponse for error on request, msg: 2025-04-07T13:55:20.166409Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTczMmIyMTItMWIxMWM1MjYtNTdlMWQxYzctYTUwMjA0YjQ=, ActorId: [3:7490568982544359715:2465], ActorState: ExecuteState, TraceId: 01jr88hfzb7sandtw1zm2qsg8q, Create QueryResponse for error on request, msg: >> TStorageServiceTest::ShouldGetCheckpoints >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-04-07T13:55:01.991227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:01.991585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:01.991750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140a/r3tmp/tmpsCIkgb/pdisk_1.dat 2025-04-07T13:55:02.549817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17095, node 1 2025-04-07T13:55:02.984125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:02.984183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:02.984218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:02.985162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:02.987293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:03.138628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:03.138759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:03.166232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19211 2025-04-07T13:55:04.079466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:08.956513Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:09.053575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:09.053760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:09.105005Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:09.110146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:09.500730Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:09.611581Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:09.611699Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:09.663059Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:09.663264Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:09.663479Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:09.663545Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:09.663594Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:09.663655Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:09.663709Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:09.663761Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:09.664245Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:09.935153Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.935316Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.942827Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T13:55:09.959439Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T13:55:09.960560Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:09.961818Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T13:55:10.028519Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:10.028593Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:10.028655Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T13:55:10.030831Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:10.030962Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:10.053730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:10.076688Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:10.076891Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:10.101098Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:10.128429Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:10.177319Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:10.533297Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:10.729639Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T13:55:11.467837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:12.265544Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:12.544092Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T13:55:12.544174Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T13:55:12.544272Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2488:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T13:55:12.545370Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2907] 2025-04-07T13:55:12.545791Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2907], schemeshard id = 72075186224037899 2025-04-07T13:55:13.780982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2623:3191], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:13.781210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:13.801558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T13:55:14.339289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2920:3238], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:14.339476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:14.353950Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2925:3242]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:14.354224Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:14.354368Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T13:55:14.354434Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2928:3245] 2025-04-07T13:55:14.354514Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2928:3245] 2025-04-07T13:55:14.355065Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2929:3142] 2025-04-07T13:55:14.355347Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2928:3245], server id = [2:2929:3142], tablet id = 72075186224037897, status = OK 2025-04-07T13:55:14.355558Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2929:3142], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T13:55:14.355626Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T13:55:14.355834Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:55:14.355932Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2925:3242], StatRequests.size() = 1 2025-04-07T13:55:14.390199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2933:3249], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:14.390387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:14.390974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2938:3254], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:14.398293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-07T13:55:14.553534Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T13:55:14.553612Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T13:55:14.593191Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2928:3245], schemeshard count = 1 2025-04-07T13:55:14.968391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2940:3256], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-07T13:55:15.291351Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3090:3344]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:15.291573Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:55:15.291702Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3090:3344], StatRequests.size() = 1 2025-04-07T13:55:15.382552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88has00wqj3dvkpxmqpkna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQ0NzkwYjItYTM0NmYzMGItNjRkNzQyMTYtYzY3ZGUzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:15.473511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-04-07T13:55:15.975711Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3407:3409]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:15.975910Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T13:55:15.975963Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3407:3409], StatRequests.size() = 1 2025-04-07T13:55:16.001051Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3416:3418]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:16.001235Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-07T13:55:16.001270Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3416:3418], StatRequests.size() = 1 2025-04-07T13:55:16.061269Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88hcbt14n3n8b8z32t6er6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RmZTAyNzYtZWYyMzdjYzAtNTA0NGE3NmItNzc1NDk5OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T13:55:16.116902Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3456:3391]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:16.119355Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:16.119414Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T13:55:16.119998Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:16.120043Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T13:55:16.120089Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T13:55:16.130331Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T13:55:16.130648Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-07T13:55:16.131015Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3481:3404]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:16.134450Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:55:16.134511Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T13:55:16.134817Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:55:16.134868Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T13:55:16.134910Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T13:55:16.137751Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-07T13:55:16.138013Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TestKinesisHttpProxy::TestRequestBadJson >> TestKinesisHttpProxy::ListShardsEmptyFields >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> Cdc::Alter >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldSaveState >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes >> TestKinesisHttpProxy::TestWrongStream >> 
TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> TStorageServiceTest::ShouldAbortCheckpoint >> TGcTest::ShouldIgnoreIncrementCheckpoint |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> TStateStorageTest::ShouldLoadLastSnapshot >> TStateStorageTest::ShouldSaveGetIncrementBigState >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints >> YdbProxy::ReadTopic [GOOD] >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> Cdc::Alter [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> YdbProxy::ReadNonExistentTopic >> TStorageServiceTest::ShouldSaveState [GOOD] >> TestYmqHttpProxy::TestCreateQueue >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> TStateStorageTest::ShouldDeleteGraph >> TestKinesisHttpProxy::TestWrongStream2 >> TStorageServiceTest::ShouldGetState >> Cdc::AddColumn >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TestYmqHttpProxy::TestDeleteMessage >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> TStorageServiceTest::ShouldUseGc >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> YdbProxy::ReadNonExistentTopic [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> TCheckpointStorageTest::ShouldDeleteGraph >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TStateStorageTest::ShouldCountStates [GOOD] >> TStorageServiceTest::ShouldGetState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> TStorageServiceTest::ShouldUseGc [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] >> TStateStorageTest::ShouldGetMultipleStates >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> 
Cdc::RacyActivateAndEnqueue [GOOD] >> TestKinesisHttpProxy::TestWrongRequest >> Cdc::RacyCreateAndSend |87.4%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |87.4%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-04-07T13:55:01.716271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:01.716856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:01.717084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001410/r3tmp/tmpuWbD2L/pdisk_1.dat 2025-04-07T13:55:02.489198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23155, node 1 2025-04-07T13:55:02.934493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:02.934550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:02.934583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:02.935235Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:02.949167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:03.082744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:03.082901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:03.099984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24759 2025-04-07T13:55:04.103163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:08.945419Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:09.034384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:09.034532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:09.076929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:09.082096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:09.375886Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:09.438479Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:09.438595Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:09.484001Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:09.484237Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:09.484456Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:09.484522Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:09.484592Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:09.484652Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:09.484723Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:09.484783Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:09.485326Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:09.707271Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.707411Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1757:2551], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.722300Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1764:2557] 2025-04-07T13:55:09.730565Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1795:2575] 2025-04-07T13:55:09.730808Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1795:2575], schemeshard id = 72075186224037889 2025-04-07T13:55:09.735587Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:09.773777Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:09.773838Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:09.773900Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:09.778662Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:09.778778Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:09.786013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:09.799882Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:09.800041Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:09.855732Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:09.879137Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:09.918243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:10.205554Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:10.401492Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:11.360202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2136:3015], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.360351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.379019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:12.028549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2427:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.029539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.031040Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2432:3067]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:12.031282Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:12.031382Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2434:3069] 2025-04-07T13:55:12.031475Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2434:3069] 2025-04-07T13:55:12.032008Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2435:2944] 2025-04-07T13:55:12.032299Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2434:3069], server id = [2:2435:2944], tablet id = 72075186224037897, status = OK 2025-04-07T13:55:12.032506Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2435:2944], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T13:55:12.032579Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T13:55:12.033079Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:55:12.033162Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2432:3067], StatRequests.size() = 1 2025-04-07T13:55:12.053742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2439:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.053872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.054310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2444:3078], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.061515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T13:55:12.317202Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T13:55:12.317299Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T13:55:12.388052Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2434:3069], schemeshard count = 1 2025-04-07T13:55:12.746934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2446:3080], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T13:55:12.942826Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3172]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:12.943000Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:55:12.943044Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3172], StatRequests.size() = 1 2025-04-07T13:55:13.048487Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88h8ge5sx8mm9yd7rhs51p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI0MWQ0MzYtN2U5YzNmZjgtNjI1NmM4NGYtNTUyYjQ1M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:13.192149Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2637:2996]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:13.195390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:13.195465Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T13:55:13.196157Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:13.196221Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T13:55:13.196273Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T13:55:13.207755Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T13:55:13.208103Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-04-07T13:55:19.159279Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7490569501568352467:2048] with connection to localhost:28395:local 2025-04-07T13:55:19.159374Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:19.482743Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:19.482770Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:19.495274Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:20.861864Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:20.861894Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:21.904656Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7490569514216048746:2048] with connection to localhost:28395:local 2025-04-07T13:55:21.904772Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:22.182409Z node 2 :STREAMS_STORAGE_SERVICE INFO: 
[graph_graphich.17] Graph registered 2025-04-07T13:55:22.182439Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:22.182762Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:23.719845Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:23.719878Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:23.720212Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-07T13:55:24.041560Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-07T13:55:24.041583Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-07T13:55:24.048805Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-07T13:55:24.339786Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-07T13:55:24.339821Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-07T13:55:24.342911Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:24.614290Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:25.770932Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7490569531337203548:2048] with connection to localhost:28395:local 2025-04-07T13:55:25.771013Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:25.986444Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:25.986472Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:25.987095Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:27.347987Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:27.348021Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:27.348296Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:27.800964Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-07T13:55:27.801503Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:27.813151Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-07T13:55:27.995272Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-07T13:55:27.995303Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-07T13:55:27.995816Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:28.176992Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-07T13:55:28.177021Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:28.177556Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:28.370614Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-07T13:55:28.370676Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:28.371978Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-04-07T13:55:28.594949Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-04-07T13:55:28.594987Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-04-07T13:55:28.595530Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2025-04-07T13:55:28.750927Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-04-07T13:55:28.750967Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-04-07T13:55:28.751212Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:28.971939Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:30.033078Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7490569550341278334:2048] with connection to localhost:28395:local 2025-04-07T13:55:30.033209Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:30.312597Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:30.312625Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:30.317051Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:31.705208Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:31.705240Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:31.705517Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-07T13:55:31.836512Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-07T13:55:31.836580Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-07T13:55:31.837523Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-04-07T13:55:31.837570Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-04-07T13:55:32.381716Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-04-07T13:55:32.381845Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-04-07T13:55:32.381885Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-04-07T13:55:32.389348Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-04-07T13:55:32.737566Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 
49 2025-04-07T13:55:32.737634Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-04-07T13:55:32.749886Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-04-07T13:55:24.379430Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:35:2082] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-07T13:55:24.578724Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-07T13:55:25.251442Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-07T13:55:32.060257Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:35:2082] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-04-07T13:55:32.218772Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-07T13:55:32.218837Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-04-07T13:55:15.669087Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7490569486223116104:2048] with connection to localhost:3349:local 2025-04-07T13:55:15.669173Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:16.475818Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:16.475850Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:16.478507Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:16.844256Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-04-07T13:55:16.844280Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:18.599673Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7490569499765260341:2048] with connection to localhost:3349:local 2025-04-07T13:55:18.599761Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:19.389915Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-04-07T13:55:19.389956Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:20.195893Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7490569504502208764:2048] with connection to localhost:3349:local 2025-04-07T13:55:20.196001Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:20.456817Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:20.456844Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:20.457060Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:21.833329Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:21.833359Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:21.834456Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:22.156430Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Conflict with existing key., code: 2012 2025-04-07T13:55:22.156466Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:23.236789Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7490569520028926771:2048] with connection to localhost:3349:local 2025-04-07T13:55:23.236869Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:23.504976Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:23.505008Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:23.505457Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:23.865991Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-07T13:55:23.866033Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:25.082287Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7490569529605872304:2048] with connection to localhost:3349:local 2025-04-07T13:55:25.082368Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:25.351530Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:25.351568Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:25.353713Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:26.800301Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:26.800332Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:26.800936Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:27.056840Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-07T13:55:27.056868Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:27.057193Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:27.272840Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-07T13:55:27.272898Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2025-04-07T13:55:16.089418Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7490569487234328294:2048] with connection to localhost:27745:local 2025-04-07T13:55:16.089523Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:16.759949Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:16.759982Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:16.760689Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:18.412473Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:18.412520Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:18.414443Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:18.689682Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-07T13:55:18.689711Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:18.690106Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-07T13:55:18.905008Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-07T13:55:18.905038Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-07T13:55:20.029333Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7490569504982617261:2048] with connection to localhost:27745:local 2025-04-07T13:55:20.029403Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:20.260060Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:20.260086Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:20.260393Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:20.610876Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-07T13:55:20.610903Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:21.667947Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7490569513323628205:2048] with connection to localhost:27745:local 2025-04-07T13:55:21.668013Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:21.985162Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:21.985193Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:21.988913Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-04-07T13:55:22.324907Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-07T13:55:22.324954Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-04-07T13:55:23.399405Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7490569518647170604:2048] with connection to localhost:27745:local 2025-04-07T13:55:23.399796Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:23.610583Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:23.610617Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:23.610996Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:24.932959Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:24.932999Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:24.934566Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:25.221782Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2025-04-07T13:55:25.221820Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:26.536982Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7490569534023594606:2048] with connection to localhost:27745:local 2025-04-07T13:55:26.537112Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:26.904577Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:26.904602Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:26.904915Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:28.072321Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:28.072355Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:28.073481Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:28.540611Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-07T13:55:28.540647Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:28.541094Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:28.750864Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-07T13:55:28.750905Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:28.751184Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:28.849743Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-07T13:55:28.849777Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-04-07T13:55:19.264473Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7490569504154999487:2048] with connection to localhost:9244:local 2025-04-07T13:55:19.264595Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:19.959876Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:19.959903Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:21.157955Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7490569512590517895:2048] with connection to localhost:9244:local 2025-04-07T13:55:21.158051Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:21.428613Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:21.428685Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:21.429673Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:21.690940Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-07T13:55:21.690970Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:21.691345Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:21.919281Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-04-07T13:55:21.919315Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:23.066519Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7490569521249482949:2048] with connection to localhost:9244:local 2025-04-07T13:55:23.066634Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:23.297192Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:23.297220Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:23.297484Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:24.640958Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:24.640999Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:24.649209Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:25.124884Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-07T13:55:25.124917Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:25.126876Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-07T13:55:25.480950Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-07T13:55:25.480994Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-07T13:55:25.481283Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:25.808549Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-07T13:55:25.808586Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:25.809457Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:26.141025Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-07T13:55:26.141059Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:26.141452Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:26.437085Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:27.402965Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7490569536678913928:2048] with connection to localhost:9244:local 2025-04-07T13:55:27.403041Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:27.674269Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:27.674297Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-04-07T13:55:27.674629Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:29.004454Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:29.004484Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:29.009279Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-07T13:55:29.197591Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-07T13:55:29.197655Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-07T13:55:30.194031Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7490569547665882311:2048] with connection to localhost:9244:local 2025-04-07T13:55:30.194091Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7490569551960849708:2128] 2025-04-07T13:55:30.194117Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-07T13:55:30.441507Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-07T13:55:30.441537Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-07T13:55:30.443288Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-07T13:55:31.715156Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-07T13:55:31.715192Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-07T13:55:31.715479Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:32.021156Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-07T13:55:32.021191Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:32.021453Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:32.153981Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-04-07T13:55:32.154026Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-04-07T13:55:32.154053Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:32.154134Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-04-07T13:55:32.155124Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-07T13:55:32.306323Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-07T13:55:32.306351Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-07T13:55:32.306671Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:32.419420Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-07T13:55:32.419457Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:32.419763Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:32.521146Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-07T13:55:32.521184Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-04-07T13:55:32.521207Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:32.521285Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-04-07T13:55:32.521608Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-07T13:55:32.684130Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-07T13:55:32.684170Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-07T13:55:32.684456Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-07T13:55:32.693254Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-04-07T13:55:32.694700Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-04-07T13:55:32.800076Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-04-07T13:55:32.800133Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-07T13:55:32.800459Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-04-07T13:55:32.892354Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-04-07T13:55:32.892385Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-04-07T13:55:32.892414Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-04-07T13:55:32.892497Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-04-07T13:55:32.892777Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:32.924906Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-04-07T13:55:33.087877Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.188534Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.196977Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.297544Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.308478Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.409032Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.417001Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.517574Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got 
TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.525479Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.626140Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.636527Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.737419Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.746724Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.847401Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.855272Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:33.955930Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:33.969920Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:34.070593Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:34.079983Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:34.180711Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:34.192287Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:34.292884Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:34.312097Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-07T13:55:34.412816Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-07T13:55:34.438735Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-04-07T13:54:33.007938Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:33.197779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:33.215830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:33.216176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:33.216476Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:33.223735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:33.223880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:33.224059Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:33.224127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:33.224186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:33.224272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:33.224346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:33.224403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:33.224474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:33.224553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.224624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:33.224736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:33.246247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:33.251032Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:33.251228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:33.251277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:33.251462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.251693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:33.251765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:33.251802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:33.251897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:33.251966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:33.252007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:33.252040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:33.252225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:33.252323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:33.252368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:33.252397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:33.252487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:33.252551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:33.252608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:33.252643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:33.252773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:33.252819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:33.252850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:33.252894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-07T13:54:33.252926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:33.252952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:33.253123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-07T13:54:33.253196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T13:54:33.253311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-07T13:54:33.253400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-07T13:54:33.253543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:33.253611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:33.253664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:33.253887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:33.253942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.253973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:33.254096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:33.254135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:33.254178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:33.254374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:33.254433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:33.254474Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:33.254578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... e=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=5729; 2025-04-07T13:55:25.746103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=12432; 2025-04-07T13:55:25.746800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=641; 2025-04-07T13:55:25.748317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=722; 2025-04-07T13:55:25.748391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1528; 2025-04-07T13:55:25.748525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2025-04-07T13:55:25.748629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:55:25.748713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=150; 2025-04-07T13:55:25.749055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=281; 2025-04-07T13:55:25.749346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=243; 2025-04-07T13:55:25.750641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1244; 2025-04-07T13:55:25.752112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1417; 2025-04-07T13:55:25.752210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-07T13:55:25.752272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-04-07T13:55:25.752329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-07T13:55:25.752368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T13:55:25.752403Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:55:25.752494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=62; 2025-04-07T13:55:25.752559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-07T13:55:25.752638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2025-04-07T13:55:25.752709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-07T13:55:25.752775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2025-04-07T13:55:25.752808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25195; 2025-04-07T13:55:25.752961Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=51;blobs=102;rows=1251798;bytes=102220660;raw_bytes=124064310; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:55:25.753081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:55:25.753138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:55:25.753175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:55:25.753236Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:55:25.753271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:25.753325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.753403Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:25.753457Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:25.753492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:25.753548Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.753594Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.753644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:25.753692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:25.753729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:25.754380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.754464Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4210:5838];tablet_id=9437184;parent=[1:4155:5790];fline=manager.h:99;event=ask_data;request=request_id=199;1={portions_count=54};; 2025-04-07T13:55:25.755201Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:55:25.755569Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:55:25.755611Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:55:25.755632Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:55:25.755667Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:25.755715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.755765Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:25.755812Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:25.755847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:25.755887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.755919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.755954Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:25.756019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:25.756061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:25.756754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=54;path_id=1; 2025-04-07T13:55:25.758494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=54;path_id=1; 2025-04-07T13:55:25.762392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:55:25.762441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-07T13:53:21.577020Z :TestReorderedExecutor INFO: Random seed for debugging is 1744034001576984 2025-04-07T13:53:22.536573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569002396262496:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:22.536630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:23.104174Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011db/r3tmp/tmpjH0HHA/pdisk_1.dat 2025-04-07T13:53:23.198234Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:23.262559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:23.613707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:23.841212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:23.853804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:23.853898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:23.865803Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:23.865878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:23.887753Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:23.888692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:23.889889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14015, node 1 2025-04-07T13:53:24.321396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0011db/r3tmp/yandexGXPpbx.tmp 2025-04-07T13:53:24.321423Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0011db/r3tmp/yandexGXPpbx.tmp 2025-04-07T13:53:24.321548Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0011db/r3tmp/yandexGXPpbx.tmp 2025-04-07T13:53:24.321640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:24.600358Z INFO: TTestServer started on Port 17355 GrpcPort 14015 TClient is connected to server localhost:17355 PQClient connected to localhost:14015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:25.490787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:53:27.536703Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569002396262496:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:27.536771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:28.545119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569028166067108:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.545614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569028166067100:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.545691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.549644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-07T13:53:28.580474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569028166067115:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-07T13:53:28.930720Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569030594874571:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:28.932619Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzU0YzZhZmQtNWFlMzYxMTMtMTRjMjA5NC0zMmUwMDQ4YQ==, ActorId: [2:7490569030594874528:2281], ActorState: ExecuteState, TraceId: 01jr88e3gk3rb9re7jz1cwrvaw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:28.951605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:28.953983Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569028166067214:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:28.955907Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2MzZDMxMzItNTNjNDRhMDktNDU4NmRhZmMtODAwN2UzNGE=, ActorId: [1:7490569028166067097:2305], ActorState: ExecuteState, TraceId: 01jr88e3ef18dcyp2d10kz2bc7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:28.959031Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:28.961081Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:29.232743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:53:29.436133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:14015", true, true, 1000); 2025-04-07T13:53:29.899754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88e4hqepv4efsp7wctp8wn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjcyZTNkZGEtZGM2MDhiYzItMzA3YjM2ODgtMmE1ZDJiNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490569032461034915:3000] === CheckClustersList. Ok 2025-04-07T13:53:36.571700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:14015 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T13:53:36.789012Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:14015 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976715681 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO ` ... S Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:55:28.022760Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:55:28.025855Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:55:28.238118Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T13:55:28.238566Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7490569544540181748:2461] connected; active server actors: 1 2025-04-07T13:55:28.238648Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T13:55:28.238666Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T13:55:28.238894Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7490569544540181748:2461] disconnected; active server actors: 1 2025-04-07T13:55:28.238915Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7490569544540181748:2461] disconnected no session 2025-04-07T13:55:28.392113Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:55:28.392156Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T13:55:28.392177Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569544540181695:2461] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T13:55:28.392210Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:55:28.394715Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:55:28.394770Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7490569544540181769:2461], now have 1 active actors on pipe 2025-04-07T13:55:28.394971Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2025-04-07T13:55:28.395201Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:55:28.395234Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:55:28.395324Z node 14 :PERSQUEUE INFO: new Cookie src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T13:55:28.395430Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T13:55:28.395482Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:55:28.396212Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T13:55:28.396237Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T13:55:28.396307Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:55:28.396649Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 2025-04-07T13:55:28.399123Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034128399 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:55:28.399269Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b7568cb-b2c68b1a-f0913cef-c36c724a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T13:55:28.400388Z :INFO: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session: close. 
Timeout = 0 ms 2025-04-07T13:55:28.400449Z :INFO: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session will now close 2025-04-07T13:55:28.400486Z :DEBUG: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session: aborting 2025-04-07T13:55:28.401457Z :INFO: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:55:28.401506Z :DEBUG: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session is aborting and will not restart 2025-04-07T13:55:28.401577Z :DEBUG: [] MessageGroupId [src] SessionId [src|b7568cb-b2c68b1a-f0913cef-c36c724a_0] Write session: destroy 2025-04-07T13:55:28.402612Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 grpc read done: success: 0 data: 2025-04-07T13:55:28.402640Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 grpc read failed 2025-04-07T13:55:28.402663Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 grpc closed 2025-04-07T13:55:28.402685Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b7568cb-b2c68b1a-f0913cef-c36c724a_0 is DEAD 2025-04-07T13:55:28.403307Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:55:28.422319Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:55:28.422377Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7490569544540181769:2461] destroyed 2025-04-07T13:55:28.422491Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:55:28.459633Z :INFO: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Starting read session 2025-04-07T13:55:28.459686Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Starting cluster discovery 2025-04-07T13:55:28.459914Z :INFO: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:1757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1757
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:1757. " 2025-04-07T13:55:28.459960Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Restart cluster discovery in 0.008929s 2025-04-07T13:55:28.469963Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Starting cluster discovery 2025-04-07T13:55:28.470300Z :INFO: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:1757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1757
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:1757. " 2025-04-07T13:55:28.470404Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Restart cluster discovery in 0.012418s 2025-04-07T13:55:28.484211Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Starting cluster discovery 2025-04-07T13:55:28.484378Z :INFO: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:1757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1757
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:1757. " 2025-04-07T13:55:28.484402Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Restart cluster discovery in 0.034923s 2025-04-07T13:55:28.519866Z :DEBUG: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Starting cluster discovery 2025-04-07T13:55:28.520157Z :NOTICE: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:1757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1757
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:1757. " } 2025-04-07T13:55:28.520336Z :NOTICE: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:1757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1757
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:1757. " } 2025-04-07T13:55:28.520479Z :INFO: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Closing read session. Close timeout: 0.000000s 2025-04-07T13:55:28.520611Z :NOTICE: [/Root] [/Root] [f0d2d6d0-eeda1562-45c0a500-ef4df565] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:55:28.955955Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T13:55:28.985571Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:55:29.036084Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:55:29.094902Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:55:29.200793Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T13:55:29.354213Z node 13 :KQP_COMPUTE WARN: SelfId: [13:7490569544540181812:2482], TxId: 281474976715686, task: 1, CA Id [13:7490569544540181810:2482]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00218f/r3tmp/tmpQMALYh/pdisk_1.dat 2025-04-07T13:54:55.435581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:55.871659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:55.909166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:55.909247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:55.926661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3415, node 1 2025-04-07T13:54:56.302945Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:56.302966Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:56.302973Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:56.303088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:56.793735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.801235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:56.802631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.805686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:54:56.805900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:54:56.805923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:54:56.809378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:56.809406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:54:56.812567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.813938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:56.825835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034096868, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:56.825891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:54:56.826213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:54:56.834325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:56.834525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:56.834572Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:54:56.834680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:54:56.834724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:54:56.834791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:54:56.837258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:54:56.837316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:54:56.837335Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:54:56.837437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:54:59.807218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569418952692688:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:59.807326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:00.071693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:00.072092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:55:00.072586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:00.072602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:00.077442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2025-04-07T13:55:00.077635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:00.077798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:00.077851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:55:00.079683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.079720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.079736Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:00.079933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.079966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.079978Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:55:00.081910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:00.103243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:55:00.103339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:55:00.107679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:55:00.180992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:55:00.181022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:55:00.181085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:55:00.188888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:55:00.197734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034100242, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:00.197802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034100242 2025-04-07T13:55:00.197968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:55:00.199813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:00.200083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:00.200145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:55:00.201999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:00.202027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:00.202039Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T13:55:00.202240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046 ... 
chemaChanged at tablet: 72057594046644480 2025-04-07T13:55:34.416734Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037905 Status: COMPLETE TxId: 281474976715765 Step: 1744034134437 OrderId: 281474976715765 ExecLatency: 12 ProposeLatency: 18 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037905 CpuTimeUsec: 977 } } 2025-04-07T13:55:34.416989Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037907 Status: COMPLETE TxId: 281474976715765 Step: 1744034134437 OrderId: 281474976715765 ExecLatency: 13 ProposeLatency: 18 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037907 CpuTimeUsec: 824 } } 2025-04-07T13:55:34.417599Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:55:34.417798Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:55:34.417826Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715765:2, at schemeshard: 72057594046644480 2025-04-07T13:55:34.417853Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715765:2 129 -> 240 2025-04-07T13:55:34.418281Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715765:0 ProgressState 2025-04-07T13:55:34.418413Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715765:0 progress is 2/3 2025-04-07T13:55:34.421080Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715765:2 ProgressState 2025-04-07T13:55:34.421252Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715765:2 progress is 3/3 2025-04-07T13:55:34.421347Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:0 2025-04-07T13:55:34.421507Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:1 2025-04-07T13:55:34.421528Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:2 2025-04-07T13:55:34.424293Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715765 2025-04-07T13:55:34.426704Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715762, at schemeshard: 72057594046644480 2025-04-07T13:55:34.427938Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976715766:0, path# /Root/table 2025-04-07T13:55:34.428047Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:34.429179Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2025-04-07T13:55:34.429251Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976715766, status# StatusAccepted 2025-04-07T13:55:34.429380Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 ProgressState 2025-04-07T13:55:34.430647Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion 
transaction is registered, txId: 281474976715766, at schemeshard: 72057594046644480 2025-04-07T13:55:34.432229Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034134479, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:34.432258Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 HandleReply TEvOperationPlan: step# 1744034134479 2025-04-07T13:55:34.432270Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715766:0 128 -> 240 2025-04-07T13:55:34.433554Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715766:0 ProgressState 2025-04-07T13:55:34.433663Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715766:0 progress is 1/1 2025-04-07T13:55:34.433715Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715766:0 2025-04-07T13:55:34.434790Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715766 2025-04-07T13:55:34.436963Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715762 2025-04-07T13:55:34.615770Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7490569571338361063:2427] [0] Resolve database: name# /Root 2025-04-07T13:55:34.616092Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7490569571338361063:2427] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:55:34.616117Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7490569571338361063:2427] [0] Send request: schemeShardId# 72057594046644480 2025-04-07T13:55:34.616734Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7490569571338361063:2427] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:21342" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1744034133 } EndTime { seconds: 1744034134 } } 2025-04-07T13:55:34.625808Z node 13 :TX_PROXY DEBUG: actor# [13:7490569549863520932:2135] Handle TEvNavigate describe path /Root/table/byValue/indexImplTable 2025-04-07T13:55:34.625871Z node 13 :TX_PROXY DEBUG: Actor# [13:7490569571338361069:4985] HANDLE EvNavigateScheme /Root/table/byValue/indexImplTable 2025-04-07T13:55:34.626118Z node 13 :TX_PROXY DEBUG: Actor# [13:7490569571338361069:4985] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:55:34.626241Z node 13 :TX_PROXY DEBUG: Actor# [13:7490569571338361069:4985] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table/byValue/indexImplTable" Options { ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T13:55:34.627422Z node 13 :TX_PROXY DEBUG: Actor# [13:7490569571338361069:4985] Handle TEvDescribeSchemeResult Forward to# [13:7490569571338361067:2428] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 693 Record# Status: StatusSuccess Path: "/Root/table/byValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715764 CreateStep: 1744034134367 ParentPathId: 8 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "Value" Type: "Decimal(22,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 22 DecimalScale: 9 } IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Value" KeyColumnNames: "Key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 10 MaxPartitionsCount: 10 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 2025-04-07T13:55:34.725342Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490569549863520720:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:34.725457Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] Test command err: 2025-04-07T13:54:54.797670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569399200168307:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:54.797713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00217e/r3tmp/tmpxAFMd7/pdisk_1.dat 2025-04-07T13:54:55.712830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:55.828209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:55.828292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:55.836317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:55.842909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17037, node 1 2025-04-07T13:54:56.221526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:56.221544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:56.221551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:56.221637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26417 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:54:56.635975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.648999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:56.650763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.658262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:54:56.658533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:54:56.658587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:54:56.664254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:56.664292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:54:56.671677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.673748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:56.686104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034096728, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:56.686154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:54:56.686469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:54:56.693830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:56.694177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:56.694275Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:54:56.694392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:54:56.694449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:54:56.694524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:54:56.702734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:54:56.702800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:54:56.702827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:54:56.702927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:54:59.813397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569420675005760:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:59.819786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569399200168307:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:59.819905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:59.819983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:59.833716Z node 1 :TX_PROXY DEBUG: actor# [1:7490569399200168473:2138] Handle TEvProposeTransaction 2025-04-07T13:54:59.833751Z node 1 :TX_PROXY DEBUG: actor# [1:7490569399200168473:2138] TxId# 281474976710658 ProcessProposeTransaction 2025-04-07T13:54:59.833800Z node 1 :TX_PROXY DEBUG: actor# [1:7490569399200168473:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7490569420675005781:2631] 2025-04-07T13:54:59.967040Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-07T13:54:59.967749Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:54:59.967830Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:54:59.967974Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:54:59.968191Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:54:59.968300Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-07T13:54:59.968492Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-07T13:54:59.968991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:54:59.969607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:54:59.970241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:59.970265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:54:59.981002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2025-04-07T13:54:59.981205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:59.981401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2025-04-07T13:54:59.981467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:54:59.981791Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-07T13:54:59.981847Z node 1 :TX_PROXY DEBUG: Actor# [1:7490569420675005781:2631] txid# 281474976710658 SEND to# [1:7490569420675005780:2305] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-07T13:54:59.982745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:59.982798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:59.982816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04- ... emeshard: 72057594046644480 2025-04-07T13:55:20.830351Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:20.831738Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710761 2025-04-07T13:55:20.831771Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710761 2025-04-07T13:55:20.831788Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710761, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 REQUEST: HEAD /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:11258 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9E2398D8-9524-4089-AFA5-82AAD924C824 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250407/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=f96d89ca15c16746fb1ec4cc5af825e2905ad0abf47144fe38efc898d7a039a4 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250407T135520Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 REQUEST: GET /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:11258 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 84C40C63-DE12-4581-A4DB-74EACEB3DFE3 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250407/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=88d58e553975f1410e16f68f3095e519c751a50c4ff5ece1d295812e1870d806 content-type: application/xml range: bytes=0-27 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250407T135520Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 2025-04-07T13:55:20.920911Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 HandleReply TEvSchemaChanged at tablet# 72057594046644480 message# Source { RawX1: 7490569510812372456 RawX2: 4503629692143922 } Origin: 72075186224037891 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 56 RowsProcessed: 7 } 2025-04-07T13:55:20.920965Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710761:0, at schemeshard: 72057594046644480 2025-04-07T13:55:20.920989Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 129 -> 240 2025-04-07T13:55:20.921140Z node 7 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 281474976710761:0, reason# domain is not a serverless db, domain# /Root, domainPathId# [OwnerId: 72057594046644480, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046644480, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:55:20.929625Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710761:0 ProgressState 2025-04-07T13:55:20.929754Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-07T13:55:20.929807Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-07T13:55:20.938040Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-07T13:55:20.969458Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490569489337534117:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:20.973869Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:21.220514Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7490569515107339935:2360] [0] Resolve database: name# /Root 2025-04-07T13:55:21.221016Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7490569515107339935:2360] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:55:21.221042Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7490569515107339935:2360] [0] Send request: schemeShardId# 72057594046644480 2025-04-07T13:55:21.222001Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7490569515107339935:2360] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715666 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:11258" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } 
StartTime { seconds: 1744034120 } EndTime { seconds: 1744034120 } } 2025-04-07T13:55:21.236944Z node 7 :TX_PROXY DEBUG: actor# [7:7490569489337534261:2134] Handle TEvNavigate describe path /Root/table 2025-04-07T13:55:21.237086Z node 7 :TX_PROXY DEBUG: Actor# [7:7490569515107339941:3431] HANDLE EvNavigateScheme /Root/table 2025-04-07T13:55:21.237447Z node 7 :TX_PROXY DEBUG: Actor# [7:7490569515107339941:3431] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:55:21.237555Z node 7 :TX_PROXY DEBUG: Actor# [7:7490569515107339941:3431] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false ReturnSetVal: true } 2025-04-07T13:55:21.238893Z node 7 :TX_PROXY DEBUG: Actor# [7:7490569515107339941:3431] Handle TEvDescribeSchemeResult Forward to# [7:7490569515107339939:2361] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1744034120829 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Int32" TypeId: 1 Id: 1 DefaultFromSequence: "_serial_column_Key" NotNull: true IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 1 IsBackup: false Sequences { Name: "_serial_column_Key" PathId { OwnerId: 72057594046644480 LocalId: 12 } Version: 1 SequenceShard: 72075186224037888 MinValue: 1 MaxValue: 2147483647 StartValue: 1 Cache: 1 Increment: 1 Cycle: false SetVal { NextValue: 8 NextUsed: false } DataType: "Int64" } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 2025-04-07T13:55:21.406347Z node 7 :TX_PROXY DEBUG: actor# [7:7490569489337534261:2134] Handle TEvExecuteKqpTransaction 2025-04-07T13:55:21.406379Z node 7 :TX_PROXY DEBUG: actor# [7:7490569489337534261:2134] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-04-07T13:55:21.407234Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88hhh92f3pmqk18d7sy4b9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTk5ZjY1NmItYzA2YTVmNzMtZTliZDU5YzgtN2E0NjE1Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-04-07T13:54:54.389141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569397963473547:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:54.389225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021e5/r3tmp/tmpDWrgZy/pdisk_1.dat 2025-04-07T13:54:55.044740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:55.044814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:55.053318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:55.121614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15541, node 1 2025-04-07T13:54:55.412265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:55.412289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:55.412294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:55.412368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:54:56.009404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.014815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:56.016089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.025642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:54:56.025855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:54:56.025869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:54:56.028035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:54:56.028056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:54:56.029008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:54:56.030171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:56.034420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034096077, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:56.034472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:54:56.034703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:54:56.036372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:56.036533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:56.036573Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:54:56.036657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:54:56.036707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:54:56.036751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:54:56.039680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:54:56.039721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:54:56.039739Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:54:56.039830Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:54:59.058103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569419438311062:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:59.058236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:54:59.371186Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569397963473547:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:54:59.371279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:54:59.390006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:54:59.390580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:54:59.391152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:54:59.391183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:54:59.396401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2025-04-07T13:54:59.396650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:59.396876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:59.396965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:54:59.399076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:59.399135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:59.399159Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:54:59.399453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:54:59.399476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:54:59.399487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:54:59.400133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:54:59.405550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:54:59.405649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:54:59.411771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 
2025-04-07T13:54:59.473641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:54:59.473671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:54:59.473758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T13:54:59.475812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:54:59.484095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034099521, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:54:59.484137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034099521 2025-04-07T13:54:59.484245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T13:54:59.493697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:54:59.494067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:54:59.494131Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:54:59.499508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, m ... : TAlterTable TPropose operationId#281474976710759:2 HandleReply TEvOperationPlan, operationId: 281474976710759:2, stepId: 1744034128466, at schemeshard: 72057594046644480 2025-04-07T13:55:28.422593Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 128 -> 129 2025-04-07T13:55:28.425678Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:28.426317Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:28.426388Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:1 ProgressState 2025-04-07T13:55:28.426481Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:1 progress is 1/3 2025-04-07T13:55:28.426656Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 ProgressState at tablet: 72057594046644480 2025-04-07T13:55:28.426788Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:55:28.426976Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037892 Status: COMPLETE TxId: 281474976710759 Step: 1744034128466 OrderId: 281474976710759 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 1062 } } 2025-04-07T13:55:28.429101Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:55:28.429158Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 
2025-04-07T13:55:28.429180Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 6 2025-04-07T13:55:28.429439Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:55:28.429469Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T13:55:28.429481Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-07T13:55:28.429588Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T13:55:28.429613Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T13:55:28.429622Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2025-04-07T13:55:28.429780Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:55:28.429818Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T13:55:28.429851Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T13:55:28.431997Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2025-04-07T13:55:28.432105Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2025-04-07T13:55:28.436731Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1744034128466 OrderId: 281474976710759 ExecLatency: 8 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 1496 } } 2025-04-07T13:55:28.438795Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:55:28.438845Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2025-04-07T13:55:28.438872Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2025-04-07T13:55:28.443837Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2025-04-07T13:55:28.443973Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2025-04-07T13:55:28.444040Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T13:55:28.444188Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2025-04-07T13:55:28.444202Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and 
all the parts is done, operation id: 281474976710759:2 2025-04-07T13:55:28.449802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T13:55:28.455435Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715667, at schemeshard: 72057594046644480 2025-04-07T13:55:28.459199Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/table 2025-04-07T13:55:28.459403Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:28.461592Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2025-04-07T13:55:28.461724Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-07T13:55:28.462017Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-07T13:55:28.464574Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2025-04-07T13:55:28.467398Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034128508, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:28.467449Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1744034128508 2025-04-07T13:55:28.467481Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T13:55:28.470623Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2025-04-07T13:55:28.470752Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T13:55:28.470804Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T13:55:28.471989Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 Restore ACL "/home/runner/.ya/build/build_root/u6pr/0021e5/r3tmp/tmpCVf3lt/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/u6pr/0021e5/r3tmp/tmpCVf3lt/table/permissions.pb"2025-04-07T13:55:28.652747Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-07T13:55:28.652966Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715669:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:28.652989Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-07T13:55:28.653059Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715669:0 progress is 1/1 2025-04-07T13:55:28.653178Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715669:0 2025-04-07T13:55:28.653191Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715669, publications: 4, subscribers: 0 2025-04-07T13:55:28.654969Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715669, 
database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2025-04-07T13:55:28.655129Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:28.655657Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 Restore completed successfully2025-04-07T13:55:28.656753Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715669 2025-04-07T13:55:28.656801Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715669 2025-04-07T13:55:28.656819Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-04-07T13:55:28.657274Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715669 2025-04-07T13:55:28.657306Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715669 2025-04-07T13:55:28.657320Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-07T13:55:28.657438Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715669 2025-04-07T13:55:28.657460Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715669 2025-04-07T13:55:28.657469Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 7 2025-04-07T13:55:28.657567Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976715669 2025-04-07T13:55:28.657590Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715669 2025-04-07T13:55:28.657599Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2025-04-07T13:55:28.657638Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715669, subscribers: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-04-07T13:54:40.001883Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:40.080970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:40.102582Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:40.102664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:40.102891Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:40.110924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:40.111142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:40.111357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:40.111454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:40.111551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:40.111645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:40.111746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:40.111827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:40.111906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:40.112015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.112101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:40.112196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:40.134534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:40.136950Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:40.137090Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:40.137130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:40.137368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.137529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:40.137591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:40.137629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:40.137748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:40.137818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:40.137861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:40.137899Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:40.138077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.138152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:40.138197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:40.138228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:40.138330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:40.138390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:40.138444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:40.138484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
2025-04-07T13:54:40.138555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:40.138591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:40.138620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:40.138660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:40.138694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:40.138724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:40.138882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-07T13:54:40.138960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-07T13:54:40.139072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-07T13:54:40.139159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-07T13:54:40.139329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:40.139388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:40.139424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:40.139624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:40.139672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.139699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.139841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:40.139888Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:40.139919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:40.140103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:40.140151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:40.140190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:40.140320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... e_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3928; 2025-04-07T13:55:16.751132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9154; 2025-04-07T13:55:16.752056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=857; 2025-04-07T13:55:16.753969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=898; 2025-04-07T13:55:16.754051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1904; 2025-04-07T13:55:16.754198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=80; 2025-04-07T13:55:16.754308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:55:16.754350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=109; 2025-04-07T13:55:16.754766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=374; 2025-04-07T13:55:16.755116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=296; 2025-04-07T13:55:16.756973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1794; 2025-04-07T13:55:16.758035Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=999; 2025-04-07T13:55:16.758102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=24; 2025-04-07T13:55:16.758145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=19; 2025-04-07T13:55:16.758170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-04-07T13:55:16.758194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=4; 2025-04-07T13:55:16.758219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T13:55:16.758277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-07T13:55:16.758307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:55:16.758363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=36; 2025-04-07T13:55:16.758423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-07T13:55:16.758476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-04-07T13:55:16.758504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23040; 2025-04-07T13:55:16.758619Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=33;blobs=66;rows=1126788;bytes=85356024;raw_bytes=106323502; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:55:16.758716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:55:16.758763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:55:16.758793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:55:16.758844Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:55:16.758876Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 
2025-04-07T13:55:16.758927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:16.759001Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T13:55:16.759053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:55:16.759086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:16.759122Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:16.759147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:16.759177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:16.759218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:16.759250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:16.760100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:16.760208Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T13:55:16.761110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:55:16.761514Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:55:16.761546Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:55:16.761563Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:55:16.761597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:16.761649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:16.761710Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T13:55:16.761771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:55:16.761814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:16.761861Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:16.761899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:16.761945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:16.762011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:16.762062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:16.763860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T13:55:16.765027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T13:55:16.767347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:55:16.767404Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-04-07T13:54:32.277026Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:32.413750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:32.434417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:32.434499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:32.434707Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:32.442349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:32.442542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:32.442767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:32.442876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:32.442960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:32.443058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:32.443166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:32.443263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:32.443384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:32.443482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.443586Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:32.443680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:32.468153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:32.473840Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:32.474016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:32.474071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:32.474303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.474452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:32.474515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:32.474563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:32.474654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:32.474715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:32.474771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:32.474813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:32.474992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.475066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:32.475108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:32.475142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:32.475239Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:32.475298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:32.475349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:32.475381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:32.475447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:32.475482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:32.475510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:32.475561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:32.475596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:32.475627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:32.475784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-07T13:54:32.475852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T13:54:32.476057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=131; 2025-04-07T13:54:32.476135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-07T13:54:32.476283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:32.476373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:32.476446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:32.476638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T13:54:32.476742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.476778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.476954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:32.476994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:32.477022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:32.477213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:32.477257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:32.477282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T13:54:32.477380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... 
_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=4370; 2025-04-07T13:55:25.748492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8151; 2025-04-07T13:55:25.749398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=843; 2025-04-07T13:55:25.750957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=727; 2025-04-07T13:55:25.751027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1568; 2025-04-07T13:55:25.751143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=65; 2025-04-07T13:55:25.751233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:55:25.751271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2025-04-07T13:55:25.751604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=298; 2025-04-07T13:55:25.751901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=253; 2025-04-07T13:55:25.753508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1554; 2025-04-07T13:55:25.755228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1657; 2025-04-07T13:55:25.755310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=30; 2025-04-07T13:55:25.755393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-04-07T13:55:25.755432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:55:25.755468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T13:55:25.755504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:55:25.755571Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-04-07T13:55:25.755610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-07T13:55:25.755684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-04-07T13:55:25.755720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:55:25.755772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-04-07T13:55:25.755802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21411; 2025-04-07T13:55:25.755930Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=35;blobs=70;rows=1239297;bytes=98153860;raw_bytes=121802583; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:55:25.756034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:55:25.756093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:55:25.756133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:55:25.756189Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:55:25.756220Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:25.756269Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.756342Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:25.756399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:25.756443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:25.756481Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.756509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.756542Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:25.756596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:25.756631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:25.757226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.757317Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T13:55:25.757998Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:55:25.758391Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:55:25.758424Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:55:25.758445Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:55:25.758480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:55:25.758529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:55:25.758601Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T13:55:25.758651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T13:55:25.758686Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:55:25.758725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.758754Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:55:25.758788Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:55:25.758836Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:55:25.758873Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:55:25.759402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T13:55:25.760544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T13:55:25.763314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:55:25.763360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |87.5%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> TestKinesisHttpProxy::DifferentContentTypes |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |87.5%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.5%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |87.5%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbTableSplit::SplitByLoadWithReads |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService >> TPersQueueTest::InflightLimit [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |87.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-04-07T13:53:05.361506Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568929541818969:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:05.361552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001328/r3tmp/tmp0J5IGJ/pdisk_1.dat 2025-04-07T13:53:05.875203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:05.882636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:05.882753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:05.885153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23952 TServer::EnableGrpc on GrpcPort 2776, node 1 2025-04-07T13:53:06.189238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:06.189264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:06.189270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:06.189347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:06.619618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:53:06.632376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:53:06.942557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:53:08.861969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568942426721496:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:08.862167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:08.864269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568942426721512:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:08.864323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568942426721513:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:08.881278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-07T13:53:08.915145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568942426721516:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:53:08.915219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490568942426721517:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:53:11.646262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568929541818969:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:11.648772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:12.741353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-07T13:53:13.660155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:53:14.413177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:53:15.416644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:53:16.399878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-07T13:53:20.845341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:53:20.845389Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:39.404798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569593694139050:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:39.404895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001328/r3tmp/tmpmkLzNu/pdisk_1.dat 2025-04-07T13:55:39.536624Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:39.571505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:39.571601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:39.573175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16545 TServer::EnableGrpc on GrpcPort 9228, node 2 2025-04-07T13:55:39.851907Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:39.851930Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:39.851935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:39.852058Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:40.069209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |87.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> LocalTableWriter::WriteTable |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |87.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |87.6%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Secret::Validation >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted >> IndexBuildTest::BaseCase >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-04-07T13:50:17.025721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568210439755655:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:17.047780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:17.199077Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568208169675922:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:17.591131Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001498/r3tmp/tmptMLzUb/pdisk_1.dat 2025-04-07T13:50:17.581180Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:17.657719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:18.105525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:18.228336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:18.228439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:18.233459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:18.233540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:18.244475Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:50:18.244644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:18.246835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:18.273166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24949, node 1 2025-04-07T13:50:18.376450Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:18.376477Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:50:18.402946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001498/r3tmp/yandexF9i3KI.tmp 2025-04-07T13:50:18.402969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001498/r3tmp/yandexF9i3KI.tmp 2025-04-07T13:50:18.403117Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001498/r3tmp/yandexF9i3KI.tmp 2025-04-07T13:50:18.403238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-04-07T13:50:18.462601Z INFO: TTestServer started on Port 3936 GrpcPort 24949 TClient is connected to server localhost:3936 PQClient connected to localhost:24949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:18.753789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:50:18.864335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:50:21.890370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568225349545265:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.890494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.890884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490568225349545292:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:21.900418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-07T13:50:21.944516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490568225349545294:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-07T13:50:22.011716Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568210439755655:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:22.012019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:22.156790Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490568208169675922:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:22.156863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:50:22.362891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:50:22.378858Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490568229644512625:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:22.380629Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2FmODJmOGItMWJkOWQyOWItNTYyMGQ4Zi1iNzcyNmRkMA==, ActorId: [2:7490568225349545263:2282], ActorState: ExecuteState, TraceId: 01jr888d5t3hbrpfxj9ayts2bz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:22.382928Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:22.385989Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490568231914593339:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:50:22.387391Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdhMjk3NmUtNzlhZmUxNTgtOWMxM2JkNzgtYmMyM2M1NjY=, ActorId: [1:7490568227619625990:2307], ActorState: ExecuteState, TraceId: 01jr888d8vd1ehwjjjemwc4gx8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:50:22.387695Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:50:22.587848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:50:22.863401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:50:23.430923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr888ec2ea2qt5w70a43a4ky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFjZWU0YjgtNWQxODE0ZWUtNzA1MDlhYy01YmJkNGRiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490568236209561035:3087] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:24949 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24949 2025-04-07T13:50:30.338125Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-04-07T13:50:30.338148Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-04-07T13:50:30.338157Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2025-04-07T13:50:30.338171Z node 1 :PQ_METACACHE DEBUG: Send descri ... 
data# { start_partition_session_response { partition_session_id: 1 read_offset: 0 } } 2025-04-07T13:55:33.846869Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_11613829216190585691_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-07T13:55:33.846915Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_11613829216190585691_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-07T13:55:33.846945Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-04-07T13:55:33.846992Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1744034130494, sizeLag# 82536 2025-04-07T13:55:33.847007Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1TEvPartitionReady. Aval parts: 1 2025-04-07T13:55:33.847034Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-04-07T13:55:33.847116Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 got read request: guid# 69c3c431-5734e578-92467def-a5953a16 2025-04-07T13:55:33.847147Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 performing read request: guid# 91b856a1-895cf5ef-3e5ad286-ae844c7b, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T13:55:33.847198Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 91b856a1-895cf5ef-3e5ad286-ae844c7b 2025-04-07T13:55:33.847586Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T13:55:33.847591Z node 28 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:55:33.847628Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T13:55:34.081213Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:55:37.087096Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-07T13:55:37.087166Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:55:37.087339Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. 
All data is from uncompacted head. 2025-04-07T13:55:37.087379Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:55:37.087604Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:55:37.088826Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744034130494 CreateTimestampMS: 1744034130491 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744034130595 CreateTimestampMS: 1744034130584 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744034130635 CreateTimestampMS: 1744034130628 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744034130663 CreateTimestampMS: 1744034130656 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3243 } Cookie: 0 } 2025-04-07T13:55:37.089282Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-07T13:55:37.089345Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 82a663ec-36742307-e77f7f69-25055a93 has messages 1 2025-04-07T13:55:37.089492Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 read done: guid# 82a663ec-36742307-e77f7f69-25055a93, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2025-04-07T13:55:37.089541Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 response to read: guid# 82a663ec-36742307-e77f7f69-25055a93 2025-04-07T13:55:37.089896Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 Process answer. 
Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-07T13:55:37.092480Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_1400882780625692009_v1 grpc read done: success# 0, data# { } 2025-04-07T13:55:37.092507Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_1400882780625692009_v1 grpc read failed 2025-04-07T13:55:37.092541Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_1400882780625692009_v1 closed 2025-04-07T13:55:37.093473Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:55:37.093523Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_1400882780625692009_v1 2025-04-07T13:55:37.093583Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7490569566157599313:2536] destroyed 2025-04-07T13:55:37.093625Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_1400882780625692009_v1 2025-04-07T13:55:37.093222Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_1400882780625692009_v1 is DEAD 2025-04-07T13:55:38.365018Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:55:39.081363Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:55:41.108447Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-07T13:55:41.108514Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:55:41.108820Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2025-04-07T13:55:41.108865Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:55:41.109064Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:55:41.110467Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744034130494 CreateTimestampMS: 1744034130491 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744034130595 CreateTimestampMS: 1744034130584 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744034130635 CreateTimestampMS: 1744034130628 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744034130663 CreateTimestampMS: 1744034130656 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7260 } Cookie: 0 } 2025-04-07T13:55:41.111093Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-07T13:55:41.111162Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 91b856a1-895cf5ef-3e5ad286-ae844c7b has messages 1 2025-04-07T13:55:41.111415Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 read done: guid# 91b856a1-895cf5ef-3e5ad286-ae844c7b, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2025-04-07T13:55:41.111459Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 response to read: guid# 91b856a1-895cf5ef-3e5ad286-ae844c7b 2025-04-07T13:55:41.111830Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 Process answer. Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-07T13:55:41.114271Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_11613829216190585691_v1 grpc read done: success# 0, data# { } 2025-04-07T13:55:41.114301Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_11613829216190585691_v1 grpc read failed 2025-04-07T13:55:41.114333Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_11613829216190585691_v1 grpc closed 2025-04-07T13:55:41.114382Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_11613829216190585691_v1 is DEAD 2025-04-07T13:55:41.116067Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:55:41.116136Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_11613829216190585691_v1 2025-04-07T13:55:41.116189Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7490569566157599315:2537] destroyed 2025-04-07T13:55:41.116284Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_11613829216190585691_v1 >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword >> IndexBuildTest::WithFollowers >> KqpLimits::QSReplySize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-07T13:53:19.952752Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744033999952721 2025-04-07T13:53:20.801899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568996254151184:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:20.801939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:53:21.537282Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:21.561552Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011e3/r3tmp/tmp0ZnKw0/pdisk_1.dat 2025-04-07T13:53:21.677589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:21.864353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:22.413293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:22.422597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:22.422678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:22.437170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:22.437248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:22.458793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:22.459674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:22.466412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25825, node 1 2025-04-07T13:53:22.841203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0011e3/r3tmp/yandexwBk8lF.tmp 2025-04-07T13:53:22.841225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0011e3/r3tmp/yandexwBk8lF.tmp 2025-04-07T13:53:22.841359Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0011e3/r3tmp/yandexwBk8lF.tmp 2025-04-07T13:53:22.841439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:23.045706Z INFO: TTestServer started on Port 24536 GrpcPort 25825 TClient is connected to server localhost:24536 PQClient connected to localhost:25825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:53:23.775141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:53:25.804866Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568996254151184:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:25.804950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:28.421277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569030616707330:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.421356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569030616707355:2288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.421510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:28.437593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:53:28.530930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569030616707359:2289], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:53:29.284360Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569030616707394:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:29.292290Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569030613890689:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:29.294087Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThlZWNkOWQtOTYxOTEyZjUtMzA2NzE0OGUtNGYzZjcyZmU=, ActorId: [1:7490569030613890655:2310], ActorState: ExecuteState, TraceId: 01jr88e3t1be384rcjhg1evc60, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:29.297848Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDA2YzVjMDItMjRjY2ExOGMtODBlN2U0YWMtY2E1YTUwMjE=, ActorId: [2:7490569030616707328:2284], ActorState: ExecuteState, TraceId: 01jr88e3a304napb2wk170b5de, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:29.299667Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:29.303104Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:29.360107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:53:29.740194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:30.036291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:53:30.366402Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569039203825612:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:30.371909Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWFhYWE4ODQtNTZmOTA0MDEtOWEzNGMzNWQtZWFkM2EzOWY=, ActorId: [1:7490569039203825610:2350], ActorState: ExecuteState, TraceId: 01jr88e55ye62em85hhkyrkyfr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:30.372259Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:30.377295Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569039206642122:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:30.383381Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzRjMTU3OTgtNzk5OTYwYjktMWEzY2Y5NzMtYzEwZTcxODk=, ActorId: [2:7490569039206642113:2304], ActorState: ExecuteState, TraceId: 01jr88e56fdeb0z44zrkkcfapz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:30.383769 ... st { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-07T13:55:43.321645Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:49504 2025-04-07T13:55:43.321669Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49504 proto=v1 topic=test-topic durationSec=0 2025-04-07T13:55:43.321687Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:55:43.328879Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-07T13:55:43.329038Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T13:55:43.329053Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:55:43.329065Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T13:55:43.329084Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569610749455018:2550] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T13:55:43.333061Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7490569610749455018:2550] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-07T13:55:43.546134Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715702. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T13:55:43.546267Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7490569610749455032:2552] TxId: 281474976715702. Ctx: { TraceId: 01jr88j735bhdf2e61sxnwkkwx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDc3ZTg0ZmQtNzVjZjM0YmYtMThiYjY2ZjMtZWI3YjIzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T13:55:43.546588Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NDc3ZTg0ZmQtNzVjZjM0YmYtMThiYjY2ZjMtZWI3YjIzMTc=, ActorId: [13:7490569610749455019:2552], ActorState: ExecuteState, TraceId: 01jr88j735bhdf2e61sxnwkkwx, Create QueryResponse for error on request, msg: 2025-04-07T13:55:43.547785Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7490569610749455018:2550] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NDc3ZTg0ZmQtNzVjZjM0YmYtMThiYjY2ZjMtZWI3YjIzMTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr88j7386hra9s4hb24p30dg" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-07T13:55:43.547881Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NDc3ZTg0ZmQtNzVjZjM0YmYtMThiYjY2ZjMtZWI3YjIzMTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr88j7386hra9s4hb24p30dg" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-07T13:55:43.548200Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-07T13:55:43.583291Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NDc3ZTg0ZmQtNzVjZjM0YmYtMThiYjY2ZjMtZWI3YjIzMTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr88j7386hra9s4hb24p30dg" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-07T13:55:43.583335Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session will restart in 2.000000s 2025-04-07T13:55:43.583465Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session: Do CDS request 2025-04-07T13:55:43.583501Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Do schedule cds request after 2000 ms 2025-04-07T13:55:43.911177Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720683. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:43.911333Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7490569608897434161:2478] TxId: 281474976720683. Ctx: { TraceId: 01jr88j7eqdx15m7bfajf7tv3q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=M2Q5Zjk4MGQtMWM2OWVmOTMtMmQzNDVlM2ItNGMzMjUzOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:43.911759Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=M2Q5Zjk4MGQtMWM2OWVmOTMtMmQzNDVlM2ItNGMzMjUzOGY=, ActorId: [14:7490569608897434158:2478], ActorState: ExecuteState, TraceId: 01jr88j7eqdx15m7bfajf7tv3q, Create QueryResponse for error on request, msg: 2025-04-07T13:55:43.913542Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr88j7es5tq8dy57w1051m5f" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T13:55:44.022306Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715704. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:44.022479Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7490569610749455103:2562] TxId: 281474976715704. Ctx: { TraceId: 01jr88j7j47tyrk1wq12w3wyqw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2NiMGVhNTAtZWUyNzJmYjgtMWNkZjQ3MGYtZDBmNWU4N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:44.022903Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=N2NiMGVhNTAtZWUyNzJmYjgtMWNkZjQ3MGYtZDBmNWU4N2I=, ActorId: [13:7490569610749455100:2562], ActorState: ExecuteState, TraceId: 01jr88j7j47tyrk1wq12w3wyqw, Create QueryResponse for error on request, msg: 2025-04-07T13:55:44.025336Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr88j7j47tyrk1wq16vp9qhx" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T13:55:44.288763Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session: close. Timeout = 0 ms 2025-04-07T13:55:44.288838Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session will now close 2025-04-07T13:55:44.288910Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session: aborting 2025-04-07T13:55:44.289750Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-07T13:55:44.289799Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c44f04d6-13511e98-ba22f5a2-713deb53_0] Write session: destroy 2025-04-07T13:55:44.439409Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720685. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:44.439542Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7490569613192401514:2479] TxId: 281474976720685. Ctx: { TraceId: 01jr88j7g19ac75rk3r0s30bb1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZGUzMzVmYjUtMjY0NDI1MDUtMTc0MWMzZGItNjIzYTczMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T13:55:44.439900Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZGUzMzVmYjUtMjY0NDI1MDUtMTc0MWMzZGItNjIzYTczMjI=, ActorId: [14:7490569608897434170:2479], ActorState: ExecuteState, TraceId: 01jr88j7g19ac75rk3r0s30bb1, Create QueryResponse for error on request, msg: 2025-04-07T13:55:44.441356Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr88j7zx1w1t2th7vmhada2m" } } YdbStatus: UNAVAILABLE ConsumedRu: 327 } 2025-04-07T13:55:44.450743Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715706. Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-07T13:55:44.450926Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7490569615044422450:2556] TxId: 281474976715706. Ctx: { TraceId: 01jr88j7g19c0y1ambps7zd1m8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Q1MDNhYjgtMjJlYWQyOGItY2M5NzJkY2YtN2RiYjM4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-07T13:55:44.451405Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=N2Q1MDNhYjgtMjJlYWQyOGItY2M5NzJkY2YtN2RiYjM4ODg=, ActorId: [13:7490569610749455087:2556], ActorState: ExecuteState, TraceId: 01jr88j7g19c0y1ambps7zd1m8, Create QueryResponse for error on request, msg: 2025-04-07T13:55:44.452648Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jr88j80c8yrwmybj6jbbeejw" } } YdbStatus: UNAVAILABLE ConsumedRu: 343 } >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> VectorIndexBuildTest::BaseCase >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> IndexBuildTest::CancellationNotEnoughRetries >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> IndexBuildTest::CheckLimitWithDroppedIndex >> IndexBuildTest::Lock >> IndexBuildTest::ShadowDataNotAllowedByDefault >> IndexBuildTest::RejectsCreate >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp >> LocalTableWriter::ConsistentWrite [GOOD] >> IndexBuildTest::WithFollowers [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> LocalTableWriter::WriteTable [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore >> KqpJoin::IdxLookupLeftPredicate >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> TestYmqHttpProxy::TestChangeMessageVisibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:48.212412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:48.212474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.212506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:48.212529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:48.212567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:48.212586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-04-07T13:55:48.212619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.212890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:48.273240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:48.273291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:48.282342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:48.282496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:48.282651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:48.287565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:48.287726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:48.288242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:48.288497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:48.290296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:48.291502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:48.291552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:48.291667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:48.291711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:48.291743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:48.291890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.296874Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:48.411103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:48.411362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.411591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:48.411809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:48.411863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.421616Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:48.421742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:48.421914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.421980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:48.422026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:48.422072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:48.430276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.430336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:48.430405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:48.432041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.432079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.432111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:48.432155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:48.435414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:48.442432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:48.442646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:48.443613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:48.443748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:48.443796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:48.444042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:48.444090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:48.444242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:48.444307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:48.449863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:48.449924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:48.450092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:48.450124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:48.450350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.450391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:48.450482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:48.450528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:48.450570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:48.450605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:48.450641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:48.450665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:48.450744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:48.450791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:48.450821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:48.452581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:48.455039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:48.455099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:48.455140Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:48.455187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:48.455332Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notif ... 4, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T13:55:49.158311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.158346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T13:55:49.158412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T13:55:49.158444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T13:55:49.158467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-04-07T13:55:49.158776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:55:49.158834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:2 ProgressState at tablet: 72057594046678944 2025-04-07T13:55:49.158937Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:55:49.158988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:2, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:55:49.159033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:2 129 -> 240 2025-04-07T13:55:49.159615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.159656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:1 ProgressState 2025-04-07T13:55:49.159773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-04-07T13:55:49.159812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-07T13:55:49.159865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-04-07T13:55:49.160168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.160252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.160285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:55:49.160317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T13:55:49.160351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:55:49.161400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, 
cookie: 104 2025-04-07T13:55:49.161557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.161601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:55:49.161644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-07T13:55:49.161681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:55:49.162814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.162893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.162918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:55:49.163504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.163608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:55:49.163637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:55:49.163706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T13:55:49.163743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:55:49.163815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-07T13:55:49.164702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-07T13:55:49.164761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.165108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:55:49.165239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-07T13:55:49.165270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:55:49.165306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-07T13:55:49.165385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 
104 2025-04-07T13:55:49.165443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:55:49.165487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T13:55:49.165658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T13:55:49.165768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:55:49.165812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-07T13:55:49.165838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-07T13:55:49.165865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:55:49.165887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-07T13:55:49.165918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-07T13:55:49.165953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T13:55:49.167794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:55:49.170440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:55:49.170511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:55:49.170617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:55:49.172077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T13:55:49.172132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:685:2646] TestWaitNotification: OK eventTxId 104 2025-04-07T13:55:49.172787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:55:49.173120Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 306us result status StatusSuccess 2025-04-07T13:55:49.173604Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" 
TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> TestYmqHttpProxy::TestDeleteQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-04-07T13:55:46.123484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569623433212113:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:46.123732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016e9/r3tmp/tmpz2UGR1/pdisk_1.dat 2025-04-07T13:55:46.507011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:46.541573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:46.541685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:46.546181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29719 TServer::EnableGrpc on GrpcPort 19532, node 1 2025-04-07T13:55:46.733374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:46.733396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:46.733402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:46.733513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29719 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:47.026891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:47.044302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034147149 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2025-04-07T13:55:47.173979Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handshake: worker# [1:7490569627728179835:2283] 2025-04-07T13:55:47.174238Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:55:47.174489Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:55:47.174527Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Send handshake: worker# [1:7490569627728179835:2283] 2025-04-07T13:55:47.174836Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.179464Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-07T13:55:47.179642Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-04-07T13:55:47.179831Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T13:55:47.179903Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.180037Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-07T13:55:47.182258Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:55:47.182308Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.182355Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-04-07T13:55:47.182676Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.182938Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 5 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.183186Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 7 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.183471Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-04-07T13:55:47.183549Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-04-07T13:55:47.183727Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-07T13:55:47.185731Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:55:47.185770Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.185821Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-04-07T13:55:47.186105Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 9 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.186234Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-04-07T13:55:47.186334Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-07T13:55:47.187189Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569627728179938:2345] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:55:47.187225Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.187252Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-04-07T13:55:47.187470Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569627728179935:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 11 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-04-07T13:55:46.131474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569624162776596:2242];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:46.131542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016e0/r3tmp/tmp6fXebt/pdisk_1.dat 
2025-04-07T13:55:46.484871Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:46.505685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:46.505767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:46.509835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12917 TServer::EnableGrpc on GrpcPort 20250, node 1 2025-04-07T13:55:46.857723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:46.857749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:46.857758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:46.857836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:47.195248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:47.209777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:47.213324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034147331 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2025-04-07T13:55:47.353325Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handshake: worker# [1:7490569628457744276:2285] 2025-04-07T13:55:47.353567Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:55:47.353784Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:55:47.353818Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Send handshake: worker# [1:7490569628457744276:2285] 2025-04-07T13:55:47.354364Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:55:47.354569Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-04-07T13:55:47.354741Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569628457744370:2345] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T13:55:47.354774Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.354881Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569628457744370:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 
Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-07T13:55:47.357381Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490569628457744370:2345] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:55:47.357431Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T13:55:47.357482Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490569628457744367:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex >> TestKinesisHttpProxy::DoubleCreateStream >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> KqpJoin::JoinDupColumnRightPure >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:49.255985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:49.256085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.256131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:49.256160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:49.256199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:49.256225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:49.256279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.256600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:49.332591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:49.332648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:49.343329Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:49.343480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:49.343634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:49.351342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:49.351530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:49.352090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.352283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:49.354144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.355405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.355475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.355600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:49.355644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.355681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:49.355826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.361622Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.478049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.478256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.478454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.478658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.478703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.481367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.481490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.481640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T13:55:49.481687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:49.481760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.481805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.485487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.485551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.485635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.487487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.487532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.487563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.487606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.495831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.497656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.497821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.498718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.498843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.498895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.499113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:49.499155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.499311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.499388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T13:55:49.501209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.501251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.501414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.501446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.501668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.501709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.501843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.501880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.501924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.501957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.502001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.502028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.502090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.502130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.502158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.503961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.504070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.504106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.504141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.504190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.504290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
73709551615 PrepareArriveTime: 156500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 170 } } 2025-04-07T13:55:51.536048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-07T13:55:51.536221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 156500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 170 } } 2025-04-07T13:55:51.536480Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 156500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 170 } } 2025-04-07T13:55:51.536532Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T13:55:51.536638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.536691Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2025-04-07T13:55:51.538858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.539020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.539087Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:51.539171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-04-07T13:55:51.539344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:51.541233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-04-07T13:55:51.541364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-04-07T13:55:51.542005Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-07T13:55:51.542130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:51.542196Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-04-07T13:55:51.542410Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2025-04-07T13:55:51.542563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:55:51.547448Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:51.547508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T13:55:51.547718Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:51.547761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 109, path id: 4 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-04-07T13:55:51.548904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.548967Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:55:51.549613Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-07T13:55:51.549712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-07T13:55:51.549753Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-04-07T13:55:51.549806Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-04-07T13:55:51.549850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:55:51.549936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2025-04-07T13:55:51.556121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-04-07T13:55:51.557848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1132 } } 2025-04-07T13:55:51.557906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-07T13:55:51.558066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1132 } } 2025-04-07T13:55:51.558162Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1132 } } 2025-04-07T13:55:51.558874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 667 RawX2: 8589937217 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-07T13:55:51.558931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-07T13:55:51.559072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 667 RawX2: 8589937217 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-07T13:55:51.559123Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:55:51.559231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 667 RawX2: 8589937217 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-07T13:55:51.559303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:51.559343Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.559384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T13:55:51.559427Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2025-04-07T13:55:51.562408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.562696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.562992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.563058Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2025-04-07T13:55:51.563201Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-07T13:55:51.563246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-07T13:55:51.563315Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-04-07T13:55:51.563402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:340:2315] message: TxId: 109 2025-04-07T13:55:51.563466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-07T13:55:51.563513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-04-07T13:55:51.563552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2025-04-07T13:55:51.563689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:55:51.568740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-07T13:55:51.568853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:769:2718] TestWaitNotification: OK eventTxId 109 >> IndexBuildTest::DropIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-04-07T13:55:49.065409Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:55:49.065629Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:55:49.149857Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:55:49.162154Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:55:49.185269Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T13:55:49.185416Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:47.500567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:47.500701Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.500752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:47.500806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:47.500861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:47.500897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:47.500962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.501369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:47.581553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:47.581648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:47.604451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:47.604607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:47.604845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:47.611664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:47.611881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:47.612522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.612784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:47.615238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.616567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.616624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.616778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:47.616843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.616882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:47.617043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.626218Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:47.789493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:47.789781Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.790030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:47.790275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:47.790355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.794048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.794230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:47.794449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.794512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:47.794581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:47.794631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:47.798224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.798297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:47.798391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:47.800545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.800605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.800649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.800721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.804535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:47.808162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:47.808364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:47.809450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.809620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:47.809675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.809938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:47.809986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.810188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.810279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:47.812586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.812647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.812898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.812941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:47.813220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.813268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:47.813370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:47.813419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.813486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:47.813526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.813570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:47.813604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:47.813682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:47.813736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:47.813770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:47.815756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.816702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.816768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:47.816833Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:47.816887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.817042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... s, read records: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:51.670811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:55:51.670959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:51.671309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.671375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.671716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.671845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.671952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.672142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.672219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.672530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.672915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:51.673161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.673214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.673278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T13:55:51.699088Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:55:51.699238Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: 
StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720767, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720768, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:55:51.699280Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-04-07T13:55:51.710174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:51.710265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:51.711727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:51.711796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:51.711847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:51.712541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2575:4319] sender: [1:2638:2058] recipient: [1:15:2062] 2025-04-07T13:55:51.765834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:55:51.766123Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 339us result status StatusSuccess 2025-04-07T13:55:51.767242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 TableIndexVersion: 2 } } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 
4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:49.310664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:49.310761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.310815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:49.310868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:49.310918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:49.310941Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:49.310991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.311295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:49.382139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:49.382202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:49.396824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:49.397007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:49.397188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:49.403207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:49.403383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:49.404005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.404188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:49.406017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.407312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.407368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.407506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:49.407546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.407587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:49.407772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.414318Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.551993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.552231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.552445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.552694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.552754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.555286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.555410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.555545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.555596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:49.555653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.555703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.561423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.561491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.561588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.564822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.564870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.564909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.564967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.567856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.569289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.569435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.570189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.570283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.570356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.570541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-07T13:55:49.570574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.570709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.570766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:49.573581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.573614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.573748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.573774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.573936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.573971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.574042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.574071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.574101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.574132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.574164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.574188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.574240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.574267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.574288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.575679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.575751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.575783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.575815Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.575866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.575939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... nges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:52.075438Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:55:52.075668Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 242us result status StatusSuccess 2025-04-07T13:55:52.076378Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription 
{ SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:52.077044Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:55:52.077352Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/Index/indexImplTable" took 344us result status StatusSuccess 2025-04-07T13:55:52.078245Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IndexBuildTest::RejectsDropIndex [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:49.201704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:49.201796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.201840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:49.201876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:49.201920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:49.201954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:49.202021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.202366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:49.270185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:49.270243Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T13:55:49.280520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:49.280702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:49.280909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:49.286698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:49.286937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:49.287529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.287721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:49.289583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.290506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.290560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.290652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:49.290683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.290711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:49.290826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.300013Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.427333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.427579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.427810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.428042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.428103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.430540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.430662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.430839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.430890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:49.430965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.431020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.432796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.432866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.432973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.434550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.434589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.434629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.434678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.439071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.441097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.441280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.442385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.442540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.442596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.442871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:49.442928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.443111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.443194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:49.446137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.446184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.446367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.446412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.446675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.446736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.446845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.446879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.446931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.446971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.447024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.447061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.447139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.447186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.447219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.449401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.449513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.449555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.449595Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.449653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.449784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ulator: [2:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 9 2025-04-07T13:55:52.571778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:55:52.571820Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:55:52.571920Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:55:52.571960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409550, at schemeshard: 72057594046678944 2025-04-07T13:55:52.572001Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-07T13:55:52.572388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-07T13:55:52.572422Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-07T13:55:52.572511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-07T13:55:52.572538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-07T13:55:52.572574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-04-07T13:55:52.572900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-07T13:55:52.572931Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-04-07T13:55:52.572970Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-07T13:55:52.572991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T13:55:52.573011Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-04-07T13:55:52.573314Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.573384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.573414Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.573447Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-07T13:55:52.573488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-04-07T13:55:52.573903Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.573973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.573998Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.574026Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-04-07T13:55:52.574053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-07T13:55:52.574691Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.574753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.574776Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.574799Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T13:55:52.574823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:52.576193Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.576260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.576291Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.576907Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.576959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.576975Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.577626Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.577676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T13:55:52.577692Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at 
schemeshard: 72057594046678944, txId: 105 2025-04-07T13:55:52.577712Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-07T13:55:52.577753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-04-07T13:55:52.577822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: true 2025-04-07T13:55:52.578856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T13:55:52.578926Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:52.579192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-04-07T13:55:52.579337Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-07T13:55:52.579382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-07T13:55:52.579429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-04-07T13:55:52.582174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-07T13:55:52.582235Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:52.582457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-07T13:55:52.582567Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-07T13:55:52.582597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-07T13:55:52.582640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-04-07T13:55:52.582712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:418:2373] message: TxId: 105 2025-04-07T13:55:52.582761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-07T13:55:52.582803Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T13:55:52.582843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T13:55:52.582928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T13:55:52.582973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-07T13:55:52.582993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-07T13:55:52.583017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-07T13:55:52.583034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-04-07T13:55:52.583052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-04-07T13:55:52.583082Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-07T13:55:52.583817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.583899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.583933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.583961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.584001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.585761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T13:55:52.586005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T13:55:52.586065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:917:2847] TestWaitNotification: OK eventTxId 105 >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:49.547260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:49.547341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.547377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:49.547417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:49.547457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:49.547484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:49.547529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:49.547817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:49.619310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:49.619359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:49.634199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:49.634335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:49.634518Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:49.644004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:49.644204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:49.644792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.644981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:49.646640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.647872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.647935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.648058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:49.648103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.648138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:49.648290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.654170Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.764128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.764310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.764478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.764644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.764704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.768047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.768174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.768353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.768408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T13:55:49.768452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.768501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.770400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.770457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.770541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.772149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.772195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.772231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.772277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.775852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.777508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.777684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.778666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.778812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.778878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.779151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:49.779194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.779358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.779438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:49.781179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.781223Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.781364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.781393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.781629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.781675Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.781763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.781793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.781834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.781874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.781903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.781933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.782001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.782037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.782058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.783935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.784037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.784075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.784108Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.784158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.784259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
let: 72057594046316545 cookie: 0:107 msg type: 269090816 2025-04-07T13:55:53.121659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 107 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2025-04-07T13:55:53.122708Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:53.122864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:53.122928Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-07T13:55:53.123029Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2025-04-07T13:55:53.131126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.131215Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T13:55:53.131285Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2025-04-07T13:55:53.131328Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 107 2025-04-07T13:55:53.132468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 663 } } 2025-04-07T13:55:53.132508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-07T13:55:53.132613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 663 } } 2025-04-07T13:55:53.132731Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 663 } } 2025-04-07T13:55:53.133329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936893 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 
Generation: 2 2025-04-07T13:55:53.133385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-07T13:55:53.133510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936893 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-07T13:55:53.133568Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-07T13:55:53.136179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.136248Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T13:55:53.136302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T13:55:53.136346Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-07T13:55:53.136440Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T13:55:53.136563Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-07T13:55:53.136723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:53.136784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:55:53.137182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.137457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.138691Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:53.138744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:53.138892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:55:53.139051Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:53.139104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-04-07T13:55:53.139153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-07T13:55:53.139210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.139276Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 
72057594046678944 2025-04-07T13:55:53.139356Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.139394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:55:53.139435Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-07T13:55:53.140651Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:55:53.140772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:55:53.140810Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T13:55:53.140867Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:55:53.140908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:53.141772Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:55:53.141849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T13:55:53.141874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T13:55:53.141902Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T13:55:53.141930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:55:53.141994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-07T13:55:53.143201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T13:55:53.143254Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:53.143502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:55:53.143627Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-07T13:55:53.143674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T13:55:53.143719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-07T13:55:53.143805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to 
actorId: [2:378:2343] message: TxId: 107 2025-04-07T13:55:53.143846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T13:55:53.143884Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T13:55:53.143922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T13:55:53.144015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:55:53.145432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T13:55:53.146835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T13:55:53.147665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T13:55:53.147723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:568:2531] TestWaitNotification: OK eventTxId 107 >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 11967, MsgBus: 22455 2025-04-07T13:49:06.831766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490567905321184805:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:06.831797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f7/r3tmp/tmphqhI2m/pdisk_1.dat 2025-04-07T13:49:07.835382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:07.842479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:07.851170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:49:07.851259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:49:07.871315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11967, node 1 2025-04-07T13:49:08.145333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:08.145352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:08.145361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:08.145458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22455 TClient is connected to server localhost:22455 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:49:08.849024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:08.941017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.139323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.391055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:09.511422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:11.063983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567926796023941:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:11.064069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:11.442554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.544559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.637445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.754808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.810032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:49:11.849933Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490567905321184805:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:49:11.850104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:49:11.900789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:49:12.083435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567931090992095:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.083519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.087002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490567931090992100:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:49:12.090369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:49:12.134855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490567931090992102:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:49:13.317181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:49:22.802479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:49:22.802506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:28.616118Z node 1 :TX_DATASHARD ERROR: CPU usage 77.7689% is higher than threshold of 60% in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-04-07T13:50:30.861377Z node 1 :TX_DATASHARD ERROR: CPU usage 134.524% is higher than threshold of 60% in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-04-07T13:50:57.053503Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.315, eph 79} end=2, 3blobs 0r (max 150), put Spent{2.146s wa 0.385s cnt 1} 2025-04-07T13:50:57.064892Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:334} Compact 201 on TGenCompactionParams{1001: gen 1 epoch 0, 5 parts} step 315, product {0 parts epoch 0} thrown 2025-04-07T13:50:57.111824Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.289, eph 69} end=2, 46blobs 0r (max 600), put Spent{11.06s wa 1.122s cnt 15} 2025-04-07T13:50:57.111940Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:334} Compact 183 on TGenCompactionParams{1001: gen 2 epoch 0, 4 parts} step 289, product {0 parts epoch 0} thrown 2025-04-07T13:51:11.540300Z node 1 :TX_DATASHARD ERROR: CPU usage 92.0334% is higher than threshold of 60% in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037921 table: [/Root/LargeTable] 2025-04-07T13:51:28.059596Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923005952032}: tablet 72075186224037920 could not find a group for channel 0 pool /Root:test 2025-04-07T13:51:28.059646Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923005952032}: tablet 72075186224037920 could not find a group for channel 1 pool /Root:test 2025-04-07T13:51:28.059669Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923005952032}: tablet 72075186224037920 wasn't changed 2025-04-07T13:51:28.059690Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923005952032}: tablet 72075186224037920 skipped channel 0 2025-04-07T13:51:28.059714Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923005952032}: tablet 72075186224037920 skipped channel 1 2025-04-07T13:51:28.134124Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923009227136}: tablet 72075186224037921 could not find a group for channel 0 pool /Root:test 2025-04-07T13:51:28.134183Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923009227136}: tablet 72075186224037921 could not find a group for channel 1 pool /Root:test 2025-04-07T13:51:28.134198Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923009227136}: tablet 72075186224037921 wasn't changed 2025-04-07T13:51:28.134215Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923009227136}: tablet 72075186224037921 skipped channel 0 2025-04-07T13:51:28.134243Z node 1 :HIVE WARN: HIVE#72057594037968897 
THive::TTxUpdateTabletGroups::Execute{88923009227136}: tablet 72075186224037921 skipped channel 1 2025-04-07T13:51:29.933035Z node 1 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-07T13:51:30.257003Z node 1 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-07T13:51:30.948921Z node 1 :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01 2025-04-07T13:51:31.259237Z node 1 :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01 2025-04-07T13:51:31.953001Z nod ... 527Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004798208}: tablet 72075186224037902 wasn't changed 2025-04-07T13:55:29.978540Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004798208}: tablet 72075186224037902 skipped channel 0 2025-04-07T13:55:29.978564Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004798208}: tablet 72075186224037902 skipped channel 1 2025-04-07T13:55:29.978731Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923006265184}: tablet 72075186224037906 could not find a group for channel 0 pool /Root:test 2025-04-07T13:55:29.978757Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923006265184}: tablet 72075186224037906 could not find a group for channel 1 pool /Root:test 2025-04-07T13:55:29.978770Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923006265184}: tablet 72075186224037906 wasn't changed 2025-04-07T13:55:29.978783Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923006265184}: tablet 72075186224037906 skipped channel 0 2025-04-07T13:55:29.978806Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923006265184}: tablet 72075186224037906 skipped channel 1 2025-04-07T13:55:30.294629Z node 2 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-07T13:55:31.090181Z node 2 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-07T13:55:31.296271Z node 2 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-07T13:55:31.751896Z node 2 :TX_DATASHARD ERROR: Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976710764 2025-04-07T13:55:31.762485Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710764 at tablet 72075186224037921 errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976710764) | 2025-04-07T13:55:31.763425Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710764 at tablet 72075186224037921 status: ERROR errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976710764) | 2025-04-07T13:55:31.783974Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490569556131382865:2471] TxId: 281474976710764. Ctx: { TraceId: 01jr88hv3c5jwfezek4j9f4kdt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdlYjMyOWMtYWQwZjYzYTMtMzBmZWRjY2YtOTA2YmUwNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976710764; 2025-04-07T13:55:31.854135Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjdlYjMyOWMtYWQwZjYzYTMtMzBmZWRjY2YtOTA2YmUwNWQ=, ActorId: [2:7490568602648628167:2471], ActorState: ExecuteState, TraceId: 01jr88hv3c5jwfezek4j9f4kdt, Create QueryResponse for error on request, msg:
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005
: Error: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976710764 2025-04-07T13:55:32.095319Z node 2 :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01 Trying to start YDB, gRPC: 26960, MsgBus: 61979 2025-04-07T13:55:33.176251Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490569564495416212:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:33.176336Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f7/r3tmp/tmpa1WHkV/pdisk_1.dat 2025-04-07T13:55:33.254125Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26960, node 3 2025-04-07T13:55:33.297490Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:33.297609Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:33.299039Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:33.315628Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:33.315649Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:33.315658Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:33.315801Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61979 TClient is connected to server localhost:61979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:33.756013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:33.774206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:33.823623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:55:33.957872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:34.019807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:35.746924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569573085352528:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:35.747041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:35.795985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:35.829923Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:35.864720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:35.894270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:35.938297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:35.984064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:36.057180Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569577380320322:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:36.057289Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:36.057341Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490569577380320327:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:36.060277Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:36.069088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490569577380320329:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:55:37.129917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:38.176308Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490569564495416212:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:38.176370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:46.619711Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTlhN2UxNGYtZmUzMTYyZjEtZmQ3Y2JhZmYtNGQwM2U1YTQ=, ActorId: [3:7490569611740060024:2607], ActorState: ExecuteState, TraceId: 01jr88j85rc1bdd27tmk7kechm, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> GenericFederatedQuery::PostgreSQLSelectCount >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::ClickHouseManagedSelectConstant |87.7%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TestKinesisHttpProxy::ListShardsToken >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> KqpJoin::IdxLookupLeftPredicate [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> KqpJoin::JoinDupColumnRightPure [GOOD] >> KqpJoin::JoinLeftPureInner >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> Secret::DeactivatedQueryService [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2025-04-07T13:55:49.456975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021d0/r3tmp/tmp4txwlr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18855, node 1 TClient is connected to server localhost:20008 2025-04-07T13:55:50.110345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:50.148755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:50.152895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:50.152986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:50.153029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:50.153367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:50.202605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:50.202724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:50.215077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T13:56:03.081978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:648:2541], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.082096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:658:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.082191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.089648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:56:03.113065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:662:2549], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:56:03.355778Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:723:2590], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2025-04-07T13:56:03.357587Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTUwNWRmNGMtMzI5NjdjMTUtMzY3NTUxNDQtMTBlZDhlNGM=, ActorId: [1:646:2539], ActorState: ExecuteState, TraceId: 01jr88jtc12jw73x76qg2837zt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanUpdatedRows >> GenericFederatedQuery::YdbManagedSelectConstant >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |87.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> IndexBuildTest::CancelBuild [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:47.563012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:47.563123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.563174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:47.563224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:47.563272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:47.563301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:47.563366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.563704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:47.635715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:47.635780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:47.648614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T13:55:47.648810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:47.649018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:47.654801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:47.655034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:47.655664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.655889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:47.658022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.659366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.659424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.659553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:47.659626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.659669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:47.659842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.668378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:47.809936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:47.810194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.810433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:47.810649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:47.810711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.813667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.813805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:47.813969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.814028Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:47.814072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:47.814121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:47.817545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.817620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:47.817700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:47.821583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.821642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.821686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.821739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.825662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:47.829519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:47.829763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:47.830976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.831190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:47.831252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.831535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:47.831586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.831791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.831907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:47.836307Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.836381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.836578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.836619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:47.837041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.837102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:47.837211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:47.837256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.837325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:47.837367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.837410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:47.837438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:47.837551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:47.837604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:47.837636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:47.839645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.839758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.839796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:47.839835Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:47.839884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.840000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
eration IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-07T13:56:08.187501Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-07T13:56:08.187784Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:08.187885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:56:08.187931Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-07T13:56:08.187971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-07T13:56:08.190191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T13:56:08.190241Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-07T13:56:08.190330Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T13:56:08.190360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T13:56:08.190395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-07T13:56:08.190473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:121:2147] message: TxId: 281474976710760 2025-04-07T13:56:08.190510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T13:56:08.190542Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T13:56:08.190571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-07T13:56:08.190633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 2025-04-07T13:56:08.193543Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T13:56:08.193613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-07T13:56:08.193690Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-07T13:56:08.193761Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , 
SubscribersCount: 1, CreateSender: [2:1140:3004], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:08.195680Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:56:08.195761Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1140:3004], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:08.195825Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-07T13:56:08.197952Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:56:08.198029Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1140:3004], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:08.198067Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T13:56:08.198203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:56:08.198246Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1234:3088] TestWaitNotification: OK eventTxId 102 2025-04-07T13:56:08.201406Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-07T13:56:08.201642Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2025-04-07T13:56:08.208719Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:56:08.209079Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 4.42ms result status StatusSuccess 2025-04-07T13:56:08.209522Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-07T13:56:08.212037Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:56:08.212209Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 196us result status StatusPathDoesNotExist 2025-04-07T13:56:08.212335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> KqpJoin::JoinLeftPureInner [GOOD] >> KqpJoin::JoinLeftPureFull >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore >> Cdc::AddIndex [GOOD] >> Cdc::AddStream ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 20980, MsgBus: 4420 2025-04-07T13:55:56.382005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569663986288364:2180];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.382182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e47/r3tmp/tmpeJV8co/pdisk_1.dat 2025-04-07T13:55:57.177535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:57.229662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:57.229739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:57.233896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20980, node 1 2025-04-07T13:55:57.445224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:57.445240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T13:55:57.445257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:57.445336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4420 TClient is connected to server localhost:4420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:58.339795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.398015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:01.381647Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569663986288364:2180];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:01.381707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:01.602495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569685461125347:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:01.602595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.014533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:02.236611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569689756092763:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.236835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.237431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569689756092769:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.240579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:02.259258Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:56:02.259567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569689756092772:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:03.023209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.544956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-07T13:56:04.099334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.669741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.154841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.650821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:05.731485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.155910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 13116, MsgBus: 18140 2025-04-07T13:55:56.514867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569666821442702:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.514939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e40/r3tmp/tmprXOZKO/pdisk_1.dat 2025-04-07T13:55:56.948778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:56.948905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:56.960594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13116, node 1 2025-04-07T13:55:56.983927Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:55:56.983956Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:55:57.011513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:57.157892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:57.157916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:57.157924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:57.158028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18140 TClient is connected to server localhost:18140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:57.705580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:01.517233Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569666821442702:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:01.517300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:02.864790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569692591246916:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.878689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.884681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:03.026711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696886214334:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.026794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.027410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696886214339:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.030282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:03.036609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569696886214341:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:03.702581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.125243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-07T13:56:04.533831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.100955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.629886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:06.323195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:06.372943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.680628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710713:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-04-07T13:54:31.927196Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:32.035696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:32.059056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:32.059153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:32.059364Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:32.069529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:32.069739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:32.069975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:32.070084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:32.070190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:32.070303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:32.070413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:32.070532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:32.070633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:32.070751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.070845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:32.070953Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:32.093782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:32.093849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:32.096546Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:32.096731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:32.096779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:32.096970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.097115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:32.097185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:32.097226Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:32.097303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:32.097380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:32.097428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:32.097453Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:32.097613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:32.097672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:32.097706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:32.097731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:32.097805Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:32.097849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:32.097910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:32.097939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:32.098018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:32.098060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:32.098100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:32.098153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:32.098192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:32.098223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:32.098393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-07T13:54:32.098471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-07T13:54:32.098559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-07T13:54:32.098621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-07T13:54:32.098776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:32.098858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:32.098898Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:32.099103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T13:54:32.099147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.099180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:32.099320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:32.099360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:32.099392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:32.099572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:32.099617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:32.099645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... age_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=17140; 2025-04-07T13:56:07.855494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=36180; 2025-04-07T13:56:07.855805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=226; 2025-04-07T13:56:07.857455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=258; 2025-04-07T13:56:07.857566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1699; 2025-04-07T13:56:07.857744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=113; 2025-04-07T13:56:07.857882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:07.857939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=152; 2025-04-07T13:56:07.858131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=147; 2025-04-07T13:56:07.858310Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=130; 2025-04-07T13:56:07.859021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=652; 2025-04-07T13:56:07.860196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1097; 2025-04-07T13:56:07.860316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=43; 2025-04-07T13:56:07.860414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=49; 2025-04-07T13:56:07.860469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:56:07.860534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-07T13:56:07.860585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-07T13:56:07.860707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=84; 2025-04-07T13:56:07.860761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-07T13:56:07.860858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2025-04-07T13:56:07.860918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T13:56:07.860997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-07T13:56:07.861048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=49454; 2025-04-07T13:56:07.861306Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40196124;raw_bytes=67627660; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64332088;raw_bytes=108739216; inactive portions=44;blobs=88;rows=1246652;bytes=70696640;raw_bytes=119259832; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:07.861450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1668:3644];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:07.861521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1668:3644];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:07.861585Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:07.861685Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:07.861740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:07.861826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:07.862084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:07.862174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:07.862271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:07.862336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:07.862384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:07.862444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:07.862536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:07.862596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:07.863523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:07.863749Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1701:3670];tablet_id=9437184;parent=[1:1668:3644];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:07.869967Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:07.870537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:07.870611Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:56:07.870651Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:07.870720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:07.870835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:07.871102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:07.871224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:07.871297Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:07.871372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:07.871422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:07.871488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:07.871578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:07.871643Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:07.872704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:07.876066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:07.886947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:07.887054Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:1668:3644];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] Test command err: 2025-04-07T13:55:14.429237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569485195231598:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:14.431675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00178d/r3tmp/tmpHOqx0e/pdisk_1.dat 2025-04-07T13:55:15.042553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:15.053981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:15.054087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:15.058394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22390, node 1 2025-04-07T13:55:15.280101Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:15.280126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:15.280132Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:15.280207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:15.865124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:15.883049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:55:15.887332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:15.888237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:15.888459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:15.888477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:55:15.889063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:15.889080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:55:15.889419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:15.891032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:15.891742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034115936, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:15.891767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:15.892031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:15.892642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:15.892974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:15.893070Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:15.893193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:15.893248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:15.893294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:55:15.895440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:15.895509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:15.895526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:15.895626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:6154 waiting... 
2025-04-07T13:55:16.174644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.174895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:16.174927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.175686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:16.175831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.177331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:16.177545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034116223, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:16.177590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034116223, at schemeshard: 72057594046644480 2025-04-07T13:55:16.177854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:16.177978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:16.178042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 1 2025-04-07T13:55:16.178547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:16.178763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:16.179310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:16.179351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:16.179398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:16.179478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:55:16.181526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.181890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:16.181935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.182007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 
progress is 1/1 2025-04-07T13:55:16.182124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:16.182152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:16.182575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:16.182715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:16.182878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:16.183416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:16.183488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:55:16.183502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:55:16.183586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T13:55:16.184410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:55:16.184682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:16.185298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ... 
74976710696:0 3 -> 128 2025-04-07T13:56:08.518919Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.520027Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034168569, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:56:08.520061Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976710696:0 HandleReply TEvOperationPlan, step: 1744034168569, at tablet: 72057594046644480 2025-04-07T13:56:08.520241Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710696:0 128 -> 240 2025-04-07T13:56:08.520873Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:56:08.521131Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:56:08.521180Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710696:0 ProgressState 2025-04-07T13:56:08.521270Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710696:0 progress is 1/1 2025-04-07T13:56:08.521311Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710696:0 2025-04-07T13:56:08.521475Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710696, publications: 1, subscribers: 1 2025-04-07T13:56:08.522984Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710696 2025-04-07T13:56:08.523020Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710696 2025-04-07T13:56:08.523038Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710696, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2025-04-07T13:56:08.523106Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710696, subscribers: 1 2025-04-07T13:56:08.524488Z node 7 :HTTP_PROXY INFO: http request [RegisterStreamConsumer] requestId [bb179558-57606638-6498a4d1-ac0062ed] reply ok 2025-04-07T13:56:08.524778Z node 7 :HTTP DEBUG: (#44,[::1]:58446) <- (200 ) 2025-04-07T13:56:08.524883Z node 7 :HTTP DEBUG: (#44,[::1]:58446) connection closed Http output full {"Consumer":{"ConsumerCreationTimestamp":1744034.168,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 200 {"Consumer":{"ConsumerCreationTimestamp":1744034.168,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} E0000 00:00:1744034168.526427 212997 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T13:56:08.526041Z node 7 :HTTP DEBUG: (#44,[::1]:58460) incoming connection opened 2025-04-07T13:56:08.526085Z node 7 :HTTP DEBUG: (#44,[::1]:58460) -> (POST /Root) 2025-04-07T13:56:08.526171Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5868:b000:6050:0:4068:b000:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 3eb0e39-98f1e802-f93259b0-f0939334 2025-04-07T13:56:08.526487Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [3eb0e39-98f1e802-f93259b0-f0939334] got 
new request from [5868:b000:6050:0:4068:b000:6050:0] database '/Root' stream 'teststream' E0000 00:00:1744034168.526859 212997 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744034168.526893 212997 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744034168.526978 212997 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744034168.527006 212997 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T13:56:08.526838Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [3eb0e39-98f1e802-f93259b0-f0939334] [auth] Authorized successfully 2025-04-07T13:56:08.526949Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [3eb0e39-98f1e802-f93259b0-f0939334] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.527723Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [3eb0e39-98f1e802-f93259b0-f0939334] reply ok 2025-04-07T13:56:08.527877Z node 7 :HTTP DEBUG: (#44,[::1]:58460) <- (200 ) 2025-04-07T13:56:08.527928Z node 7 :HTTP DEBUG: (#44,[::1]:58460) connection closed Http output full {"NextToken":"CM/9pYThMhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1744034168,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"CM/9pYThMhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1744034168,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 2025-04-07T13:56:08.541204Z node 7 :HTTP DEBUG: (#44,[::1]:58464) incoming connection opened 2025-04-07T13:56:08.541297Z node 7 :HTTP DEBUG: (#44,[::1]:58464) -> (POST /Root) 2025-04-07T13:56:08.541421Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d832:c600:6050:0:c032:c600:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 16e3e730-b5ed025a-38c6aa3e-b711fb55 2025-04-07T13:56:08.552948Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [16e3e730-b5ed025a-38c6aa3e-b711fb55] got new request from [d832:c600:6050:0:c032:c600:6050:0] database '/Root' stream '' 2025-04-07T13:56:08.559565Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [16e3e730-b5ed025a-38c6aa3e-b711fb55] [auth] Authorized successfully 2025-04-07T13:56:08.559889Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [16e3e730-b5ed025a-38c6aa3e-b711fb55] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.560603Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [16e3e730-b5ed025a-38c6aa3e-b711fb55] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2025-04-07T13:56:08.560913Z node 7 :HTTP DEBUG: (#44,[::1]:58464) <- (400 InvalidArgumentException) 2025-04-07T13:56:08.560969Z node 7 :HTTP DEBUG: (#44,[::1]:58464) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 61 { "NextToken":"CM/9pYThMhABGAEiCnRlc3RzdHJlYW0=garbage", "StreamArn":"", "MaxResults":100 } 0 2025-04-07T13:56:08.561001Z node 7 :HTTP DEBUG: (#44,[::1]:58464) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 16e3e730-b5ed025a-38c6aa3e-b711fb55 x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2025-04-07T13:56:08.561096Z node 7 :HTTP DEBUG: (#44,[::1]:58464) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2025-04-07T13:56:08.562018Z node 7 :HTTP DEBUG: (#44,[::1]:58470) incoming connection opened 2025-04-07T13:56:08.562100Z node 7 :HTTP DEBUG: (#44,[::1]:58470) -> (POST /Root) 2025-04-07T13:56:08.562217Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [7878:1400:6050:0:6078:1400:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: bc628e23-a50d1262-be17aa94-26ca607d 2025-04-07T13:56:08.562742Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [bc628e23-a50d1262-be17aa94-26ca607d] got new request from [7878:1400:6050:0:6078:1400:6050:0] database '/Root' stream '' 2025-04-07T13:56:08.567044Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [bc628e23-a50d1262-be17aa94-26ca607d] [auth] Authorized successfully 2025-04-07T13:56:08.567375Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [bc628e23-a50d1262-be17aa94-26ca607d] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.568465Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [bc628e23-a50d1262-be17aa94-26ca607d] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2025-04-07T13:56:08.568651Z node 7 :HTTP DEBUG: (#44,[::1]:58470) <- (400 InvalidArgumentException) 2025-04-07T13:56:08.568726Z node 7 :HTTP DEBUG: (#44,[::1]:58470) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "NextToken":"Y18AjrkckeND9kjnbEhjhkX$^[]?!", "StreamArn":"", "MaxResults":100 } 0 Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2025-04-07T13:56:08.568764Z node 7 :HTTP DEBUG: (#44,[::1]:58470) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: bc628e23-a50d1262-be17aa94-26ca607d x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2025-04-07T13:56:08.568873Z node 7 :HTTP DEBUG: (#44,[::1]:58470) connection closed 2025-04-07T13:56:08.570675Z node 7 :HTTP DEBUG: (#44,[::1]:58478) incoming connection opened 2025-04-07T13:56:08.570730Z node 7 :HTTP DEBUG: (#44,[::1]:58478) -> (POST /Root) 2025-04-07T13:56:08.570856Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [588f:3200:6050:0:408f:3200:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: b0a50a63-c7cdad80-1f6a1a63-66d146d0 2025-04-07T13:56:08.571327Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [b0a50a63-c7cdad80-1f6a1a63-66d146d0] got new request from [588f:3200:6050:0:408f:3200:6050:0] database '/Root' stream 'teststream' 2025-04-07T13:56:08.572942Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [b0a50a63-c7cdad80-1f6a1a63-66d146d0] [auth] Authorized successfully 2025-04-07T13:56:08.573086Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [b0a50a63-c7cdad80-1f6a1a63-66d146d0] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.578641Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [b0a50a63-c7cdad80-1f6a1a63-66d146d0] reply ok 2025-04-07T13:56:08.578936Z node 7 :HTTP DEBUG: (#44,[::1]:58478) <- (200 ) 2025-04-07T13:56:08.579039Z node 7 :HTTP DEBUG: (#44,[::1]:58478) connection closed Http output full {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1744034168,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1744034168,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-04-07T13:55:12.650478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569477745131110:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:12.650700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001790/r3tmp/tmp7nO6go/pdisk_1.dat 2025-04-07T13:55:13.134678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:13.134797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:13.149591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:13.204336Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2450, node 1 2025-04-07T13:55:13.222597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:13.222639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:13.222708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:55:13.222778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:55:13.373453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:13.373474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:13.373482Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:13.373577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:13.758094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:13.764813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:13.766296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:13.767147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:13.767364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:13.767381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:55:13.768362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:13.768389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:55:13.768943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:13.770931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034113815, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:13.770960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:13.771198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976710657:0 128 -> 240 2025-04-07T13:55:13.772050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:13.772241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:13.772329Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:13.772414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:13.772472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:13.772514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:55:13.779321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:13.779382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:13.779414Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:13.779527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2025-04-07T13:55:13.785386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:16990 2025-04-07T13:55:14.018911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.019141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:14.019180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.019832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:14.020025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:55:14.021459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034114067, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:14.021496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034114067, at schemeshard: 72057594046644480 2025-04-07T13:55:14.021747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:14.021843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:14.021879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:55:14.022426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.022616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:14.023145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:14.023194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:14.023209Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:14.023256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2025-04-07T13:55:14.036688Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:14.044585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.044933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:14.044950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.045020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:55:14.045089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:14.045098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:14.045522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:14.045656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.045857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:14.046825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:14.046874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:55:14.046885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:55:14.046946Z node 1 :FLAT_TX_SCHEMESHA ... 3 2025-04-07T13:56:08.761927Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 3 2025-04-07T13:56:08.761962Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710693, subscribers: 1 2025-04-07T13:56:08.766064Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T13:56:08.767602Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2025-04-07T13:56:08.767633Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2025-04-07T13:56:08.767651Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2025-04-07T13:56:08.767669Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-04-07T13:56:08.767688Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2025-04-07T13:56:08.768353Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [903a7f49-29096b7c-64983482-3d8fc0aa] reply ok 2025-04-07T13:56:08.768573Z node 7 :HTTP DEBUG: (#44,[::1]:40208) <- (200 ) 2025-04-07T13:56:08.768686Z node 7 :HTTP DEBUG: (#44,[::1]:40208) connection closed Http output full {} 200 {} E0000 00:00:1744034168.774391 213255 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T13:56:08.773015Z node 7 :HTTP DEBUG: (#44,[::1]:40224) incoming connection opened 2025-04-07T13:56:08.773079Z node 7 :HTTP DEBUG: (#44,[::1]:40224) -> (POST /Root) 2025-04-07T13:56:08.773212Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9808:a900:6050:0:8008:a900:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 44bc8ad3-39f650ae-4fe8994e-9dea899d 2025-04-07T13:56:08.773543Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [44bc8ad3-39f650ae-4fe8994e-9dea899d] got new request from [9808:a900:6050:0:8008:a900:6050:0] database '/Root' stream 'teststream' 2025-04-07T13:56:08.774247Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [44bc8ad3-39f650ae-4fe8994e-9dea899d] [auth] Authorized successfully 2025-04-07T13:56:08.774335Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [44bc8ad3-39f650ae-4fe8994e-9dea899d] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.776421Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.776448Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7490569715590681950:2485], now have 1 active actors on pipe 2025-04-07T13:56:08.776474Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.776492Z node 7 :PERSQUEUE DEBUG: [PQ: 
72075186224037911] server connected, pipe [7:7490569715590681951:2486], now have 1 active actors on pipe Http output full {"NextToken":"CMj/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-07T13:56:08.777886Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [44bc8ad3-39f650ae-4fe8994e-9dea899d] reply ok 2025-04-07T13:56:08.778201Z node 7 :HTTP DEBUG: (#44,[::1]:40224) <- (200 ) 2025-04-07T13:56:08.778275Z node 7 :HTTP DEBUG: (#44,[::1]:40224) connection closed 200 {"NextToken":"CMj/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-07T13:56:08.778351Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.778370Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7490569715590681950:2485] destroyed 2025-04-07T13:56:08.778387Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.778401Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7490569715590681951:2486] destroyed 2025-04-07T13:56:08.781954Z node 7 :HTTP DEBUG: (#44,[::1]:40230) incoming connection opened 2025-04-07T13:56:08.782018Z node 7 :HTTP DEBUG: (#44,[::1]:40230) -> (POST /Root) 2025-04-07T13:56:08.782155Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78ec:7e00:6050:0:60ec:7e00:6050:0] request [ListShards] url [/Root] database [/Root] requestId: c22247b3-b4498836-e3e4ec36-4f6099a8 2025-04-07T13:56:08.782542Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [c22247b3-b4498836-e3e4ec36-4f6099a8] got new request from [78ec:7e00:6050:0:60ec:7e00:6050:0] database '/Root' stream 'teststream' E0000 00:00:1744034168.783203 213255 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T13:56:08.783055Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [c22247b3-b4498836-e3e4ec36-4f6099a8] [auth] Authorized successfully 2025-04-07T13:56:08.783143Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [c22247b3-b4498836-e3e4ec36-4f6099a8] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.784217Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.784247Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7490569715590681963:2491], now have 1 active actors on pipe 2025-04-07T13:56:08.784286Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.784302Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7490569715590681962:2490], now 
have 1 active actors on pipe 2025-04-07T13:56:08.785206Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [c22247b3-b4498836-e3e4ec36-4f6099a8] reply ok 2025-04-07T13:56:08.785362Z node 7 :HTTP DEBUG: (#44,[::1]:40230) <- (200 ) 2025-04-07T13:56:08.785437Z node 7 :HTTP DEBUG: (#44,[::1]:40230) connection closed Http output full {"NextToken":"CND/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CND/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-07T13:56:08.787145Z node 7 :HTTP DEBUG: (#47,[::1]:40238) incoming connection opened 2025-04-07T13:56:08.787209Z node 7 :HTTP DEBUG: (#47,[::1]:40238) -> (POST /Root) 2025-04-07T13:56:08.787305Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78ee:7600:6050:0:60ee:7600:6050:0] request [ListShards] url [/Root] database [/Root] requestId: f467e1f1-24afac1b-cc344fe5-4d829794 2025-04-07T13:56:08.787661Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f467e1f1-24afac1b-cc344fe5-4d829794] got new request from [78ee:7600:6050:0:60ee:7600:6050:0] database '/Root' stream 'teststream' 2025-04-07T13:56:08.787987Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.788012Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7490569715590681962:2490] destroyed 2025-04-07T13:56:08.788032Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.788046Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7490569715590681963:2491] destroyed E0000 00:00:1744034168.788399 213255 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T13:56:08.788271Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [f467e1f1-24afac1b-cc344fe5-4d829794] [auth] Authorized successfully 2025-04-07T13:56:08.788348Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f467e1f1-24afac1b-cc344fe5-4d829794] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:08.789555Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.789587Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7490569715590681974:2495], now have 1 active actors on pipe 2025-04-07T13:56:08.789612Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:08.789628Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7490569715590681975:2496], now have 1 active actors on pipe Http output full 
{"NextToken":"CNX/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CNX/pYThMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-07T13:56:08.790498Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f467e1f1-24afac1b-cc344fe5-4d829794] reply ok 2025-04-07T13:56:08.790661Z node 7 :HTTP DEBUG: (#47,[::1]:40238) <- (200 ) 2025-04-07T13:56:08.790730Z node 7 :HTTP DEBUG: (#47,[::1]:40238) connection closed 2025-04-07T13:56:08.792367Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.792392Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7490569715590681974:2495] destroyed 2025-04-07T13:56:08.792409Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:08.792423Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7490569715590681975:2496] destroyed 2025-04-07T13:56:08.879159Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7490569715590681677:2445]: Pool not found >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 28914, MsgBus: 26207 2025-04-07T13:55:58.141764Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569673205690166:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:58.142074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e34/r3tmp/tmpj3lIpL/pdisk_1.dat 2025-04-07T13:55:58.901033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:58.909223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:58.909312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-07T13:55:58.962594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28914, node 1 2025-04-07T13:55:59.473937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:59.473959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:59.473968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:59.474056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26207 TClient is connected to server localhost:26207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:01.351887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:03.112786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569673205690166:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:03.112850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:03.945711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569694680527149:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.965222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.971676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.116188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569698975494567:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.116455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.119234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569698975494574:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.119314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.119656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569698975494579:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.123862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.126866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569698975494603:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.134848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:56:04.135077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569698975494581:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:04.135138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569698975494609:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:04.943252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.565757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-07T13:56:06.046521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:56:06.547066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.210323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.757897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:07.833586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:10.318802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 22245, MsgBus: 24373 2025-04-07T13:55:56.803521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569666321733708:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.803591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e60/r3tmp/tmpKSNPsk/pdisk_1.dat 2025-04-07T13:55:57.487489Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:57.500520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:57.512848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:57.526052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22245, node 1 2025-04-07T13:55:57.825284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:57.825320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:57.825335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:57.825427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24373 TClient is connected to server localhost:24373 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:58.813617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:01.804827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569666321733708:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:01.804898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:03.853803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696386505425:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.853929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.088304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.254426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569700681472844:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.254497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.254713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569700681472849:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.257616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.266817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569700681472851:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:04.999274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.511388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:06.109715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:06.656009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.214018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.839432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:07.898674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:10.275503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710703:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::YdbManagedSelectAll >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-04-07T13:55:45.276083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569617586627371:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:45.276272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a38/r3tmp/tmpLDZ6LO/pdisk_1.dat 2025-04-07T13:55:45.703299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:45.715065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:45.715157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:45.721050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62239, node 1 2025-04-07T13:55:45.829123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:45.829147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:45.829158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:45.829265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:55:46.121200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.134930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:46.136272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.138743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:46.138889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:46.138910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:55:46.140208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:55:46.140930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:46.140962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:55:46.144037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.148275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034146190, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:46.148329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:55:46.148637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:55:46.151655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:46.151837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:46.151890Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:55:46.151979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:55:46.152015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:55:46.152052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:55:46.154787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:55:46.154846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:55:46.154866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:46.154981Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:3006 2025-04-07T13:55:48.509497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569630471530112:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:48.512516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:48.769037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:48.769593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T13:55:48.770203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:48.770253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:48.773565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T13:55:48.773746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:48.773925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:48.773995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:55:48.775068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:55:48.775102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:55:48.775118Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:48.775355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:55:48.775376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:55:48.775386Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:55:48.776028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T13:55:48.785569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:55:48.785677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T13:55:48.795283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:55:48.853882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:55:48.853913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:55:48.854004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2025-04-07T13:55:48.861733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:55:48.865654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034148913, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:48.865705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034148913 2025-04-07T13:55:48.865843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T13:55:48.871628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:48.871979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:48.872043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T13:55:48.873641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T13:55:48.873688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T13:55:48.873702Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72 ... n/3?node_id=1&id=NmEwNzEzMWEtZTQ2MGUwMWYtMWNkMzJjMWYtMmQxNGZiZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.170418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720350. Ctx: { TraceId: 01jr88k0aa9zeq651bn22g11yz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzllMjdiMDgtYmY1NmI0Y2EtODFkNDkwMTctZDZmYTJhODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.170820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720351. Ctx: { TraceId: 01jr88k0ab78n4rk46hh0pwc0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRjN2MyZjQtYmQ0YTZiOC1jN2RiMTE3Ni00MzM3ZGFiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.171284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720352. Ctx: { TraceId: 01jr88k0aa6bes4gz8sg9c96kb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxYzgxNDUtODRjMmU3ODEtZmIzNDIwYjgtNmQ0YjdiMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.172507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720353. Ctx: { TraceId: 01jr88k0ab61zcwwq6tpy69avv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRkZjkxZTQtNmUxN2UwNmItNGJjYjhjZDYtY2UyODUwZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.185000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720355. Ctx: { TraceId: 01jr88k0ah6dchtz7zhj43t5sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJkZDY2NzQtMjQ4OTVmMjgtY2IyYWEwYjgtY2ExNGU5Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.186379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720354. 
Ctx: { TraceId: 01jr88k0ah88ts53dpbn7j6cay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FlNjg3YzYtMjljNTk3MjktNDg0Yjc1MzctNDRkZjMwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.192457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720356. Ctx: { TraceId: 01jr88k0az5qcd9c3384yt5vnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZkYTU4M2MtN2I1YzU0MGEtOGY2NTFkNDQtODVhNTBlMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.193084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720357. Ctx: { TraceId: 01jr88k0b17pgp01b9zsg0sc73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIyNTM3MzAtYTU5NGIwNC1kY2ViY2NmNi1lYjUxMWFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.198417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720358. Ctx: { TraceId: 01jr88k0b51d42emcs991nvw7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEwNzEzMWEtZTQ2MGUwMWYtMWNkMzJjMWYtMmQxNGZiZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.204461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720359. Ctx: { TraceId: 01jr88k0b81jcjv8m99rrh6yk6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUxMjM4NTEtY2Y5YjEwNDUtOWIzNWUyYTQtZmY1MTRiYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.208409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720360. Ctx: { TraceId: 01jr88k0bb1bjjt7qfcvhfwv0m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRkZjkxZTQtNmUxN2UwNmItNGJjYjhjZDYtY2UyODUwZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.212086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720361. Ctx: { TraceId: 01jr88k0be618bpq8rc7c2v998, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzllMjdiMDgtYmY1NmI0Y2EtODFkNDkwMTctZDZmYTJhODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.212642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720362. Ctx: { TraceId: 01jr88k0becgv1j5qj08mgy0pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxYzgxNDUtODRjMmU3ODEtZmIzNDIwYjgtNmQ0YjdiMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.217112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720366. Ctx: { TraceId: 01jr88k0bn70kh25nzspk9de1q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FlNjg3YzYtMjljNTk3MjktNDg0Yjc1MzctNDRkZjMwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.217619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720363. Ctx: { TraceId: 01jr88k0bm1sm6m44tdfpq6d3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJkZDY2NzQtMjQ4OTVmMjgtY2IyYWEwYjgtY2ExNGU5Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.218024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720364. 
Ctx: { TraceId: 01jr88k0bn8dq2c3e2k6e61w5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZkYTU4M2MtN2I1YzU0MGEtOGY2NTFkNDQtODVhNTBlMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.218406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720365. Ctx: { TraceId: 01jr88k0bn8t8cdyz1znfmj8qf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRjN2MyZjQtYmQ0YTZiOC1jN2RiMTE3Ni00MzM3ZGFiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.219796Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720367. Ctx: { TraceId: 01jr88k0bp3r5hyfgn2je29bkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIyNTM3MzAtYTU5NGIwNC1kY2ViY2NmNi1lYjUxMWFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.227348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720368. Ctx: { TraceId: 01jr88k0bz8qhpr4v3r0dv07v0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEwNzEzMWEtZTQ2MGUwMWYtMWNkMzJjMWYtMmQxNGZiZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.228361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720369. Ctx: { TraceId: 01jr88k0bz4gh5kmnr7ks2fj2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUxMjM4NTEtY2Y5YjEwNDUtOWIzNWUyYTQtZmY1MTRiYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-07T13:56:09.233761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720370. Ctx: { TraceId: 01jr88k0c41yayqd19tfz4gxc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRkZjkxZTQtNmUxN2UwNmItNGJjYjhjZDYtY2UyODUwZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.235545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720372. Ctx: { TraceId: 01jr88k0ce23540mn5g2nxyfrd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzllMjdiMDgtYmY1NmI0Y2EtODFkNDkwMTctZDZmYTJhODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.235902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720374. Ctx: { TraceId: 01jr88k0ce8q6r47kyjg4xyv3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJkZDY2NzQtMjQ4OTVmMjgtY2IyYWEwYjgtY2ExNGU5Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.236136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720373. Ctx: { TraceId: 01jr88k0cea82v59dt9thke2r9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FlNjg3YzYtMjljNTk3MjktNDg0Yjc1MzctNDRkZjMwMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.236552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720371. Ctx: { TraceId: 01jr88k0ce010278ca29236dek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZkYTU4M2MtN2I1YzU0MGEtOGY2NTFkNDQtODVhNTBlMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.238098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720375. 
Ctx: { TraceId: 01jr88k0ch4h1x676b5c1ganvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIyNTM3MzAtYTU5NGIwNC1kY2ViY2NmNi1lYjUxMWFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-04-07T13:56:09.238412Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720376. Ctx: { TraceId: 01jr88k0chfxzfm90edp03ypyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxYzgxNDUtODRjMmU3ODEtZmIzNDIwYjgtNmQ0YjdiMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.238579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720377. Ctx: { TraceId: 01jr88k0cg096atmh1eng8ndz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRjN2MyZjQtYmQ0YTZiOC1jN2RiMTE3Ni00MzM3ZGFiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034148913 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T13:56:09.243157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720378. Ctx: { TraceId: 01jr88k0cs2wtj0nhzvrgh1y5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEwNzEzMWEtZTQ2MGUwMWYtMWNkMzJjMWYtMmQxNGZiZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034148913 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) Table has 2 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-04-07T13:55:45.343756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569616814992184:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:45.343850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a34/r3tmp/tmp3hfGSf/pdisk_1.dat 2025-04-07T13:55:45.770178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:45.770320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:45.773258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:45.776093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3200, node 1 2025-04-07T13:55:45.831395Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:55:45.831420Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:55:45.848373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:45.858319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:45.859428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:45.860910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:55:45.864104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:55:45.917453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:45.917477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:45.917490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:45.917571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:46.191935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.200353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:46.204780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.207967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:46.208186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:46.208208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:55:46.209707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:46.210212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:46.210235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:55:46.211626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:46.214912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034146260, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:46.214959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:46.215185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:46.216655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:46.216835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:46.216874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:46.216968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:46.217005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:46.217043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:55:46.219214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:46.219260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:46.219284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T13:55:46.219392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:9168 2025-04-07T13:55:48.497361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569629699895088:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:48.515256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:48.736151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:48.736639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T13:55:48.737247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:48.737275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:48.746026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T13:55:48.746275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:48.746533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:48.746609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:55:48.749125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:48.749180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:48.749199Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:48.749425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:48.749435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:48.749441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:55:48.749695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:48.762111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:55:48.762209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T13:55:48.764365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:55:48.836392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:55:48.836439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:55:48.836511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T13:55:48.838463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:55:48.841408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034148885, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:48.841453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034148885 2025-04-07T13:55:48.841566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 28147497 ... set, use /Root 2025-04-07T13:56:09.195405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714311. Ctx: { TraceId: 01jr88k0ar14fwa2wachbqr654, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlOWUwNmUtY2QyMDdjOGItZTI0Nzg2Zi1lNTU0ODg5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.201612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714315. Ctx: { TraceId: 01jr88k0b98rcynqvtyd9frbcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJlOTc4ODctYzQxNjg3YzYtOGE2MzU4Y2MtMTExMTVhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.203448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714316. Ctx: { TraceId: 01jr88k0b984ht967rcd0kepat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ0MzFmNmMtNGI1OGFkMWYtMTliNjVmMjMtNGEyN2Y3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.204482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714317. Ctx: { TraceId: 01jr88k0b9dqknn33wtq850996, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE3ODAyMjItNWY4OGRhZjEtNDlhNGJhZDEtMjEzNWI1NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.205576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714318. Ctx: { TraceId: 01jr88k0bd7px24vp5bhqpvzws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlNzZlN2UtNmY2ZTM3YTctMjQ5NWNhZTYtOGY5MzcyMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.206744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714319. Ctx: { TraceId: 01jr88k0bj5hb499ja2zr155aj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YzOWI1OWMtOTI0Njk0MTctZTc3ZGFhOGYtZTAyMjBhMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.211713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714320. Ctx: { TraceId: 01jr88k0bn63c1f0h0m9drztja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4MmQyYzUtZDQ1N2JiYzMtMjc4Mjk5YWUtMWE2ZjBjMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.215151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714321. Ctx: { TraceId: 01jr88k0bwdtpxaprg71grkq14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ5MzI3M2ItZmYzZDAwMmUtMjBiODE1M2QtNmVmYTFmMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.215519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714322. 
Ctx: { TraceId: 01jr88k0bw60pxq6d057r71h9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlOWUwNmUtY2QyMDdjOGItZTI0Nzg2Zi1lNTU0ODg5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.215719Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714323. Ctx: { TraceId: 01jr88k0bwc7czxch6hkcyvbjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEyYWU4Yy0xZmZhMjg4NC1iNWViN2ZiNC1kZDE3YmZmMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.215972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714324. Ctx: { TraceId: 01jr88k0bw66ck65ghhss9zy2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0NjgwMTctYWFjZGM2YjctYzYwNDEyOGEtYzU2YTE1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.221036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714325. Ctx: { TraceId: 01jr88k0c2aynz2gfxsp3z10es, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJlOTc4ODctYzQxNjg3YzYtOGE2MzU4Y2MtMTExMTVhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.221542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714326. Ctx: { TraceId: 01jr88k0c2etm4d3cv4h3z1230, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ0MzFmNmMtNGI1OGFkMWYtMTliNjVmMjMtNGEyN2Y3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.222511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714327. Ctx: { TraceId: 01jr88k0c497a1em1hahkx3frq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE3ODAyMjItNWY4OGRhZjEtNDlhNGJhZDEtMjEzNWI1NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.224853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714328. Ctx: { TraceId: 01jr88k0c5edndspbv8t8f22nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlNzZlN2UtNmY2ZTM3YTctMjQ5NWNhZTYtOGY5MzcyMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.226449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714329. Ctx: { TraceId: 01jr88k0c442e01mhqt6cencqe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YzOWI1OWMtOTI0Njk0MTctZTc3ZGFhOGYtZTAyMjBhMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.232683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714330. Ctx: { TraceId: 01jr88k0c9ab44az78xj4jkcbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4MmQyYzUtZDQ1N2JiYzMtMjc4Mjk5YWUtMWE2ZjBjMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.249868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714331. Ctx: { TraceId: 01jr88k0cj4ackxhw5ajeqr197, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlOWUwNmUtY2QyMDdjOGItZTI0Nzg2Zi1lNTU0ODg5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-07T13:56:09.255869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714332. 
Ctx: { TraceId: 01jr88k0ck86yd2jw9bwbpr5py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ0MzFmNmMtNGI1OGFkMWYtMTliNjVmMjMtNGEyN2Y3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-04-07T13:56:09.260365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714333. Ctx: { TraceId: 01jr88k0cmaxe78a8ya9bwzyps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0NjgwMTctYWFjZGM2YjctYzYwNDEyOGEtYzU2YTE1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034148885 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T13:56:09.293754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714336. Ctx: { TraceId: 01jr88k0d77a48vhczg37m6w53, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlNzZlN2UtNmY2ZTM3YTctMjQ5NWNhZTYtOGY5MzcyMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.297693Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714334. Ctx: { TraceId: 01jr88k0cweb4qt9djp5nm6bga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE3ODAyMjItNWY4OGRhZjEtNDlhNGJhZDEtMjEzNWI1NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.298158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714338. Ctx: { TraceId: 01jr88k0db0fp44kzpsch0sjp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YzOWI1OWMtOTI0Njk0MTctZTc3ZGFhOGYtZTAyMjBhMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.298481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714335. Ctx: { TraceId: 01jr88k0cw4jbrbdgx5rjytphj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJlOTc4ODctYzQxNjg3YzYtOGE2MzU4Y2MtMTExMTVhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.298786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714337. Ctx: { TraceId: 01jr88k0d8e12kjvaph4sjgc09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEyYWU4Yy0xZmZhMjg4NC1iNWViN2ZiNC1kZDE3YmZmMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:56:09.300133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714339. Ctx: { TraceId: 01jr88k0cw1dv2tgrtvep7chnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ5MzI3M2ItZmYzZDAwMmUtMjBiODE1M2QtNmVmYTFmMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034148885 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T13:56:10.281482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 31400 rowCount 387 cpuUsage 0 2025-04-07T13:56:10.286158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 16488 rowCount 203 cpuUsage 0 Table has 2 shards 2025-04-07T13:56:10.381651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-07T13:56:10.381841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 387, DataSize 31400 2025-04-07T13:56:10.381976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 203, DataSize 16488 2025-04-07T13:56:10.382269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> KqpJoin::JoinWithDuplicates >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |87.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 22714, MsgBus: 22045 2025-04-07T13:55:56.319440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569666353834760:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.319745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e51/r3tmp/tmpDZWPqW/pdisk_1.dat 2025-04-07T13:55:56.873114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:56.873207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:56.886081Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:56.992981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22714, node 1 2025-04-07T13:55:57.199742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:57.199761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:57.199766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:57.199857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22045 TClient is connected to server localhost:22045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:58.245936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.305637Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:01.320874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569666353834760:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:01.320956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:02.633487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569692123639090:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.648595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.984495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:03.100915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696418606507:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.100974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.101137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696418606512:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.104804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:03.114124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569696418606514:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:03.792031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.218300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:04.821245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.401253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.982544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:06.523589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:06.577260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.875445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.903244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.904596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-07T13:56:08.906257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-04-07T13:56:11.305655Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034171285, txId: 281474976710734] shutting down >> KqpFlipJoin::RightSemi_1 >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-04-07T13:55:15.815354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569487881245989:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:15.815384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001783/r3tmp/tmp7Oa40r/pdisk_1.dat 2025-04-07T13:55:16.521785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:16.532898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:16.533003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:16.536992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30062, node 1 2025-04-07T13:55:16.689396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:16.689427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:16.689435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:16.689568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:55:17.061473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.087970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:17.089598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.091886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:17.092039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:17.092054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:55:17.092604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:17.092642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:55:17.093054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.094403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034117140, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:17.094421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:17.094638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:17.095156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:17.095287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:17.095341Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:17.095402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:17.095430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:17.095464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:55:17.097982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:17.098030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:17.098044Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:17.098123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 
2025-04-07T13:55:17.103195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:4473 2025-04-07T13:55:17.410759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.410980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:17.411006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.411613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:17.411796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.413247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034117462, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:17.413278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034117462, at schemeshard: 72057594046644480 2025-04-07T13:55:17.413509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:17.413605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:17.413634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:55:17.414572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:17.414721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:17.415940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:17.415988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:17.416006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:17.416067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2025-04-07T13:55:17.421480Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:17.430093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.430357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:17.430374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.430444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:55:17.430524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:17.430535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:17.430961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:17.431077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:17.431287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:17.432283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:17.432330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:55:17.432343Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:55:17.432396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T13:55:17.437488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:55:17.437703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:17.438314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIR ... f80834dd] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.EmptyBatchRequest 2025-04-07T13:56:11.134948Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [b0f9f903-faf197f8-4fa65d7d-f80834dd] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 
2025-04-07T13:56:11.135080Z node 7 :HTTP DEBUG: (#47,[::1]:58534) <- (400 AWS.SimpleQueueService.EmptyBatchRequest) 2025-04-07T13:56:11.135126Z node 7 :HTTP DEBUG: (#47,[::1]:58534) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ ] } 0 2025-04-07T13:56:11.135163Z node 7 :HTTP DEBUG: (#47,[::1]:58534) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: b0f9f903-faf197f8-4fa65d7d-f80834dd x-amz-crc32: 2331440613 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2025-04-07T13:56:11.135266Z node 7 :HTTP DEBUG: (#47,[::1]:58534) connection closed 2025-04-07T13:56:11.137215Z node 7 :HTTP DEBUG: (#44,[::1]:58544) incoming connection opened 2025-04-07T13:56:11.137286Z node 7 :HTTP DEBUG: (#44,[::1]:58544) -> (POST /Root) 2025-04-07T13:56:11.137425Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d894:7f00:6050:0:c094:7f00:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 756dd281-dce01eb6-48cb04c8-54461113 2025-04-07T13:56:11.137943Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [756dd281-dce01eb6-48cb04c8-54461113] got new request from [d894:7f00:6050:0:c094:7f00:6050:0] 2025-04-07T13:56:11.138266Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [756dd281-dce01eb6-48cb04c8-54461113] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:11.138281Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [756dd281-dce01eb6-48cb04c8-54461113] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:11.176790Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [756dd281-dce01eb6-48cb04c8-54461113] Got succesfult GRPC response. 
2025-04-07T13:56:11.177019Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [756dd281-dce01eb6-48cb04c8-54461113] reply ok 2025-04-07T13:56:11.177276Z node 7 :HTTP DEBUG: (#44,[::1]:58544) <- (200 ) 2025-04-07T13:56:11.177357Z node 7 :HTTP DEBUG: (#44,[::1]:58544) connection closed Http output full {"Failed":[{"Message":"The specified message isn't in flight.","Id":"Id-0","Code":"AWS.SimpleQueueService.MessageNotInflight","SenderFault":true}]} 2025-04-07T13:56:11.181304Z node 7 :HTTP DEBUG: (#44,[::1]:58558) incoming connection opened 2025-04-07T13:56:11.181403Z node 7 :HTTP DEBUG: (#44,[::1]:58558) -> (POST /Root) 2025-04-07T13:56:11.181529Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d84d:b100:6050:0:c04d:b100:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: db769b9c-106ef3e7-d21e1261-62a3b917 2025-04-07T13:56:11.182178Z node 7 :HTTP_PROXY WARN: http request [ChangeMessageVisibilityBatch] requestId [db769b9c-106ef3e7-d21e1261-62a3b917] got new request with incorrect json from [d84d:b100:6050:0:c04d:b100:6050:0] 2025-04-07T13:56:11.182206Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [db769b9c-106ef3e7-d21e1261-62a3b917] reply with status: BAD_REQUEST message: [json.exception.type_error.302] type must be string, but is number 2025-04-07T13:56:11.182368Z node 7 :HTTP DEBUG: (#44,[::1]:58558) <- (400 InvalidArgumentException) 2025-04-07T13:56:11.182420Z node 7 :HTTP DEBUG: (#44,[::1]:58558) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked c3 { "QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ { "Id":"Id-0", "ReceiptHandle":0 } ] } 0 2025-04-07T13:56:11.182461Z node 7 :HTTP DEBUG: (#44,[::1]:58558) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: db769b9c-106ef3e7-d21e1261-62a3b917 x-amz-crc32: 3176743181 Content-Type: application/x-amz-json-1.1 Content-Length: 116 {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2025-04-07T13:56:11.182539Z node 7 :HTTP DEBUG: (#44,[::1]:58558) connection closed Http output full {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2025-04-07T13:56:11.186133Z node 7 :HTTP DEBUG: (#44,[::1]:58570) incoming connection opened 2025-04-07T13:56:11.186206Z node 7 :HTTP DEBUG: (#44,[::1]:58570) -> (POST /Root) 2025-04-07T13:56:11.186324Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18b4:7100:6050:0:b4:7100:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: ba5315e4-8e476121-2f2027c6-2c136983 2025-04-07T13:56:11.186766Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ba5315e4-8e476121-2f2027c6-2c136983] got new request from [18b4:7100:6050:0:b4:7100:6050:0] 2025-04-07T13:56:11.187122Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [ba5315e4-8e476121-2f2027c6-2c136983] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:11.187138Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ba5315e4-8e476121-2f2027c6-2c136983] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:11.200765Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [ba5315e4-8e476121-2f2027c6-2c136983] Got succesfult GRPC response. 2025-04-07T13:56:11.200923Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ba5315e4-8e476121-2f2027c6-2c136983] reply ok 2025-04-07T13:56:11.201107Z node 7 :HTTP DEBUG: (#44,[::1]:58570) <- (200 ) 2025-04-07T13:56:11.201188Z node 7 :HTTP DEBUG: (#44,[::1]:58570) connection closed Http output full {"Failed":[{"Message":"VisibilityTimeout was not provided.","Id":"Id-0","Code":"MissingParameter","SenderFault":true}]} 2025-04-07T13:56:11.202743Z node 7 :HTTP DEBUG: (#47,[::1]:58586) incoming connection opened 2025-04-07T13:56:11.202804Z node 7 :HTTP DEBUG: (#47,[::1]:58586) -> (POST /Root) 2025-04-07T13:56:11.202959Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [58aa:f200:6050:0:40aa:f200:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: f7b66721-b55db574-4e7291a1-ed2560f6 2025-04-07T13:56:11.203427Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [f7b66721-b55db574-4e7291a1-ed2560f6] got new request from [58aa:f200:6050:0:40aa:f200:6050:0] 2025-04-07T13:56:11.203798Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [f7b66721-b55db574-4e7291a1-ed2560f6] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:11.203818Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [f7b66721-b55db574-4e7291a1-ed2560f6] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:11.204835Z node 7 :SQS WARN: Request [f7b66721-b55db574-4e7291a1-ed2560f6] Failed to process receipt handle : (yexception) ydb/core/ymq/base/helpers.cpp:147: Condition violated: `!decoded.empty()' 2025-04-07T13:56:11.205330Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [f7b66721-b55db574-4e7291a1-ed2560f6] Got succesfult GRPC response. 
2025-04-07T13:56:11.205440Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [f7b66721-b55db574-4e7291a1-ed2560f6] reply ok 2025-04-07T13:56:11.205570Z node 7 :HTTP DEBUG: (#47,[::1]:58586) <- (200 ) 2025-04-07T13:56:11.205655Z node 7 :HTTP DEBUG: (#47,[::1]:58586) connection closed Http output full {"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"Id-0","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2025-04-07T13:56:11.207112Z node 7 :HTTP DEBUG: (#44,[::1]:58602) incoming connection opened 2025-04-07T13:56:11.207176Z node 7 :HTTP DEBUG: (#44,[::1]:58602) -> (POST /Root) 2025-04-07T13:56:11.207276Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78af:f100:6050:0:60af:f100:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: fda62a7c-d0e09e2a-6b554387-d8079806 2025-04-07T13:56:11.207713Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [fda62a7c-d0e09e2a-6b554387-d8079806] got new request from [78af:f100:6050:0:60af:f100:6050:0] 2025-04-07T13:56:11.208092Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [fda62a7c-d0e09e2a-6b554387-d8079806] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:11.208111Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [fda62a7c-d0e09e2a-6b554387-d8079806] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:11.218465Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [fda62a7c-d0e09e2a-6b554387-d8079806] Got succesfult GRPC response. 2025-04-07T13:56:11.218556Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [fda62a7c-d0e09e2a-6b554387-d8079806] reply ok 2025-04-07T13:56:11.218697Z node 7 :HTTP DEBUG: (#44,[::1]:58602) <- (200 ) 2025-04-07T13:56:11.218781Z node 7 :HTTP DEBUG: (#44,[::1]:58602) connection closed Http output full {"Successful":[{"Id":"Id-0"}]} 2025-04-07T13:56:11.220210Z node 7 :HTTP DEBUG: (#44,[::1]:58616) incoming connection opened 2025-04-07T13:56:11.220273Z node 7 :HTTP DEBUG: (#44,[::1]:58616) -> (POST /Root) 2025-04-07T13:56:11.220547Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78a8:7100:6050:0:60a8:7100:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: cdadcfe6-450e3edf-938e7c74-35727a28 2025-04-07T13:56:11.221099Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cdadcfe6-450e3edf-938e7c74-35727a28] got new request from [78a8:7100:6050:0:60a8:7100:6050:0] 2025-04-07T13:56:11.221488Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cdadcfe6-450e3edf-938e7c74-35727a28] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:11.221505Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cdadcfe6-450e3edf-938e7c74-35727a28] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:11.243971Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cdadcfe6-450e3edf-938e7c74-35727a28] Got succesfult GRPC response. 
2025-04-07T13:56:11.244063Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cdadcfe6-450e3edf-938e7c74-35727a28] reply ok 2025-04-07T13:56:11.244193Z node 7 :HTTP DEBUG: (#44,[::1]:58616) <- (200 ) 2025-04-07T13:56:11.244264Z node 7 :HTTP DEBUG: (#44,[::1]:58616) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup >> KqpFlipJoin::Inner_1 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> BsControllerConfig::SelectAllGroups >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::FullOuterJoinSizeCheck >> DstCreator::ExistingDst >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits >> DstCreator::SameOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-04-07T13:54:39.836379Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:39.937549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:39.958217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:39.958303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:39.958527Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:39.966486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:39.966696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:39.966916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:39.967028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:39.967143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:39.967261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:39.967383Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:39.967505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:39.967609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:39.967715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.967815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:39.967930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:39.990968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:39.991033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:39.993337Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:39.993487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:39.993531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:39.993731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.993883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:39.993947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:39.993991Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:39.994072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:39.994145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:39.994184Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:39.994210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:39.994400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.994455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:39.994488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:39.994513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:39.994614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:39.994653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:39.994677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:39.994694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:39.994738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:39.994772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:39.994793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:39.994841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:39.994871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:39.994888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:39.995022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-04-07T13:54:39.995081Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T13:54:39.995153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-07T13:54:39.995211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-04-07T13:54:39.995352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:39.995399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:39.995422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:39.995557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:39.995585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.995604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.995720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:39.995750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:39.995779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:39.995926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:39.995953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:39.995969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
age_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=16826; 2025-04-07T13:56:13.631618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=35922; 2025-04-07T13:56:13.631940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=220; 2025-04-07T13:56:13.633407Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=204; 2025-04-07T13:56:13.633482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1490; 2025-04-07T13:56:13.633647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=110; 2025-04-07T13:56:13.633778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:13.633835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=140; 2025-04-07T13:56:13.634011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=133; 2025-04-07T13:56:13.634197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=141; 2025-04-07T13:56:13.634694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=444; 2025-04-07T13:56:13.635549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=802; 2025-04-07T13:56:13.635654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=39; 2025-04-07T13:56:13.635726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-04-07T13:56:13.635780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-04-07T13:56:13.635830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-07T13:56:13.635873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:56:13.635952Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-04-07T13:56:13.635994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:56:13.636080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-07T13:56:13.636131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-07T13:56:13.636208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-04-07T13:56:13.636256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=48643; 2025-04-07T13:56:13.636448Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40210220;raw_bytes=73294644; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64354976;raw_bytes=117832608; inactive portions=44;blobs=88;rows=1246652;bytes=70721288;raw_bytes=129233400; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:13.636576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:13.636649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:13.636718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:13.636791Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:13.636842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:13.636927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:13.637178Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:13.637359Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:13.637412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:13.637465Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:13.637508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:13.637557Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:13.637624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:13.637695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:13.638710Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:13.638824Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1709:3678];tablet_id=9437184;parent=[1:1676:3652];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:13.641106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:13.641316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:13.641352Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:13.641385Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:13.641438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:13.641535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:13.641800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:13.641883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:13.641940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:13.641996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:13.642047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:13.642102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:13.642219Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:13.642302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:13.643116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:13.646251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:13.654413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:13.654495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-04-07T13:54:39.786207Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:39.887447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:39.909036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:39.909131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:39.909364Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:39.921303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:39.921537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:39.921786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:39.921903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:39.922028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:39.922162Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:39.922275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:39.922414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:39.922534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:39.922672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.922800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:39.922900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:39.940406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:39.940475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:39.942385Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:39.942494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:39.942549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:39.942702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.942812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:39.942862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:39.942890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:39.942951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-07T13:54:39.943014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:39.943051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:39.943073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:39.943193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:39.943238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:39.943263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:39.943282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:39.943386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:39.943423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:39.943453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:39.943471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:39.943534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:39.943573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:39.943611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:39.943661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:39.943696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:39.943723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:39.943893Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-07T13:54:39.943976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T13:54:39.944049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T13:54:39.944129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-07T13:54:39.944235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:39.944270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:39.944292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:39.944454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:39.944500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.944530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:39.944637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:39.944677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:39.944699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:39.944826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:39.944853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:39.944882Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
age_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18382; 2025-04-07T13:56:14.480602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=37543; 2025-04-07T13:56:14.480955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=243; 2025-04-07T13:56:14.482465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=218; 2025-04-07T13:56:14.482564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1550; 2025-04-07T13:56:14.482729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=105; 2025-04-07T13:56:14.482862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:14.482921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=149; 2025-04-07T13:56:14.483147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=175; 2025-04-07T13:56:14.483352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=141; 2025-04-07T13:56:14.483956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=554; 2025-04-07T13:56:14.485042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1022; 2025-04-07T13:56:14.485141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-04-07T13:56:14.485263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-07T13:56:14.485312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:56:14.485361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-07T13:56:14.485403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T13:56:14.485484Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-04-07T13:56:14.485532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:56:14.485680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=108; 2025-04-07T13:56:14.485737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T13:56:14.485808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-07T13:56:14.485852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=51620; 2025-04-07T13:56:14.486070Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40210220;raw_bytes=73294644; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64354976;raw_bytes=117832608; inactive portions=44;blobs=88;rows=1246652;bytes=70721288;raw_bytes=129233400; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:14.486188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:14.486258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:14.486316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:14.486398Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:14.486464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:14.486543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.486841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:14.486937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:14.487013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:14.487076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.487127Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.487183Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:14.487262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:14.487331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:14.488240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.488492Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1705:3674];tablet_id=9437184;parent=[1:1672:3648];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:14.490403Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:14.490725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:14.490766Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:14.490800Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:14.490858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:14.490945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.491202Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:14.491278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:14.491592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:14.491668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.491721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.491808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:14.491896Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:14.491963Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:14.492773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:14.496139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:14.505044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:14.505142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1672:3648];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> DstCreator::KeyColumnsSizeMismatch >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> BsControllerConfig::SelectAllGroups [GOOD] >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpJoin::JoinLeftPureExclusion >> IndexBuildTest::RejectsCancel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-04-07T13:54:35.295111Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:35.444936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:35.466332Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:35.466382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:35.466576Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:35.474579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:35.474772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:35.474957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:35.475056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:35.475162Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:35.475294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:35.475368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:35.475446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:35.475538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:35.475642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.475741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:35.475835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:35.508696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:35.508760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:35.511099Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:35.511253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:35.511319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:35.511511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:35.511653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:35.511726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:35.511768Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:35.511852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:35.511908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:35.511953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:35.511979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:35.512123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:35.512174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:35.512211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:35.512247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:35.512349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:35.512396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:35.512438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:35.512462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:35.512522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:35.512565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:35.512591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:35.512631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:35.512699Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:35.512736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:35.512901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T13:54:35.512990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-07T13:54:35.513088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T13:54:35.513163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-07T13:54:35.513313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:35.513377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:35.513414Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:35.513607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:35.513646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.513673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:35.513794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:35.513830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:35.513855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:35.514048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:35.514089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:35.514115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=13461; 2025-04-07T13:56:14.759371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=30800; 2025-04-07T13:56:14.759624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=192; 2025-04-07T13:56:14.760987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=194; 2025-04-07T13:56:14.761066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1396; 2025-04-07T13:56:14.761200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=84; 2025-04-07T13:56:14.761318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:14.761366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=128; 2025-04-07T13:56:14.761557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=146; 2025-04-07T13:56:14.761748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=137; 2025-04-07T13:56:14.762267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=465; 2025-04-07T13:56:14.763099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=775; 2025-04-07T13:56:14.763213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=57; 2025-04-07T13:56:14.763277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-04-07T13:56:14.763320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:56:14.763369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-07T13:56:14.763413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:56:14.763490Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-07T13:56:14.763535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-07T13:56:14.763647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2025-04-07T13:56:14.763700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-07T13:56:14.763779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2025-04-07T13:56:14.763836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=55927; 2025-04-07T13:56:14.764025Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40196124;raw_bytes=67627660; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64332088;raw_bytes=108739216; inactive portions=44;blobs=88;rows=1246652;bytes=70696640;raw_bytes=119259832; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:14.764140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:14.764206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:14.764254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:14.764330Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:14.764380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:14.764474Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.764721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:14.764794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:14.764839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:14.764894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.764950Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.764997Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:14.765063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:14.765113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:14.766021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.766125Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1711:3680];tablet_id=9437184;parent=[1:1678:3654];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:14.768028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:14.768257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:14.768298Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:14.768324Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:14.768379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:14.768445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:14.768684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:14.768774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:14.768836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:14.768892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.768980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:14.769032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:14.769103Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:14.769156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:14.769931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:14.772845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:14.780873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:14.780973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1678:3654];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> VectorIndexBuildTest::BaseCase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-04-07T13:56:15.293034Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:15.297982Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:15.299965Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:15.300506Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:15.301370Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:15.301421Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:15.301732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:15.311137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:15.311292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:15.311446Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:15.311548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:15.311629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:15.311687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
2025-04-07T13:56:15.524117Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.170016s 2025-04-07T13:56:15.524262Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.170192s >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:47.584998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:47.585104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.585144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:47.585177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:47.585221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:47.585250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:47.585304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:47.585648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:47.657232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:47.657305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:47.669232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:47.669381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:47.669565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:47.675819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:47.676037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:47.676747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.677005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:47.679189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.680609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.680695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-07T13:55:47.680879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:47.680930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.680971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:47.681154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.688499Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:47.843257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:47.843487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.843729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:47.843983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:47.844052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.846307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.846450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:47.846626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.846685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:47.846764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:47.846821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:47.848770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.848845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:47.848929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:47.850707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.850755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.850796Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.850845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.854773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:47.856688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:47.856884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:47.857932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:47.858063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:47.858118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.858364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:47.858416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:47.858596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.858684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:47.860596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:47.860642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:47.860850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:47.860891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:47.861168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:47.861216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:47.861319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:47.861358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T13:55:47.861408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:47.861450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:47.861492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:47.861533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:47.861607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:47.861672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:47.861702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:47.863578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.863681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:47.863778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:47.863821Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:47.863877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:47.863988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
EBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1140:3004], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:17.203546Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T13:56:17.205145Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T13:56:17.205224Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1140:3004], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:17.205257Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T13:56:17.205400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:56:17.205444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1232:3086] TestWaitNotification: OK eventTxId 102 2025-04-07T13:56:17.207810Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-07T13:56:17.207978Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-04-07T13:56:17.210216Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: 
"/MyRoot" IndexBuildId: 102 2025-04-07T13:56:17.210452Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2025-04-07T13:56:17.213001Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:56:17.213314Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 353us result status StatusSuccess 2025-04-07T13:56:17.213835Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T13:56:17.216562Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:56:17.217037Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 493us result status StatusSuccess 2025-04-07T13:56:17.217923Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DstCreator::ColumnsSizeMismatch >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> DstCreator::WithSyncIndexAndIntermediateDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:48.946446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:48.946559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.946598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:48.946627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:48.946672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:48.946695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:48.946745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.947083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:49.019928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:49.019979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:49.030477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:55:49.030592Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:49.030765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:49.035805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:49.035984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:49.036551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.036765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:49.038566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.039913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.039970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.040085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:49.040161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.040200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:49.040357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.046702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.154579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.154835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.155051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.155254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.155298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.160779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.160921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.161120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.161170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:49.161216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.161245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.164040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.164098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.164167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.166163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.166210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.166249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.166310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.176603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.178654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.178875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.179827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.179958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.180024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.180289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:49.180333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.180507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.180580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:49.189160Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.191654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.191868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.191907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.192219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.192271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.192383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.192415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.192474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.192518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.192580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.192608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.192706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.192762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.192793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.194785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.194890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.194926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.194968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.195013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.195134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: 1 siblings to be activated: wait to activation from: 2025-04-07T13:56:17.146656Z node 1 :TX_DATASHARD INFO: 72075186233409583 Initiating switch from PreOffline to Offline state 2025-04-07T13:56:17.148257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:7343:8854], Recipient [1:7130:8651]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:56:17.148305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:56:17.150862Z node 1 :TX_DATASHARD INFO: 72075186233409583 Reporting state Offline to schemeshard 72075186233409571 2025-04-07T13:56:17.151170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [1:7121:8644], Recipient [1:7130:8651]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T13:56:17.151569Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [1:7453:8964], Recipient [1:7130:8651]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T13:56:17.151604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T13:56:17.151749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409571, message: Source { RawX1: 7130 RawX2: 4294975947 } TabletId: 72075186233409583 State: 4 2025-04-07T13:56:17.151817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409583, state: Offline, at schemeshard: 72075186233409571 2025-04-07T13:56:17.154385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409571:13 hive 72057594037968897 at ss 72075186233409571 2025-04-07T13:56:17.154615Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:5025:6678], Recipient [1:7130:8651]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409571 State: 4 2025-04-07T13:56:17.154655Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-04-07T13:56:17.154686Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409583 state Offline 2025-04-07T13:56:17.154925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:7453:8964], Recipient [1:7130:8651]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:56:17.154971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:56:17.155440Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409571 ShardLocalIdx: 13 TxId_Deprecated: 13 TabletID: 72075186233409583 Forgetting tablet 72075186233409583 2025-04-07T13:56:17.155793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:7121:8644], Recipient [1:7130:8651]: NKikimr::TEvTablet::TEvTabletDead 2025-04-07T13:56:17.156049Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409583 2025-04-07T13:56:17.156103Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409583 2025-04-07T13:56:17.157764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 13 ShardOwnerId: 72075186233409571 ShardLocalIdx: 13, at schemeshard: 72075186233409571 2025-04-07T13:56:17.158027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409571, LocalPathId: 12] was 1 2025-04-07T13:56:17.158948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72075186233409571 2025-04-07T13:56:17.158992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409571, LocalPathId: 12], at schemeshard: 72075186233409571 2025-04-07T13:56:17.159061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409571, LocalPathId: 3] was 4 2025-04-07T13:56:17.163762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409571:13 2025-04-07T13:56:17.163838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409571:13 tabletId 72075186233409583 2025-04-07T13:56:17.164641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409571 2025-04-07T13:56:17.217164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3100, transactions count in step: 1, at schemeshard: 72075186233409571 2025-04-07T13:56:17.217315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735770 AckTo { RawX1: 5129 RawX2: 4294974052 } } Step: 3100 MediatorID: 72075186233409573 TabletID: 72075186233409571, at schemeshard: 72075186233409571 2025-04-07T13:56:17.217395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409571] TDropLock TPropose opId# 281474976735770:0 HandleReply TEvOperationPlan: step# 3100 2025-04-07T13:56:17.217452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735770:0 128 -> 240 2025-04-07T13:56:17.222031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735770:0, at schemeshard: 72075186233409571 2025-04-07T13:56:17.222103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409571] TDone opId# 281474976735770:0 ProgressState 2025-04-07T13:56:17.222204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735770:0 progress is 1/1 2025-04-07T13:56:17.222236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735770 ready parts: 1/1 2025-04-07T13:56:17.222277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735770, ready parts: 1/1, is published: true 2025-04-07T13:56:17.222354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:5025:6678] message: TxId: 281474976735770 2025-04-07T13:56:17.222402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735770 ready parts: 1/1 2025-04-07T13:56:17.222435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735770:0 2025-04-07T13:56:17.222465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735770:0 2025-04-07T13:56:17.222545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409571, LocalPathId: 2] was 4 2025-04-07T13:56:17.227612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735770 2025-04-07T13:56:17.227717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735770 2025-04-07T13:56:17.227787Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735770, buildInfoId: 115 2025-04-07T13:56:17.227876Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735770, buildInfo: TBuildInfo{ 
IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409571, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409571, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:5888:7496], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735769, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735770, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:17.230997Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-07T13:56:17.231104Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409571, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409571, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:5888:7496], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735769, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735770, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:17.231180Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T13:56:17.235254Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-07T13:56:17.235363Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409571, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409571, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:5888:7496], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735769, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735770, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T13:56:17.235429Z node 1 :BUILD_INDEX TRACE: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-04-07T13:56:17.235646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-04-07T13:56:17.235700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:6095:7663] TestWaitNotification: OK eventTxId 115 2025-04-07T13:56:17.244655Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-04-07T13:56:17.246098Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> DstCreator::Basic >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-04-07T13:55:12.680454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569478315613420:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:12.680530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017ac/r3tmp/tmpeEkRCN/pdisk_1.dat 2025-04-07T13:55:13.382552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:13.382646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:13.390732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:13.462393Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26327, node 1 2025-04-07T13:55:13.548542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:13.548559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:13.548565Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:13.548676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26126 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:14.052293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.069859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:14.071358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.071915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:14.072114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:14.072132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:55:14.072524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:14.072544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:55:14.072909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.074612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034114123, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:14.074758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:14.075066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:14.075713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.075879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:14.075920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:14.075988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:14.076019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:14.076063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:55:14.078844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:14.078908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:14.078919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:14.078985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T13:55:14.080124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:26126 2025-04-07T13:55:14.433512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.433730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:14.433751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.434289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:14.434399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.435676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034114480, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:14.435704Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034114480, at schemeshard: 72057594046644480 2025-04-07T13:55:14.435908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:14.435974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:14.435998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:55:14.436447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.436548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:14.441902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:14.441952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:14.442082Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:14.442181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2025-04-07T13:55:14.443013Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:14.444576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.444869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:14.444891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.444965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:55:14.445063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:14.445079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:14.445652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:14.445752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.447660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:14.447928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:55:14.448068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046644480 2025-04-07T13:55:14.449299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS 2025-04-07T13:55:14.449430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:14.449615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:14.449682Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:0 ProgressState, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:14.452870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:14.452905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Public ... quest [DeleteMessageBatch] requestId [4e3a6a43-b455cfee-f3be08e0-714618cb] got new request with incorrect json from [38ab:501:6050:0:20ab:501:6050:0] 2025-04-07T13:56:17.616643Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [4e3a6a43-b455cfee-f3be08e0-714618cb] reply with status: BAD_REQUEST message: LogMessageFatal exception 2025-04-07T13:56:17.616776Z node 7 :HTTP DEBUG: (#47,[::1]:53636) <- (400 InvalidArgumentException) 2025-04-07T13:56:17.616815Z node 7 :HTTP DEBUG: (#47,[::1]:53636) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": { } } 0 2025-04-07T13:56:17.616848Z node 7 :HTTP DEBUG: (#47,[::1]:53636) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 4e3a6a43-b455cfee-f3be08e0-714618cb x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2025-04-07T13:56:17.616938Z node 7 :HTTP DEBUG: (#47,[::1]:53636) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} F0000 00:00:1744034177.620350 214646 generated_message_reflection.cc:181] F0000 00:00:1744034177.620350 214646 generated_message_reflection.cc:181] 2025-04-07T13:56:17.618102Z node 7 :HTTP DEBUG: (#45,[::1]:53644) incoming connection opened 2025-04-07T13:56:17.618171Z node 7 :HTTP DEBUG: (#45,[::1]:53644) -> (POST /Root) 2025-04-07T13:56:17.619808Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d887:5600:6050:0:c087:5600:6050:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: d3d685dc-ef032176-31dc492b-bd562c60 2025-04-07T13:56:17.620638Z node 7 :HTTP_PROXY WARN: http request [DeleteMessageBatch] requestId [d3d685dc-ef032176-31dc492b-bd562c60] got new request with incorrect json from [d887:5600:6050:0:c087:5600:6050:0] 2025-04-07T13:56:17.620659Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [d3d685dc-ef032176-31dc492b-bd562c60] reply with status: BAD_REQUEST message: LogMessageFatal exception 2025-04-07T13:56:17.620915Z node 7 :HTTP DEBUG: (#45,[::1]:53644) <- (400 InvalidArgumentException) 2025-04-07T13:56:17.620981Z node 7 :HTTP DEBUG: (#45,[::1]:53644) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: 
AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 7a { "QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries":"" } 0 2025-04-07T13:56:17.621010Z node 7 :HTTP DEBUG: (#45,[::1]:53644) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: d3d685dc-ef032176-31dc492b-bd562c60 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2025-04-07T13:56:17.621080Z node 7 :HTTP DEBUG: (#45,[::1]:53644) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2025-04-07T13:56:17.625637Z node 7 :HTTP DEBUG: (#45,[::1]:53658) incoming connection opened 2025-04-07T13:56:17.625698Z node 7 :HTTP DEBUG: (#45,[::1]:53658) -> (POST /Root) 2025-04-07T13:56:17.625832Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b82c:2000:6050:0:a02c:2000:6050:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: c54e2d0f-49f5a0b7-e0ff5a99-f79bc361 2025-04-07T13:56:17.629109Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c54e2d0f-49f5a0b7-e0ff5a99-f79bc361] got new request from [b82c:2000:6050:0:a02c:2000:6050:0] 2025-04-07T13:56:17.629659Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [c54e2d0f-49f5a0b7-e0ff5a99-f79bc361] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:17.629694Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c54e2d0f-49f5a0b7-e0ff5a99-f79bc361] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:17.680509Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [c54e2d0f-49f5a0b7-e0ff5a99-f79bc361] Got succesfult GRPC response. 2025-04-07T13:56:17.680736Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c54e2d0f-49f5a0b7-e0ff5a99-f79bc361] reply ok 2025-04-07T13:56:17.681004Z node 7 :HTTP DEBUG: (#45,[::1]:53658) <- (200 ) 2025-04-07T13:56:17.681130Z node 7 :HTTP DEBUG: (#45,[::1]:53658) connection closed Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"3a33ac90-7e5452eb-aa8fdbfd-58df2b9c"},{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"b990a729-66a0853f-2b7832b5-9d062fe5"}]} 2025-04-07T13:56:17.685959Z node 7 :HTTP DEBUG: (#45,[::1]:53660) incoming connection opened 2025-04-07T13:56:17.686030Z node 7 :HTTP DEBUG: (#45,[::1]:53660) -> (POST /Root) 2025-04-07T13:56:17.686198Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b822:c300:6050:0:a022:c300:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 2fdd866b-ecad7c79-fad13887-27d097e6 2025-04-07T13:56:17.686613Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2fdd866b-ecad7c79-fad13887-27d097e6] got new request from [b822:c300:6050:0:a022:c300:6050:0] 2025-04-07T13:56:17.686995Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [2fdd866b-ecad7c79-fad13887-27d097e6] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:17.687015Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2fdd866b-ecad7c79-fad13887-27d097e6] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:17.754447Z node 7 :SQS WARN: Mark infly [cloud4/000000000000000101v0/0] for reloading. Reason: MessageDeleted 2025-04-07T13:56:17.755293Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [2fdd866b-ecad7c79-fad13887-27d097e6] Got succesfult GRPC response. 2025-04-07T13:56:17.755444Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2fdd866b-ecad7c79-fad13887-27d097e6] reply ok 2025-04-07T13:56:17.755608Z node 7 :HTTP DEBUG: (#45,[::1]:53660) <- (200 ) 2025-04-07T13:56:17.755718Z node 7 :HTTP DEBUG: (#45,[::1]:53660) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1744034177627","ApproximateFirstReceiveTimestamp":"1744034177720","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAEguMWmhOEyKAA","Body":"MessageBody-0","MessageId":"3a33ac90-7e5452eb-aa8fdbfd-58df2b9c"}]} 2025-04-07T13:56:17.757691Z node 7 :HTTP DEBUG: (#47,[::1]:53674) incoming connection opened 2025-04-07T13:56:17.757755Z node 7 :HTTP DEBUG: (#47,[::1]:53674) -> (POST /Root) 2025-04-07T13:56:17.757857Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18b8:7c00:6050:0:b8:7c00:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3 2025-04-07T13:56:17.758183Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3] got new request from [18b8:7c00:6050:0:b8:7c00:6050:0] 2025-04-07T13:56:17.758520Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:17.758537Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:17.785022Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3] Got succesfult GRPC response. 
2025-04-07T13:56:17.785218Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [1e4aa327-1e1cbaef-e19a0cb7-f5f3d1f3] reply ok 2025-04-07T13:56:17.785415Z node 7 :HTTP DEBUG: (#47,[::1]:53674) <- (200 ) 2025-04-07T13:56:17.785539Z node 7 :HTTP DEBUG: (#47,[::1]:53674) connection closed Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1744034177627","ApproximateFirstReceiveTimestamp":"1744034177757","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAIg3cWmhOEyKAA","Body":"MessageBody-1","MessageId":"b990a729-66a0853f-2b7832b5-9d062fe5"}]} 2025-04-07T13:56:17.787125Z node 7 :HTTP DEBUG: (#45,[::1]:53682) incoming connection opened 2025-04-07T13:56:17.787188Z node 7 :HTTP DEBUG: (#45,[::1]:53682) -> (POST /Root) 2025-04-07T13:56:17.787287Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8a6:9a00:6050:0:c0a6:9a00:6050:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 8ab72ad3-3bfee4f6-3d8c0677-d40ff22e 2025-04-07T13:56:17.787760Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8ab72ad3-3bfee4f6-3d8c0677-d40ff22e] got new request from [d8a6:9a00:6050:0:c0a6:9a00:6050:0] 2025-04-07T13:56:17.788092Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [8ab72ad3-3bfee4f6-3d8c0677-d40ff22e] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:17.788111Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8ab72ad3-3bfee4f6-3d8c0677-d40ff22e] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:17.823944Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [8ab72ad3-3bfee4f6-3d8c0677-d40ff22e] Got succesfult GRPC response. 2025-04-07T13:56:17.824088Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8ab72ad3-3bfee4f6-3d8c0677-d40ff22e] reply ok 2025-04-07T13:56:17.824365Z node 7 :HTTP DEBUG: (#45,[::1]:53682) <- (200 ) 2025-04-07T13:56:17.824476Z node 7 :HTTP DEBUG: (#45,[::1]:53682) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-04-07T13:56:17.825825Z node 7 :HTTP DEBUG: (#45,[::1]:53696) incoming connection opened 2025-04-07T13:56:17.834018Z node 7 :HTTP DEBUG: (#45,[::1]:53696) -> (POST /Root) 2025-04-07T13:56:17.834169Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [f8be:1301:6050:0:e0be:1301:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: b41f4892-a4272ed1-bc4239c1-e0edb764 2025-04-07T13:56:17.834548Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b41f4892-a4272ed1-bc4239c1-e0edb764] got new request from [f8be:1301:6050:0:e0be:1301:6050:0] 2025-04-07T13:56:17.834905Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [b41f4892-a4272ed1-bc4239c1-e0edb764] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:17.834942Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b41f4892-a4272ed1-bc4239c1-e0edb764] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:17.835774Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [b41f4892-a4272ed1-bc4239c1-e0edb764] Got succesfult GRPC response. 
2025-04-07T13:56:17.835882Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b41f4892-a4272ed1-bc4239c1-e0edb764] reply ok Http output full {} 2025-04-07T13:56:17.835957Z node 7 :HTTP DEBUG: (#45,[::1]:53696) <- (200 ) 2025-04-07T13:56:17.836044Z node 7 :HTTP DEBUG: (#45,[::1]:53696) connection closed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-04-07T13:54:39.890616Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:39.978650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:40.002316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:40.002390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:40.002640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:40.011073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:40.011320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:40.011541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:40.011650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:40.011766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:40.011904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:40.012008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:40.012108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:40.012216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:40.012333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.012471Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:40.012574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:40.035967Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:40.036044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:40.039023Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:40.039178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:40.039225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:40.039447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.039612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:40.039718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:40.039764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:40.039852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:40.039915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:40.039960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:40.039993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:40.040206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:40.040278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:40.040321Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:40.040352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:40.040482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:40.040542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:40.040590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:40.040623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:40.040779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:40.040837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:40.040872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:40.040921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:40.040963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:40.041006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:40.041205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-07T13:54:40.041301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-07T13:54:40.041392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-07T13:54:40.041494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-07T13:54:40.041648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:40.041715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:40.041747Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:40.041960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:40.042007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.042042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:40.042190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:40.042229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:40.042260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:40.042466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:40.042519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:40.042551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=15881; 2025-04-07T13:56:17.647585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=34147; 2025-04-07T13:56:17.647914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=246; 2025-04-07T13:56:17.649534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=213; 2025-04-07T13:56:17.649634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1646; 2025-04-07T13:56:17.649784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2025-04-07T13:56:17.649924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:17.649985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=159; 2025-04-07T13:56:17.650182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=150; 2025-04-07T13:56:17.650419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=185; 2025-04-07T13:56:17.650983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=503; 2025-04-07T13:56:17.651925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=881; 2025-04-07T13:56:17.652033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-04-07T13:56:17.652113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-04-07T13:56:17.652163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:56:17.652229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-07T13:56:17.652278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-07T13:56:17.652377Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=47; 2025-04-07T13:56:17.652443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-07T13:56:17.652548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-07T13:56:17.652594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T13:56:17.652720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=83; 2025-04-07T13:56:17.652785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47201; 2025-04-07T13:56:17.653026Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40196124;raw_bytes=67627660; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64332088;raw_bytes=108739216; inactive portions=44;blobs=88;rows=1246652;bytes=70696640;raw_bytes=119259832; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:17.653161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:17.653240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:17.653301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:17.653418Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:17.653496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:17.653579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:17.653820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:17.653908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:17.653989Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:17.654054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:17.654101Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:17.654162Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:17.654432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:17.654510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:17.655454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:17.655594Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1702:3671];tablet_id=9437184;parent=[1:1669:3645];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:17.657085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:17.658437Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:17.658499Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:17.658535Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:17.658598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:17.658708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:17.658995Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:17.659095Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:17.659174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:17.659240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:17.659305Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:17.659357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:17.659436Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:17.659502Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:17.660285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:17.663851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:17.672186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:17.672291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1669:3645];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> DstCreator::WithSyncIndex >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 19197, MsgBus: 12794 2025-04-07T13:56:07.809371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569713774182848:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:07.809418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e27/r3tmp/tmpo9LuR8/pdisk_1.dat 2025-04-07T13:56:08.468107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:08.471529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:08.471607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:08.476368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19197, node 1 2025-04-07T13:56:08.653355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:08.653382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:08.653392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:08.653484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12794 TClient is connected to server localhost:12794 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:09.401225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:09.420559Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:11.391827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569730954052448:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:11.392030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:11.619116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:11.720241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569730954052567:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:11.720332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:11.720695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569730954052572:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:11.724240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:11.731389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569730954052574:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:12.448946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:12.810376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569713774182848:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:12.810589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:12.945756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:13.395536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:13.879679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:14.334292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:14.842657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:14.928497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.041140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> DstCreator::WithIntermediateDir >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightSemi_2 >> DstCreator::SameOwner [GOOD] >> DstCreator::ReplicationModeMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-04-07T13:54:38.528595Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:38.672456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:38.692857Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:38.692922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:38.693124Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:38.700617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:38.700824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:38.701026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:38.701119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:38.701228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:38.701338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:38.701444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:38.701543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:38.701628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:38.701734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.701825Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:38.701928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:38.723217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:38.723273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:38.725306Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:38.725447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:38.725528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:38.725707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.725855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:38.725920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:38.725959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:38.726054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:38.726121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:38.726166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:38.726195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:38.726348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.726405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:38.726443Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:38.726482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:38.726570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:38.726615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:38.726654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:38.726678Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:38.726758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:38.726813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:38.726852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:38.726900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:38.726933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:38.726956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:38.727112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-07T13:54:38.727211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-07T13:54:38.727303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T13:54:38.727382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-07T13:54:38.727528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:38.727585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:38.727615Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:38.727786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:38.727830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.727859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.727993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:38.728032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:38.728064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:38.728232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:38.728276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:38.728300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=15128; 2025-04-07T13:56:18.770405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=30846; 2025-04-07T13:56:18.770655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=180; 2025-04-07T13:56:18.772027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=191; 2025-04-07T13:56:18.772102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1398; 2025-04-07T13:56:18.772251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=96; 2025-04-07T13:56:18.772373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:18.772427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=135; 2025-04-07T13:56:18.772592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=124; 2025-04-07T13:56:18.772820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=183; 2025-04-07T13:56:18.773353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=480; 2025-04-07T13:56:18.774184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=778; 2025-04-07T13:56:18.774273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-04-07T13:56:18.774334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-04-07T13:56:18.774372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:56:18.774419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-07T13:56:18.774482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-07T13:56:18.774567Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-07T13:56:18.774610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T13:56:18.774710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-07T13:56:18.774761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:56:18.774826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-07T13:56:18.774868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=42637; 2025-04-07T13:56:18.775084Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40210220;raw_bytes=73294644; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64354976;raw_bytes=117832608; inactive portions=44;blobs=88;rows=1246652;bytes=70721288;raw_bytes=129233400; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:18.775197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:18.775262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:18.775316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:18.775390Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:18.775443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:18.775510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:18.775745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:18.775816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:18.775862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:18.775917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:18.775956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:18.776035Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:18.776113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:18.776163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:18.776877Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1709:3678];tablet_id=9437184;parent=[1:1676:3652];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=91};; 2025-04-07T13:56:18.777624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:18.778912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:18.779132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:18.779167Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:18.779198Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:18.779247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:18.779317Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:18.779550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:18.779620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:18.779692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:18.779772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:18.779823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:18.779888Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:18.779968Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:18.780026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:18.780745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2025-04-07T13:56:18.783682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2025-04-07T13:56:18.791314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:18.791393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1676:3652];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> KqpJoin::JoinWithDuplicates [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> DstCreator::StrongConsistency >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> DstCreator::KeyColumnNameMismatch >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> LocalPartition::Restarts >> TxUsage::WriteToTopic_Demo_3 >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin >> TxUsage::WriteToTopic_Demo_2 >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx >> BasicUsage::ConnectToYDB >> DstCreator::EmptyReplicationConfig [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-04-07T13:56:15.888453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569748556174632:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:15.888494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001870/r3tmp/tmpfJNp0b/pdisk_1.dat 2025-04-07T13:56:16.281111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:16.281245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:16.282583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:16.319638Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1242 TServer::EnableGrpc on GrpcPort 65064, node 1 2025-04-07T13:56:16.569232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:16.569257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:16.569267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:16.569360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:17.034008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:17.051952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:17.055225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:17.197265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034177095 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034177263 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034177095 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034177263 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2025-04-07T13:56:17.241704Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:17.241835Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:17.241848Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:17.243053Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:19.076921Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, 
owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034177172, tx_id: 281474976710658 } } } 2025-04-07T13:56:19.077423Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:19.079386Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:19.081759Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034177263 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: 
"compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 C ... _client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001870/r3tmp/tmpNchH2k/pdisk_1.dat 2025-04-07T13:56:19.803913Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:19.848972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:19.849074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:19.850472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13082 TServer::EnableGrpc on GrpcPort 22009, node 2 2025-04-07T13:56:20.040098Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:20.040114Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:20.040118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:20.040180Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:20.429971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:20.440411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:20.553109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034180476 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034180658 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034180476 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034180658 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2025-04-07T13:56:20.628699Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:20.628812Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:20.628822Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:20.632822Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:23.060802Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034180532, tx_id: 281474976715658 } } } 2025-04-07T13:56:23.061048Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:23.062142Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:23.062855Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034180658 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:23.062980Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> KqpJoin::JoinLeftPureExclusion [GOOD] >> KqpJoin::JoinLeftPureCross ------- [TS] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 12885, MsgBus: 6771 2025-04-07T13:56:13.167507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569739815815092:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:13.167734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e13/r3tmp/tmpEyToIG/pdisk_1.dat 2025-04-07T13:56:13.645625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:13.645751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:13.648873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:13.677608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12885, node 1 2025-04-07T13:56:13.819310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:13.819338Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:13.819347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:13.819456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6771 TClient is connected to server localhost:6771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:14.641136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:14.657402Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:16.825565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569752700717476:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.839089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.848956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.978911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569752700717595:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.978994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.979333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569752700717600:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.983659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.998916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569752700717602:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:17.610979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.066716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:18.149607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569739815815092:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:18.149672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:18.670416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.236687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.778089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.431071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:20.482240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.054137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore >> DstCreator::WithSyncIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-04-07T13:56:19.560257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569763475509222:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:19.560304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001827/r3tmp/tmpKQfZKx/pdisk_1.dat 2025-04-07T13:56:20.200956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:20.206344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:20.206428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:20.222987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13072 TServer::EnableGrpc on GrpcPort 18617, node 1 2025-04-07T13:56:20.676043Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:20.676063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:20.676071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:20.676161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13072 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:21.131978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:21.165089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:21.174390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034181666 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034181204 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034181666 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:56:21.727774Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:21.727900Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:21.727936Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:21.729188Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:23.393440Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034181666, tx_id: 281474976710658 } } } 2025-04-07T13:56:23.393855Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:23.395876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-07T13:56:23.398570Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:56:23.398591Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:56:23.504085Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:56:23.506447Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034183514 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false 
IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAh ... 
wner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-04-07T13:56:23.543992Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034183514 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" ... 
(TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034183514 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> DstCreator::StrongConsistency [GOOD] >> DstCreator::SamePartitionCount >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-04-07T13:56:20.757811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569768623112943:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:20.758118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00180c/r3tmp/tmpzkdh4o/pdisk_1.dat 2025-04-07T13:56:21.409903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:21.410007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:21.411490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:21.437344Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13843 TServer::EnableGrpc on GrpcPort 3489, node 1 2025-04-07T13:56:21.745277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:21.745298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:21.745304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:21.745377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13843 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:22.171130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.189471Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:22.197032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034182590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182226 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034182590 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:56:22.700983Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.701084Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.701099Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:22.701776Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:24.455718Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034182590, tx_id: 281474976710658 } } } 2025-04-07T13:56:24.456222Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:24.458368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.460810Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:56:24.460834Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:56:24.536436Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-07T13:56:24.537898Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184571 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" 
TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InM ... 
rceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-04-07T13:56:24.600346Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 
PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184571 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184571 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184571 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184571 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationModeMismatch [GOOD] Test command err: 2025-04-07T13:56:16.506944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569749626059738:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:16.507301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001855/r3tmp/tmp5juPWy/pdisk_1.dat 2025-04-07T13:56:17.020503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:17.020615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:17.026249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:17.049277Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17080 TServer::EnableGrpc on GrpcPort 61107, node 1 2025-04-07T13:56:17.533278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:17.533304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:17.533310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:17.533389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:18.042645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:18.057449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:18.059362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.064079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034178194 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034178096 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034178194 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... (TRUNCATED) 2025-04-07T13:56:18.232694Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:18.232784Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:18.232798Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:18.233563Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:20.538285Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1744034178194, tx_id: 281474976710659 } } } 2025-04-07T13:56:20.538631Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:20.540091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.541397Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-07T13:56:20.541418Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2025-04-07T13:56:20.590006Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-04-07T13:56:20.590035Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744034180623 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2025-04-07T13:56:21.423637Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569772178543868:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:21.423758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001855/r3tmp/tmp4iQTz5/pdisk_1.dat 2025-04-07T13:56:21.749175Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:21.767932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:21.768032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:21.769657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64517 TServer::EnableGrpc on GrpcPort 10656, node 2 2025-04-07T13:56:22.097380Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:22.097405Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:22.097415Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:22.097495Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:22.525746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.539127Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:22.543433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.619424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182576 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034182709 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182576 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034182709 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2025-04-07T13:56:22.680236Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.680332Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.680343Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:22.684776Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:25.016796Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034182639, tx_id: 281474976710658 } } } 2025-04-07T13:56:25.017068Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:25.018358Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:25.019226Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034182709 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE Consistency: CONSISTENCY_UNKNOWN } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:25.019418Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication mode mismatch: expected: REPLICATION_MODE_READ_ONLY, got: 0 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-04-07T13:56:17.317784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569754755285974:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:17.317829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001844/r3tmp/tmpc77Tn1/pdisk_1.dat 2025-04-07T13:56:17.903459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:17.903557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:17.906660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:17.910808Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30360 TServer::EnableGrpc on GrpcPort 13718, node 1 2025-04-07T13:56:18.358355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:18.358382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:18.358391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:18.358485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:18.995372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:19.023978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:56:19.151590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034179055 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034179230 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034179055 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034179230 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2025-04-07T13:56:19.211237Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:19.211358Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:19.211374Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:19.215764Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:21.185129Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034179132, tx_id: 281474976710658 } } } 2025-04-07T13:56:21.185508Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:21.186962Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:21.188960Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034179230 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 7 ... 
IVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:22.156231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22758 TServer::EnableGrpc on GrpcPort 6940, node 2 2025-04-07T13:56:22.332438Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:22.332467Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:22.332474Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:22.332569Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:22.686898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.696542Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:22.701003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.766810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182737 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034182842 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182737 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034182842 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2025-04-07T13:56:22.808178Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.808305Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.808318Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:22.811818Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:25.133549Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034182800, tx_id: 281474976715658 } } } 2025-04-07T13:56:25.133814Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:25.135030Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:25.135710Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" 
PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034182842 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:25.135839Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value >> Trace::SkipSpaces [GOOD] >> Trace::TTraceEvent [GOOD] >> Trace::TExpectedTraceEvent [GOOD] >> Trace::TExpectedTrace [GOOD] >> TxUsage::SessionAbort >> TxUsage::WriteToTopic_Demo_11 >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 22257, MsgBus: 32551 2025-04-07T13:55:57.603335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569670893035586:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:57.607521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e37/r3tmp/tmpkV9DtH/pdisk_1.dat 2025-04-07T13:55:58.447474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:58.455136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:58.455208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:58.461464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22257, node 1 2025-04-07T13:55:58.801471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:58.801490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:58.801518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:58.801610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32551 TClient is connected to server localhost:32551 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:00.583600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:00.643685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:02.594952Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569670893035586:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:02.595030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:03.973657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569696662839879:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.973743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.280142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.389952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569700957807293:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.390124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.392316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569700957807299:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:04.395965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.407163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569700957807301:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:05.032869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:05.527412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-07T13:56:06.084275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:06.652486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.198662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.735354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:07.804658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:11.561077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710731:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 7425, MsgBus: 12757 2025-04-07T13:56:12.807936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569731912411772:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:12.908133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e37/r3tmp/tmpwQqZCa/pdisk_1.dat 2025-04-07T13:56:13.100721Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:13.123708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:13.123795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:13.125766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7425, node 2 2025-04-07T13:56:13.171640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:13.171770Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:13.171780Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:13.171881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12757 TClient is connected to server localhost:12757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:13.719036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:13.725704Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:16.350023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569749092281439:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.350139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.374767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.442458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569749092281556:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.442543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.443429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569749092281561:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.447610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.463379Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:56:16.463940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569749092281563:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:17.132453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.771650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:17.810635Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569731912411772:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:17.810742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:18.427281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.107120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.681613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.402427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:20.473051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.533333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710731:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 21692, MsgBus: 29122 2025-04-07T13:56:13.124841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569737010093858:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:13.124929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e14/r3tmp/tmpEsgWsT/pdisk_1.dat 2025-04-07T13:56:13.618480Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21692, node 1 2025-04-07T13:56:13.625568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:13.625679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:13.634274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:13.718358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:13.718383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:13.718392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:13.718473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29122 TClient is connected to server localhost:29122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:14.248446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:16.197995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569749894996378:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.198143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.391152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.560780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569749894996497:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.560887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.561162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569749894996502:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:16.564301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:16.572372Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:56:16.572909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569749894996504:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:17.287545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.825722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:18.131358Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569737010093858:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:18.158087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:18.370762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.991187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.698945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.318918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:20.360368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.646004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.689905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.694838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.696308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-04-07T13:56:25.422202Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034185446, txId: 281474976710752] shutting down >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> Describe::Basic >> DstCreator::ColumnTypeMismatch [GOOD] >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> DstCreator::CannotFindColumn [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TxUsage::WriteToTopic_Invalid_Session >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::ResolvedTimestamps >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-04-07T13:56:19.310903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569765166088604:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:19.310943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001833/r3tmp/tmpsRF0uF/pdisk_1.dat 2025-04-07T13:56:19.746523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:19.746644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:19.749592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:19.777007Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16223 TServer::EnableGrpc on GrpcPort 7923, node 1 2025-04-07T13:56:20.212479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:20.212508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:20.212519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:20.212624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:20.676645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:20.703416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:20.872134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034180742 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034180994 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034180742 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034180994 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2025-04-07T13:56:20.972805Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:20.972960Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:20.972985Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:20.973503Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:23.209411Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034180833, tx_id: 281474976710658 } } } 2025-04-07T13:56:23.212136Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:23.213759Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:23.217093Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034180994 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 ... 
ted -> Connecting 2025-04-07T13:56:24.235642Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:24.255529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31225 TServer::EnableGrpc on GrpcPort 14199, node 2 2025-04-07T13:56:24.457250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:24.457275Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:24.457281Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:24.457380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:24.764820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:24.769749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:24.772303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.849605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034184816 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184935 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034184816 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184935 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2025-04-07T13:56:24.921926Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:24.922120Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:24.922136Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:24.922578Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:27.145473Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034184879, tx_id: 281474976710658 } } } 2025-04-07T13:56:27.146676Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:27.148317Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:27.149354Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" 
PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184935 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:27.149543Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> TxUsage::WriteToTopic_Demo_4 >> TxUsage::WriteToTopic_Demo_22_RestartNo >> BsControllerConfig::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-04-07T13:56:20.314940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569769091922025:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:20.314992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00181f/r3tmp/tmp6hIE38/pdisk_1.dat 2025-04-07T13:56:20.773534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:20.791953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:20.792440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:20.794821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31701 TServer::EnableGrpc on GrpcPort 18830, node 1 2025-04-07T13:56:21.145277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:21.145323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:21.145330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:21.145448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31701 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:21.719567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:21.737154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:21.747939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034181869 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034181771 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034181869 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:56:21.908392Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:21.908525Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:21.908552Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:21.909254Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:23.802528Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034181869, tx_id: 281474976710658 } } } 2025-04-07T13:56:23.802935Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:23.804603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.805961Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:56:23.805976Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:56:23.847963Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:56:23.847990Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034183878 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2025-04-07T13:56:24.541395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569785994723897:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:24.541495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00181f/r3tmp/tmpkr4ePS/pdisk_1.dat 2025-04-07T13:56:24.645573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:24.645648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:24.645782Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:24.655966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10709 TServer::EnableGrpc on GrpcPort 13629, node 2 2025-04-07T13:56:24.893240Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:24.893267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:24.893273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:24.893367Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:25.201992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.209312Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:25.215575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:25.259542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034185250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034185341 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034185250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034185341 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2025-04-07T13:56:25.310837Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:25.310939Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:25.310953Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:25.311386Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:27.216463Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034185292, tx_id: 281474976715658 } } } 2025-04-07T13:56:27.216722Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:27.218048Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:27.219130Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034185341 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:27.219318Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value >> DstCreator::WithAsyncIndex [GOOD] >> 
DstCreator::NonExistentSrc >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> KqpFlipJoin::Inner_2 [GOOD] >> KqpFlipJoin::Inner_3 >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> TestKinesisHttpProxy::GoodRequestCreateStream >> DstCreator::SamePartitionCount [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-04-07T13:56:20.947230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569768650259351:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:20.947272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017f3/r3tmp/tmpW8GD44/pdisk_1.dat 2025-04-07T13:56:21.638996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:21.639117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:21.656831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:21.685116Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3758 TServer::EnableGrpc on GrpcPort 12942, node 1 2025-04-07T13:56:22.121299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:22.121323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:22.121330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:22.121414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:22.563485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:22.581454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034182695 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034182618 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034182695 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:56:22.737884Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.738040Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:22.738058Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:22.738741Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:24.569382Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034182695, tx_id: 281474976710658 } } } 2025-04-07T13:56:24.569794Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:24.571806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-07T13:56:24.572630Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:56:24.572654Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:56:24.604437Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:56:24.604466Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034184648 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2025-04-07T13:56:25.274677Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569790799512442:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:25.274730Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017f3/r3tmp/tmpq9SKsG/pdisk_1.dat 2025-04-07T13:56:25.434667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:25.434772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:25.438428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:25.450019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6774 TServer::EnableGrpc on GrpcPort 25688, node 2 2025-04-07T13:56:25.644027Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:25.644053Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:25.644060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:25.644166Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:25.956327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.969231Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:25.976230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034186335 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034186006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034186335 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... (TRUNCATED) 2025-04-07T13:56:26.360120Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:26.360227Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:26.360235Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:26.360635Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:28.384056Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1744034186335, tx_id: 281474976715658 } } } 2025-04-07T13:56:28.384431Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:28.386119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:28.386904Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-07T13:56:28.386927Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-07T13:56:28.417612Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-07T13:56:28.417642Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034188456 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... 
(TRUNCATED) >> Cdc::AwsRegion [GOOD] >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::OverlayMap >> BsControllerConfig::OverlayMap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-04-07T13:56:21.772230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569772878466003:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:21.772304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017e0/r3tmp/tmpPsH5EB/pdisk_1.dat 2025-04-07T13:56:22.340733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:22.359506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:22.359611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:22.361760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18807 TServer::EnableGrpc on GrpcPort 9048, node 1 2025-04-07T13:56:22.951188Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:22.951209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:22.951214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:22.951299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:23.344310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:23.371847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034183486 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034183409 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034183486 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... (TRUNCATED) 2025-04-07T13:56:23.496411Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:23.496612Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:23.496630Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:23.497472Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:25.567199Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1744034183486, tx_id: 281474976710658 } } } 2025-04-07T13:56:25.567680Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:25.569730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:25.571014Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:56:25.576763Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-07T13:56:25.602938Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:56:25.602971Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034185642 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017e0/r3tmp/tmpJAsCXc/pdisk_1.dat 2025-04-07T13:56:26.476611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:56:26.493040Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:26.511819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:26.511901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:26.520121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25717 TServer::EnableGrpc on GrpcPort 63343, node 2 2025-04-07T13:56:26.700332Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:26.700355Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:26.700364Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:26.700455Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25717 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:26.997053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:27.007355Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:27.015885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034187112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034187049 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034187112 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:56:27.079741Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:27.079893Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:27.079907Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:27.080458Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:29.491755Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034187112, tx_id: 281474976715658 } } } 2025-04-07T13:56:29.492124Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:29.493859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.495168Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-07T13:56:29.495179Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-07T13:56:29.554320Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-07T13:56:29.554348Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744034187112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 
Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034189590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> BsControllerConfig::ReassignGroupDisk |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] >> KqpJoin::JoinLeftPureCross [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::ReplicationConsistencyMismatch >> BsControllerConfig::MergeIntersectingBoxes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 31063, MsgBus: 9484 2025-04-07T13:55:50.189161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569640007557155:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:50.189204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001165/r3tmp/tmpVk71Ys/pdisk_1.dat 2025-04-07T13:55:50.598828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:50.598943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:50.601951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:50.609147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31063, node 1 2025-04-07T13:55:50.718743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:50.718764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:50.718773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:50.718879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9484 TClient is connected to server localhost:9484 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:51.371951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:55:51.389162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:51.583483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:51.859783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:51.965476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:54.669062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569657187427821:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:54.669171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:54.914865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.953160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.043021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.113943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.185559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.191462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569640007557155:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:55.191747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:55.257243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.353650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569661482395620:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.353720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.354060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569661482395625:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.358061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:55.402756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569661482395627:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:55:56.900923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.935743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.972091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27390, MsgBus: 3746 2025-04-07T13:56:00.790199Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569681285368338:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:00.897290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001165/r3tmp/tmpV3oEgv/pdisk_1.dat 2025-04-07T13:56:01.177269Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:01.187858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:01.187958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:01.215732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27390, node 2 2025-04-07T13:56:01.525179Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:01.525207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:01.525220Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:01.525337Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3746 TClient is connected to server localhost:3746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:56:02.445300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.470756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:02.571369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.794668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-0 ... 644480 2025-04-07T13:56:19.982098Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.040849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.085178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.141173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.198835Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.271470Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569767960895473:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:20.271568Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:20.271821Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569767960895478:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:20.277006Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:20.300075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569767960895480:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:20.809872Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569746486056854:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:20.809962Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:21.882740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:21.929295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:21.988516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28536, MsgBus: 10147 2025-04-07T13:56:24.840656Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569784130230549:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:24.840746Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001165/r3tmp/tmpccBnqB/pdisk_1.dat 2025-04-07T13:56:24.970492Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:24.983697Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:24.983768Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:24.985597Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28536, node 5 2025-04-07T13:56:25.054811Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:25.054836Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:25.054846Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:25.054972Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10147 TClient is connected to server localhost:10147 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:25.678340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.685558Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:25.697166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.776423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:26.044080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:26.131535Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:28.769455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569801310101437:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:28.769541Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:28.819724Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:28.868992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:28.920631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:28.956517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.012139Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.061460Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.119849Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569805605069229:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.119974Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.120214Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569805605069234:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.123534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:29.137565Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:56:29.139078Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569805605069236:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:29.841111Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569784130230549:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:29.841176Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:30.376509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:30.447381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> BsControllerConfig::ExtendByCreatingSeparateBox ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 9204, MsgBus: 10893 2025-04-07T13:55:52.925157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569649920482915:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001148/r3tmp/tmpzoYtCU/pdisk_1.dat 2025-04-07T13:55:53.362803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:55:53.611950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:53.669435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:53.669536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:53.677822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9204, node 1 2025-04-07T13:55:53.881264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:53.881296Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:53.881307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:53.881405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10893 TClient is connected to server localhost:10893 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:54.831182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:54.851951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:54.860247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.072420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.283085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.401958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:57.586479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569671395320915:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:57.586577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:57.915621Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569649920482915:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:57.915691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:58.046787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.103341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.167067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.223402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.293586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.349896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.457171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569675690288713:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.457268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.457501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569675690288718:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.461845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:58.493489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569675690288720:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:00.803111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.864502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.955484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10865, MsgBus: 10233 2025-04-07T13:56:03.252624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569694163342222:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:03.252688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001148/r3tmp/tmpGmEHT0/pdisk_1.dat 2025-04-07T13:56:03.482880Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:03.485782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:03.485894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:03.488494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10865, node 2 2025-04-07T13:56:03.555122Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:03.555175Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:03.555184Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:03.555321Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10233 TClient is connected to server localhost:10233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:56:04.025805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.046720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:04.152832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:04.364022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation ... : 72057594046644480 waiting... 2025-04-07T13:56:18.898913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:18.976803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:21.789528Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569771796474999:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:21.789612Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:21.846687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:21.892455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:21.954107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.008310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.078011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.192816Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:22.287140Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569776091442801:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:22.287223Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:22.287491Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569776091442806:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:22.291624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:22.306538Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569776091442808:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:22.523761Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569754616604118:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:22.523918Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31848, MsgBus: 15720 2025-04-07T13:56:25.283510Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569788724910789:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:25.333962Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001148/r3tmp/tmpb9JqdY/pdisk_1.dat 2025-04-07T13:56:25.459426Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:25.501580Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:25.501675Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:25.502967Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31848, node 5 2025-04-07T13:56:25.621347Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:25.621377Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:25.621388Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:25.621517Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15720 TClient is connected to server localhost:15720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:26.310882Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:26.316285Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:26.323622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:26.433410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:26.610175Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:26.685311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.349555Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569805904781670:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.349650Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.419983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.466181Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.509675Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.542259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.615992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.659101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:29.724604Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569805904782173:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.724721Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.724956Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569805904782178:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.728880Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:29.781853Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569805904782180:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:30.274724Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569788724910789:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:30.274809Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2025-04-07T13:53:51.528862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:53:51.529221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:51.529504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00133a/r3tmp/tmpIlXEaU/pdisk_1.dat 2025-04-07T13:53:52.142296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:52.206184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:52.270200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:52.270337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:52.281949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:52.428356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:52.509331Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T13:53:52.509648Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:52.613862Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:52.613988Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:53:52.615573Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:53:52.615665Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:53:52.615723Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:53:52.616031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:53:52.646744Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:53:52.646957Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:53:52.647079Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T13:53:52.647125Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:52.647161Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:53:52.647194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:52.647659Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:53:52.647765Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:53:52.648221Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T13:53:52.648280Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:52.648315Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:52.648356Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:52.648423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:52.648856Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:52.649090Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:52.649220Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:53:52.650910Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:52.662754Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:52.662856Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:52.878596Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T13:53:52.882730Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:52.882807Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:52.883763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:52.883814Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:53:52.883875Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T13:53:52.884169Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T13:53:52.884311Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:52.885052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:52.885127Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:52.887128Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:52.887544Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:52.889867Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T13:53:52.889916Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:52.890573Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T13:53:52.890630Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T13:53:52.890692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:52.892137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:52.892181Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:52.892225Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:52.892292Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:53:52.892342Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T13:53:52.892424Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:52.893837Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:648:2546][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-07T13:53:52.897298Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:52.899570Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T13:53:52.899688Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T13:53:52.899825Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:56.806759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:53:56.807216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:56.807300Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00133a/r3tmp/tmpgDELJg/pdisk_1.dat 2025-04-07T13:53:57.110024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:57.154633Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:57.206779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:57.206912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:57.221901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:57.353802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:57.390132Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:643:2545] 2025-04-07T13:53:57.390396Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:57.473018Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:57.473262Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:53:57.474972Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:53:57.475070Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:53:57.475125Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:53:57.475515Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:53:57.475584Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:53:57.475674Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params ... 
1] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-07T13:56:29.303946Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:56:29.304030Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:56:29.304147Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:29.304176Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-07T13:56:29.304258Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2025-04-07T13:56:29.304585Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2025-04-07T13:56:29.305577Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2025-04-07T13:56:29.307002Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2533 2025-04-07T13:56:29.307541Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:29.307584Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-07T13:56:29.307637Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream1/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 1 partNo : 0 messageNo: 1 size 324 offset: -1 2025-04-07T13:56:29.307801Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 1 partNo 0 2025-04-07T13:56:29.308579Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 439 count 1 nextOffset 1 batches 1 2025-04-07T13:56:29.309081Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream1/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 427 WTime 2533 2025-04-07T13:56:29.309317Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:56:29.309567Z node 21 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-07T13:56:29.313035Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:29.313145Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-07T13:56:29.314004Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:56:29.314195Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2025-04-07T13:56:29.314277Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2025-04-07T13:56:29.314432Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:29.314543Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2025-04-07T13:56:29.314677Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:56:29.325612Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:56:29.325806Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:56:29.325992Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:56:29.326422Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-07T13:56:29.326537Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-07T13:56:29.326733Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:56:29.326786Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:56:29.326880Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:56:29.327088Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:56:29.327344Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:56:29.327669Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1148:2771] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2533 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-07T13:56:29.327835Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:56:29.327944Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:56:29.328089Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-07T13:56:29.328179Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:29.328424Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1147:2676] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2533 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-07T13:56:29.328581Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1009:2771] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:56:29.328811Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2533 queuesize 0 startOffset 0 2025-04-07T13:56:29.328963Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:847:2676] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:56:29.329114Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-07T13:56:29.329195Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-07T13:56:29.340696Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 1, at tablet# 72075186224037888 2025-04-07T13:56:29.340945Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-07T13:56:29.341043Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2025-04-07T13:56:29.356284Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 
72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-07T13:56:29.866170Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:29.866240Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-07T13:56:29.866361Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:56:29.866419Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:56:29.866504Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:56:29.866692Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-07T13:56:29.866788Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:29.867564Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-04-07T13:56:29.873977Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:29.874121Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-07T13:56:29.875068Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:56:29.875270Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:56:29.875433Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:56:29.875586Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T13:56:29.875680Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:29.876428Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> BasicUsage::ConnectToYDB [GOOD] >> BasicUsage::WriteRead >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpFlipJoin::RightOnly_1 >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> BsControllerConfig::PDiskCreate >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::ManyPDisksRestarts >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> DstCreator::ReplicationConsistencyMismatch [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpJoin::JoinLeftPureInnerConverted >> KqpFlipJoin::Inner_3 [GOOD] >> KqpFlipJoin::LeftSemi_1 >> BsControllerConfig::ReassignGroupDisk [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyMismatch [GOOD] Test command err: 2025-04-07T13:56:29.349112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569807891511706:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:29.349333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017cb/r3tmp/tmpzGFqHv/pdisk_1.dat 2025-04-07T13:56:29.709086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:29.775188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:29.775316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:29.778345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16575 TServer::EnableGrpc on GrpcPort 25411, node 1 2025-04-07T13:56:29.967442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:29.967472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:29.967478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:29.967586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16575 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:30.288405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:30.311101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034190353 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution:... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034190353 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution:... 
(TRUNCATED) 2025-04-07T13:56:30.318700Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:30.318825Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:30.318842Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:30.319468Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:32.265034Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-04-07T13:56:32.265078Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-04-07T13:56:32.741314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569817830869901:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:32.741407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017cb/r3tmp/tmpZaZE1P/pdisk_1.dat 2025-04-07T13:56:32.842035Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:32.865944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:32.866042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:32.867662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22857 TServer::EnableGrpc on GrpcPort 7143, node 2 2025-04-07T13:56:33.059259Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:33.059284Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:33.059291Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:33.059377Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:22857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:33.302877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:33.307681Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:33.311706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:33.432231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034193349 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034193496 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034193349 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034193496 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2025-04-07T13:56:33.460156Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:33.460236Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:56:33.460250Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:56:33.460654Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:56:35.379206Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034193419, tx_id: 281474976715658 } } } 2025-04-07T13:56:35.379568Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:56:35.381031Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-07T13:56:35.382974Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" 
PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744034193496 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_STRONG } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:56:35.383192Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency mismatch: expected: CONSISTENCY_WEAK, got: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:241:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:241:2077] Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:269:2068] recipient: [1:241:2077] 2025-04-07T13:56:32.072627Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:32.077408Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:32.079589Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:32.079852Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:32.080576Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:32.080621Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:32.080855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:32.089442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:32.089639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:32.089795Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:32.089903Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:32.090003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:32.090061Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:294:2068] recipient: [1:22:2069] 2025-04-07T13:56:32.102006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:32.102158Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:32.116383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:32.116511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:32.116590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:32.116647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:32.116765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:32.116829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:32.116860Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:32.116899Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:32.132205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:32.132347Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:32.133436Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:32.133501Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:32.133620Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:32.146276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 
2025-04-07T13:56:32.146697Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk 2025-04-07T13:56:32.146731Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-04-07T13:56:32.146744Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-04-07T13:56:32.146756Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-04-07T13:56:32.146782Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-04-07T13:56:32.146816Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-04-07T13:56:32.146845Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-04-07T13:56:32.146861Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-04-07T13:56:32.146881Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-04-07T13:56:32.146899Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-04-07T13:56:32.146910Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-04-07T13:56:32.146923Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-07T13:56:32.165737Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:241:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:241:2077] Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:269:2068] recipient: [13:241:2077] 2025-04-07T13:56:34.157488Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:34.158537Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:34.160095Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:34.160286Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:34.160915Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:34.160947Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:34.161176Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:34.170288Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:34.170417Z 
node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:34.170520Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:34.170639Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:34.170728Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:34.170807Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:294:2068] recipient: [13:22:2069] 2025-04-07T13:56:34.182674Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:34.182814Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:34.193670Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:34.193838Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:34.193931Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:34.194021Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:34.194186Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:34.194259Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:34.194308Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:34.194400Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:34.205223Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:34.205384Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:34.206782Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:34.206844Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:34.206932Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:34.207462Z node 13 :BS_CONTROLLER 
DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T13:56:34.207868Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2025-04-07T13:56:34.207899Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-04-07T13:56:34.207915Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-04-07T13:56:34.207932Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-04-07T13:56:34.207960Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-04-07T13:56:34.207986Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-04-07T13:56:34.208011Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-04-07T13:56:34.208036Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-04-07T13:56:34.208053Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-04-07T13:56:34.208080Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-04-07T13:56:34.208105Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-04-07T13:56:34.208128Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-07T13:56:34.225429Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> 
TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] Test command err: 2025-04-07T13:55:40.656936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569596936632198:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:40.657035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001777/r3tmp/tmpUo0fpJ/pdisk_1.dat 2025-04-07T13:55:40.914041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7880, node 1 2025-04-07T13:55:40.953754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:40.953786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:40.953793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:40.953907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:41.004650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:41.004785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:41.006555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:55:41.178748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.191824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:41.192955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.193577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:41.193772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:41.193794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:55:41.194352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:41.194382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:55:41.194788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:41.194912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.196066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034141241, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:41.196101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:41.196340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:41.197024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:41.197162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:41.197198Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:41.197280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:41.197310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:41.197340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:55:41.198948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:41.199006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:41.199023Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:41.199117Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:27081 2025-04-07T13:55:41.363677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.363935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:41.363962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.364454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:41.364583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:41.365392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:41.365540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034141409, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:41.365561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034141409, at schemeshard: 72057594046644480 2025-04-07T13:55:41.365760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:41.365813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:41.365852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 1 2025-04-07T13:55:41.366207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:41.366306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:41.366706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:41.366744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:41.366753Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:41.366785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T13:55:41.367886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.368044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:41.368061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.368099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T13:55:41.368137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:41.368149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:41.368439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:41.368508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:41.368703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:41.368986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:41.369017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:55:41.369040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:55:41.369074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T13:55:41.369673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:55:41.369790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:41.370156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ... 379a-d915d23-76250d72-d18b64a9] got new request from [3817:fd00:6050:0:2017:fd00:6050:0] 2025-04-07T13:56:35.054262Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [8b4f379a-d915d23-76250d72-d18b64a9] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T13:56:35.054282Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [8b4f379a-d915d23-76250d72-d18b64a9] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:35.055460Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4, operationId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.055547Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715706:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T13:56:35.056086Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715706, database: /Root, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /Root/SQS/cloud4 2025-04-07T13:56:35.096374Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0, operationId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.096632Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715711:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:56:35.097577Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715711, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0 2025-04-07T13:56:35.097743Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:56:35.097957Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:56:35.098018Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:56:35.098252Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715711, at schemeshard: 72057594046644480 2025-04-07T13:56:35.098454Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715711 2025-04-07T13:56:35.098482Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2025-04-07T13:56:35.098503Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 7 2025-04-07T13:56:35.098673Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715711 2025-04-07T13:56:35.098693Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2025-04-07T13:56:35.098705Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 3 2025-04-07T13:56:35.099750Z node 7 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034195148, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:56:35.099791Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744034195148, at schemeshard: 72057594046644480 2025-04-07T13:56:35.099908Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715711:0 128 -> 240 2025-04-07T13:56:35.100481Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:56:35.100653Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:56:35.100720Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715711:0 ProgressState 2025-04-07T13:56:35.100806Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715711:0 progress is 1/1 2025-04-07T13:56:35.100845Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715711:0 2025-04-07T13:56:35.100883Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715711, publications: 2, subscribers: 1 2025-04-07T13:56:35.101365Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715711 2025-04-07T13:56:35.101401Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715711 2025-04-07T13:56:35.101417Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2025-04-07T13:56:35.101581Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715711 2025-04-07T13:56:35.101607Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715711 2025-04-07T13:56:35.101619Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2025-04-07T13:56:35.101670Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715711, subscribers: 1 2025-04-07T13:56:35.103818Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.104013Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715712:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:56:35.104612Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715712, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2025-04-07T13:56:35.104764Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:56:35.104906Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:56:35.104955Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
MkDir::TPropose operationId#281474976715712:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:56:35.105533Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715712, at schemeshard: 72057594046644480 2025-04-07T13:56:35.105626Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715712 2025-04-07T13:56:35.105651Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2025-04-07T13:56:35.105668Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2025-04-07T13:56:35.105852Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715712 2025-04-07T13:56:35.105875Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2025-04-07T13:56:35.105888Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2025-04-07T13:56:35.113197Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034195162, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:56:35.113252Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715712:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744034195162, at schemeshard: 72057594046644480 2025-04-07T13:56:35.113389Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715712:0 128 -> 240 2025-04-07T13:56:35.114056Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:56:35.114278Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:56:35.114328Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715712:0 ProgressState 2025-04-07T13:56:35.114414Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715712:0 progress is 1/1 2025-04-07T13:56:35.114449Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715712:0 2025-04-07T13:56:35.114487Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715712, publications: 2, subscribers: 1 2025-04-07T13:56:35.114911Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715712 2025-04-07T13:56:35.114945Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715712 2025-04-07T13:56:35.114961Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2025-04-07T13:56:35.115130Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 
281474976715712 2025-04-07T13:56:35.115178Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715712 2025-04-07T13:56:35.115193Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2025-04-07T13:56:35.115236Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715712, subscribers: 1 2025-04-07T13:56:35.170671Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [8b4f379a-d915d23-76250d72-d18b64a9] Got succesfult GRPC response. 2025-04-07T13:56:35.170756Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [8b4f379a-d915d23-76250d72-d18b64a9] reply ok 2025-04-07T13:56:35.170899Z node 7 :HTTP DEBUG: (#44,[::1]:59612) <- (200 ) Http output full {"QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo"} 2025-04-07T13:56:35.171001Z node 7 :HTTP DEBUG: (#44,[::1]:59612) connection closed >> TxUsage::SessionAbort [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] |87.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::TwoSessionOneConsumer >> Describe::Basic [GOOD] >> Describe::Statistics >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> TxUsage::WriteToTopic_Invalid_Session [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3389, MsgBus: 12266 2025-04-07T13:55:56.061115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569664063934464:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.061502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001136/r3tmp/tmp6ZLpqt/pdisk_1.dat 2025-04-07T13:55:56.715784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:56.723903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:56.752276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:56.752860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3389, node 1 2025-04-07T13:55:57.087047Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:57.087067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:57.087074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:57.087149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12266 TClient is connected to server localhost:12266 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:57.985199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.013320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:58.027973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.373815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.692651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:58.813832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:01.044818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569664063934464:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:01.044907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:02.701547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569689833739800:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.725127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:02.760352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.805776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.860822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.885061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.913083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.959014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.024848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569694128707589:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.038177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.038670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569694128707594:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.042117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:03.053951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569694128707597:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:04.197931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.249435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.289224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.366812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.392191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.416139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7116, MsgBus: 10492 2025-04-07T13:56:06.395200Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569709109575330:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001136/r3tmp/tmpxUwXfr/pdisk_1.dat 2025-04-07T13:56:06.447904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:06.586939Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:06.606868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:06.607085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:06.609279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7116, node 2 2025-04-07T13:56:06.697579Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:06.697605Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:06.697613Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:06.697721Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10492 TClient is connected to server localhost:10492 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:07.229513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... ave access permissions } 2025-04-07T13:56:26.244860Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.245243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569795442560865:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.249188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:26.260450Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569795442560867:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:26.936756Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569773967722183:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:26.936819Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:27.559628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.604229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.652442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.694997Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.769709Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.818913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18141, MsgBus: 61834 2025-04-07T13:56:30.127410Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569809673295575:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:30.127477Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001136/r3tmp/tmpFd6nRV/pdisk_1.dat 2025-04-07T13:56:30.299927Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:30.334562Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:30.334680Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:30.336728Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18141, node 5 2025-04-07T13:56:30.404403Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:30.404424Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:30.404433Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:30.404551Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61834 TClient is connected to server localhost:61834 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:30.955934Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:30.961973Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:30.976722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:31.046045Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:31.273586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:31.344337Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.055755Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569826853166496:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:34.055871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:34.107922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.148141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.185927Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.217233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.279684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.311576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.388953Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569826853166997:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:34.389037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:34.389165Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569826853167002:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:34.392195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:34.399836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569826853167004:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:35.127510Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569809673295575:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:35.127579Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:35.475690Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.544231Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.578474Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.616475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.647828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.678240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> TxUsage::WriteToTopic_Two_WriteSession >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-04-07T13:55:38.537377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569590099578882:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:38.537589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00177e/r3tmp/tmp1wtoni/pdisk_1.dat 2025-04-07T13:55:38.759881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25330, node 1 2025-04-07T13:55:38.816413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:38.816439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:38.816452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:38.816545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:38.890052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:38.890207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:38.891848Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:39.038486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.044310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:39.045397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.045937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:55:39.046090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:55:39.046105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:55:39.046466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:55:39.046483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:55:39.046762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.047064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:39.047896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034139092, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:39.047930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:55:39.048113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:55:39.048517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:39.048632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:39.048718Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:55:39.048822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:55:39.048862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:55:39.048888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:55:39.050378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:55:39.050422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:55:39.050446Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:55:39.050495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:12964 waiting... 
2025-04-07T13:55:39.230393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.230557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:39.230571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.230985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T13:55:39.231097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.232015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034139281, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:55:39.232036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034139281, at schemeshard: 72057594046644480 2025-04-07T13:55:39.232155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T13:55:39.232211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T13:55:39.232230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T13:55:39.232551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:39.232637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:39.233237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T13:55:39.233276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T13:55:39.233295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:55:39.233364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2025-04-07T13:55:39.233508Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:55:39.234668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.234835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:39.234851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.234883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 
progress is 1/1 2025-04-07T13:55:39.234926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T13:55:39.234940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T13:55:39.235227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T13:55:39.235319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:55:39.235489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:55:39.235881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:55:39.235910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:55:39.235919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T13:55:39.235951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T13:55:39.236672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:55:39.236866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:55:39.237301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CRE ... 
569839040070345:2480] destroyed 2025-04-07T13:56:36.522651Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.522668Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7490569839040070348:2483] destroyed 2025-04-07T13:56:36.522685Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.522702Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7490569839040070346:2481] destroyed 2025-04-07T13:56:36.522717Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.522731Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7490569839040070347:2482] destroyed 2025-04-07T13:56:36.523696Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [615eaf8a-1186cf9c-dd1e3f83-c74517b0] reply ok 2025-04-07T13:56:36.523909Z node 7 :HTTP DEBUG: (#47,[::1]:57424) <- (200 ) 2025-04-07T13:56:36.524017Z node 7 :HTTP DEBUG: (#47,[::1]:57424) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744034197,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744034197,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-04-07T13:56:36.525398Z node 7 :HTTP DEBUG: (#44,[::1]:57436) incoming connection opened 2025-04-07T13:56:36.525448Z node 7 :HTTP DEBUG: (#44,[::1]:57436) -> (POST /Root) 2025-04-07T13:56:36.525557Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [586c:9a00:6050:0:406c:9a00:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 5ed377d8-6ca42b6e-5fef14c3-3c0936a2 2025-04-07T13:56:36.525821Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [5ed377d8-6ca42b6e-5fef14c3-3c0936a2] got new request from [586c:9a00:6050:0:406c:9a00:6050:0] database '/Root' stream 'testtopic' 2025-04-07T13:56:36.526180Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [5ed377d8-6ca42b6e-5fef14c3-3c0936a2] [auth] Authorized successfully 2025-04-07T13:56:36.526311Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [5ed377d8-6ca42b6e-5fef14c3-3c0936a2] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:36.527182Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [5ed377d8-6ca42b6e-5fef14c3-3c0936a2] reply ok 2025-04-07T13:56:36.527304Z node 7 :HTTP DEBUG: (#44,[::1]:57436) <- (200 ) 2025-04-07T13:56:36.527378Z node 7 :HTTP DEBUG: (#44,[::1]:57436) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744034.196,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744034.196,"StreamName":"testtopic"}} 2025-04-07T13:56:36.528016Z node 7 :HTTP DEBUG: 
(#44,[::1]:57446) incoming connection opened 2025-04-07T13:56:36.528083Z node 7 :HTTP DEBUG: (#44,[::1]:57446) -> (POST /Root) 2025-04-07T13:56:36.528178Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1832:d200:6050:0:32:d200:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: e1ef7d97-30095b95-1f59da16-15f50aed 2025-04-07T13:56:36.528427Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [e1ef7d97-30095b95-1f59da16-15f50aed] got new request from [1832:d200:6050:0:32:d200:6050:0] database '/Root' stream 'testtopic' 2025-04-07T13:56:36.528689Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [e1ef7d97-30095b95-1f59da16-15f50aed] [auth] Authorized successfully 2025-04-07T13:56:36.528751Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [e1ef7d97-30095b95-1f59da16-15f50aed] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T13:56:36.529430Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.529483Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7490569839040070371:2491], now have 1 active actors on pipe 2025-04-07T13:56:36.529503Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.529522Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [7:7490569839040070372:2492], now have 1 active actors on pipe 2025-04-07T13:56:36.529592Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.529619Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [7:7490569839040070373:2493], now have 1 active actors on pipe 2025-04-07T13:56:36.529650Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.529679Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [7:7490569839040070374:2494], now have 1 active actors on pipe 2025-04-07T13:56:36.529737Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.529763Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7490569839040070375:2495], now have 1 active actors on pipe 2025-04-07T13:56:36.530779Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.530812Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7490569839040070371:2491] destroyed 2025-04-07T13:56:36.530829Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.530843Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7490569839040070372:2492] destroyed 2025-04-07T13:56:36.530859Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.530873Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7490569839040070373:2493] destroyed 2025-04-07T13:56:36.530888Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.530903Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7490569839040070374:2494] destroyed 2025-04-07T13:56:36.530919Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:56:36.530934Z node 7 :PERSQUEUE 
DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7490569839040070375:2495] destroyed 2025-04-07T13:56:36.531251Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [e1ef7d97-30095b95-1f59da16-15f50aed] reply ok 2025-04-07T13:56:36.531464Z node 7 :HTTP DEBUG: (#44,[::1]:57446) <- (200 ) 2025-04-07T13:56:36.531558Z node 7 :HTTP DEBUG: (#44,[::1]:57446) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744034197,"StorageLimitMb":0,"StreamName":"testtopic"}} >> BsControllerConfig::PDiskCreate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:191:2075] 2025-04-07T13:56:30.710701Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:30.717507Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:30.719312Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:30.719823Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:30.720475Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:30.720517Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:30.720758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} 
TTxInitScheme Execute 2025-04-07T13:56:30.729283Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:30.729419Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:30.729542Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:30.729665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:30.729754Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:30.729829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2025-04-07T13:56:30.743999Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:30.744160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:30.754842Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:30.754963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:30.755078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:30.755148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:30.755230Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:30.755276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:30.755319Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:30.755376Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:30.766768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:30.766897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:30.768044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:30.768105Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:30.768224Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:30.779706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-07T13:56:30.780798Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-07T13:56:30.781510Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:193:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:193:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:193:2075] 2025-04-07T13:56:32.806387Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:32.807255Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:32.808818Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:32.809078Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:32.809651Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:32.809686Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:32.809873Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:32.818834Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:32.818983Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:32.819097Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:32.819186Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:32.819272Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:32.819371Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2025-04-07T13:56:32.830576Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:32.830718Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:32.841477Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 
2025-04-07T13:56:32.841623Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:32.841717Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:32.841802Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:32.841933Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:32.841988Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:32.842068Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:32.842135Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:32.853100Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:32.853265Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:32.854602Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:32.854689Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:32.854807Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:32.855251Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-07T13:56:32.856226Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-07T13:56:32.857002Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:177:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:177:2075] Leader for TabletID 72057594037932033 is [21:206:2077] sender: [21:207:2066] recipient: [21:177:2075] 2025-04-07T13:56:34.921541Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:34.922362Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:34.923352Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:34.923709Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 
2025-04-07T13:56:34.924101Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NAct ... 01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-04-07T13:56:34.968714Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-04-07T13:56:34.969272Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-04-07T13:56:34.969819Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-07T13:56:34.970386Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-07T13:56:34.970987Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-07T13:56:34.971570Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-07T13:56:34.972155Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-07T13:56:34.972913Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-07T13:56:34.973689Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-07T13:56:34.974279Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-07T13:56:34.974955Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-07T13:56:34.975604Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-07T13:56:34.976239Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-07T13:56:34.976889Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-07T13:56:34.977580Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-07T13:56:34.978342Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-07T13:56:34.979078Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:191:2075] IGNORE Leader for 
TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:191:2075] Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:207:2066] recipient: [31:191:2075] 2025-04-07T13:56:36.834622Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:36.835486Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:36.836909Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:36.837171Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:36.837695Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:36.837721Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:36.837884Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:36.845856Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:36.845977Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:36.846070Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:36.846158Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:36.846276Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:36.846401Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:229:2066] recipient: [31:20:2067] 2025-04-07T13:56:36.858528Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:36.858687Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:36.869363Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:36.869491Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:36.869567Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:36.869648Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:36.869747Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:36.869789Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:36.869819Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:36.869869Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:36.880471Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:36.880599Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:36.881742Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:36.881790Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:36.881905Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:36.882238Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-04-07T13:56:36.882989Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-04-07T13:56:36.883597Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-04-07T13:56:36.884101Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-04-07T13:56:36.884600Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-04-07T13:56:36.885299Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-07T13:56:36.885975Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-07T13:56:36.886579Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-07T13:56:36.887155Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-07T13:56:36.887745Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-07T13:56:36.888445Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-07T13:56:36.889210Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-07T13:56:36.889834Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-07T13:56:36.890398Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-07T13:56:36.891001Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-07T13:56:36.891903Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-07T13:56:36.892623Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-07T13:56:36.893393Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-07T13:56:36.894258Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-07T13:56:36.895062Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> TxUsage::WriteToTopic_Demo_3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:191:2075] 2025-04-07T13:56:35.081502Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:35.085288Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:35.086995Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:35.087457Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:35.088064Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:35.088104Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:35.088303Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:35.095822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:35.095966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:35.096102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:35.096188Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
2025-04-07T13:56:35.096270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:35.096315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:232:2066] recipient: [1:20:2067] 2025-04-07T13:56:35.107899Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:35.108027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:35.119107Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:35.119262Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:35.119369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:35.119446Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:35.119554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:35.119603Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:35.119637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:35.119687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:35.130294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:35.130434Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:35.132900Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:35.132985Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:35.133124Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:35.149373Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-04-07T13:56:35.149925Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2025-04-07T13:56:35.149977Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2025-04-07T13:56:35.150005Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-07T13:56:35.150025Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2025-04-07T13:56:35.150049Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2025-04-07T13:56:35.150069Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-07T13:56:35.150092Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2025-04-07T13:56:35.150123Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2025-04-07T13:56:35.150146Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-07T13:56:35.150166Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2025-04-07T13:56:35.150185Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2025-04-07T13:56:35.150206Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-04-07T13:56:35.150230Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-07T13:56:35.150249Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1001 Path# /dev/disk3 2025-04-07T13:56:35.150278Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1002 Path# /dev/disk1 2025-04-07T13:56:35.150315Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-07T13:56:35.150352Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2025-04-07T13:56:35.150384Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1001 Path# /dev/disk1 2025-04-07T13:56:35.150413Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-07T13:56:35.150433Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1002 Path# /dev/disk1 2025-04-07T13:56:35.150452Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1002 Path# /dev/disk2 
2025-04-07T13:56:35.150470Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk3 2025-04-07T13:56:35.150491Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1002 Path# /dev/disk1 2025-04-07T13:56:35.150511Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-07T13:56:35.150556Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk2 2025-04-07T13:56:35.150583Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1001 Path# /dev/disk1 2025-04-07T13:56:35.150601Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1002 Path# /dev/disk1 2025-04-07T13:56:35.150621Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1002 Path# /dev/disk2 2025-04-07T13:56:35.150641Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1001 Path# /dev/disk3 2025-04-07T13:56:35.150665Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1002 Path# /dev/disk1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:193:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:193:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:193:2075] 2025-04-07T13:56:37.182126Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:37.182920Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:37.184271Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:37.184468Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:37.185021Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:37.185048Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:37.185218Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:37.195583Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:37.195708Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:37.195808Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:37.195889Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:37.196020Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:37.196105Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: 
[11:232:2066] recipient: [11:20:2067] 2025-04-07T13:56:37.207819Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:37.207963Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:37.219551Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:37.219707Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:37.219789Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:37.219860Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:37.219959Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:37.220004Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:37.220081Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:37.220133Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:37.230821Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:37.230952Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:37.231957Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:37.232003Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:37.232219Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:37.232749Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { 
Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-04-07T13:56:37.233169Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2025-04-07T13:56:37.233211Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk1 2025-04-07T13:56:37.233243Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk2 2025-04-07T13:56:37.233276Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk3 2025-04-07T13:56:37.233319Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2025-04-07T13:56:37.233347Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk1 2025-04-07T13:56:37.233370Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-04-07T13:56:37.233389Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2025-04-07T13:56:37.233408Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk1 2025-04-07T13:56:37.233428Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2025-04-07T13:56:37.233459Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2025-04-07T13:56:37.233498Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk3 2025-04-07T13:56:37.233522Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1002 Path# /dev/disk2 2025-04-07T13:56:37.233542Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk3 2025-04-07T13:56:37.233561Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-04-07T13:56:37.233584Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-04-07T13:56:37.233604Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk2 2025-04-07T13:56:37.233622Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2025-04-07T13:56:37.233642Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1001 Path# /dev/disk3 2025-04-07T13:56:37.233663Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk1 2025-04-07T13:56:37.233706Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1002 Path# /dev/disk1 2025-04-07T13:56:37.233734Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1002 Path# /dev/disk2 2025-04-07T13:56:37.233755Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-07T13:56:37.233773Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new 
pdisk PDiskId# 20:1002 Path# /dev/disk2 2025-04-07T13:56:37.233791Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk3 2025-04-07T13:56:37.233810Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk2 2025-04-07T13:56:37.233830Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1001 Path# /dev/disk3 2025-04-07T13:56:37.233849Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1002 Path# /dev/disk1 2025-04-07T13:56:37.233873Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1002 Path# /dev/disk2 2025-04-07T13:56:37.233892Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1002 Path# /dev/disk1 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 18594, MsgBus: 22606 2025-04-07T13:56:12.020471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569733450847611:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:12.020502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e1d/r3tmp/tmp35UKEq/pdisk_1.dat 2025-04-07T13:56:12.512968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:12.533891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:12.533989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:12.537351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18594, node 1 2025-04-07T13:56:12.597867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:12.597885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:12.597917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:12.597996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22606 TClient is connected to server localhost:22606 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:13.245806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:13.260875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:15.508456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569746335750137:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:15.508589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:15.806996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:15.932828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569746335750255:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:15.932914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:15.933445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569746335750261:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:15.938818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:15.959836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569746335750263:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:16.591551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.021728Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569733450847611:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:17.021790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:17.123555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:17.661680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.278396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.923256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.409577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:19.453417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.522002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710731:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.555773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710734:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.559832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710732:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.561364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710733:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-04-07T13:56:26.462897Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034186482, txId: 281474976710772] shutting down Trying to start YDB, gRPC: 24573, MsgBus: 3499 2025-04-07T13:56:27.397229Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569796648058549:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e1d/r3tmp/tmpCgdT6X/pdisk_1.dat 2025-04-07T13:56:27.491535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:27.595605Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:27.614568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:27.614650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:27.616000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24573, node 2 2025-04-07T13:56:27.672216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:27.672243Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:27.672249Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:27.672331Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3499 TClient is connected to server localhost:3499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:28.166656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:30.725997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569809532960883:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.726072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.758742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:30.799604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569809532961001:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.799770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.800067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569809532961007:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.803756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-07T13:56:30.812583Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:56:30.812746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569809532961009:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:56:31.404532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:31.923870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-07T13:56:32.363064Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490569796648058549:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:32.363157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:32.474751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:33.011370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:33.589128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:34.047558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:34.076155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:37.678372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710727:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> TxUsage::WriteToTopic_Demo_28 |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |87.9%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 >> TxUsage::WriteToTopic_Demo_22_RestartNo [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit >> TxUsage::WriteToTopic_Demo_2 [GOOD] |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TxUsage::WriteToTopic_Demo_19_RestartNo >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> KqpJoin::JoinMismatchDictKeyTypes >> KqpFlipJoin::LeftSemi_1 [GOOD] >> KqpFlipJoin::LeftSemi_2 >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32614, MsgBus: 2339 2025-04-07T13:55:52.955761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569646394884205:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:52.955895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001158/r3tmp/tmpEPxwsl/pdisk_1.dat 2025-04-07T13:55:53.948111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:53.948205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:53.948436Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:53.981357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:53.992808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 32614, node 1 2025-04-07T13:55:54.227870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:54.227890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:54.227896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:54.227984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2339 TClient is connected to server localhost:2339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:55.069318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.107169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.351711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.563320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.666182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:57.939608Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569646394884205:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:57.999450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:58.032930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569672164689548:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.033044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.409812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.453795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.533635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.577853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.642561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.705921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.801031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569672164690052:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.801181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.801727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569672164690057:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:58.805950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:58.835058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569672164690059:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:01.142703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.198488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.246180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.281312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.317692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.387627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29475, MsgBus: 13469 2025-04-07T13:56:04.397270Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569701700925685:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001158/r3tmp/tmpmpuBIm/pdisk_1.dat 2025-04-07T13:56:04.462394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:04.555552Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:04.558332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:04.558395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:04.560783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29475, node 2 2025-04-07T13:56:04.649122Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:04.649146Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:04.649153Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:04.649256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13469 TClient is connected to server localhost:13469 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:05.193604Z node 2 :FLAT_TX_SCHEMESHARD ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:21.677824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:21.789231Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:24.644862Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569784473307640:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:24.644988Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:24.678482Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.713248Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.785191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.823215Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.866603Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:24.950907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:25.011803Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569788768275431:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.011895Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.012122Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569788768275436:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.015477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:25.030239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569788768275438:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:25.180796Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569767293436751:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:25.180884Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30105, MsgBus: 25746 2025-04-07T13:56:31.964645Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569814423671854:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:31.964734Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001158/r3tmp/tmpGq1uka/pdisk_1.dat 2025-04-07T13:56:32.089104Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:32.114807Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:32.114904Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:32.116055Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30105, node 5 2025-04-07T13:56:32.183702Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:32.183720Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:32.183733Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:32.183882Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25746 TClient is connected to server localhost:25746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:32.729948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:32.745799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:32.862278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:33.060363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:33.152898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:35.798776Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569831603542746:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:35.798880Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:35.843399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.883652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.917670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.947174Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:35.984031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:36.060497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:36.104838Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569835898510542:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:36.104914Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:36.104972Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569835898510547:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:36.108587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:36.121247Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569835898510549:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:36.990383Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569814423671854:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:36.990446Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2025-04-07T13:53:46.614055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569105894314882:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:46.614106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00132e/r3tmp/tmpcryLXn/pdisk_1.dat 2025-04-07T13:53:47.565410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:47.567957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:47.568031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:47.571963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24295, node 1 2025-04-07T13:53:47.805348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:47.805368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:47.805376Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:47.805482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:47.906235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:47.935447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:47.986065Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490569110189282570:2284] 2025-04-07T13:53:47.986331Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:48.028686Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:48.028794Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:53:48.030428Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:53:48.030492Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:53:48.030527Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:53:48.030834Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:53:48.167066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:53:48.167201Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not 
sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:53:48.167249Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490569114484249883:2285] 2025-04-07T13:53:48.167270Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:48.167279Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:53:48.167290Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:48.167445Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:53:48.167509Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:53:48.167526Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:48.167549Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:48.167564Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:48.167584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:48.169005Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569110189282567:2299], serverId# [1:7490569110189282584:2306], sessionId# [0:0:0] 2025-04-07T13:53:48.169147Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:48.169372Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:48.169463Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T13:53:48.171407Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:48.171707Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:48.171764Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:48.175706Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569114484249897:2316], serverId# [1:7490569114484249898:2317], sessionId# [0:0:0] 2025-04-07T13:53:48.179921Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744034028219 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490569110189282445 RawX2: 4294969521 } } Step: 1744034028219 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:48.179947Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:48.180056Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:48.180133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:48.180147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:53:48.180169Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744034028219:281474976710657] in 
PlanQueue unit at 72075186224037888 2025-04-07T13:53:48.180401Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744034028219:281474976710657 keys extracted: 0 2025-04-07T13:53:48.180520Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:48.180635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:48.183775Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:48.185745Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:48.186110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:48.195022Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744034028218 2025-04-07T13:53:48.195054Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:48.195118Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744034028219 txid# 281474976710657} 2025-04-07T13:53:48.195144Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744034028219} 2025-04-07T13:53:48.195190Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:48.195229Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:48.195244Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:48.195262Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:48.195303Z node 1 :TX_DATASHARD DEBUG: Complete [1744034028219 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490569110189282358:2183], exec latency: 5 ms, propose latency: 14 ms 2025-04-07T13:53:48.195325Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T13:53:48.195369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:48.195434Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744034028226 2025-04-07T13:53:48.196596Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7490569114484249883:2285][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-07T13:53:48.214868Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T13:53:48.214931Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:48.220891Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:48.221019Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:53:48.221040Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 
expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-07T13:53:48.221048Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-07T13:53:48.232843Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:48.344938Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:53:48.345751Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-07T13:53:48.346216Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:53:48.346436Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-07T13:53:48.346461Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:53:48.346475Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-07T13:53:48.346492Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:53:48.346520Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:53:48.346545Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2025-04-07T13:53:48.346567Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-07T13:53:48.347291Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:48.347327Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7490569114484249955:2358], now have 1 active actors on pipe 2025-04-07T13:53:48.347336Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:48.347347Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037 ... 
37889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.785946Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:56:41.796994Z node 23 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037891 2025-04-07T13:56:41.799222Z node 23 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037892 2025-04-07T13:56:41.800831Z node 23 :TX_DATASHARD DEBUG: 72075186224037891 ack split partitioning changed to schemeshard 281474976715660 2025-04-07T13:56:41.800987Z node 23 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T13:56:41.801792Z node 23 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-07T13:56:41.804018Z node 23 :TX_DATASHARD DEBUG: 72075186224037892 ack split partitioning changed to schemeshard 281474976715660 2025-04-07T13:56:41.804084Z node 23 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T13:56:41.805342Z node 23 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-07T13:56:41.807170Z node 23 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T13:56:41.808014Z node 23 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T13:56:41.809381Z node 23 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-07T13:56:41.809896Z node 23 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-07T13:56:41.823320Z node 23 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-07T13:56:41.824603Z node 23 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-07T13:56:41.827597Z node 23 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-07T13:56:41.829198Z node 23 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-07T13:56:41.831939Z node 23 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 23, TabletId: 72075186224037891 not found 2025-04-07T13:56:41.832271Z node 23 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 23, TabletId: 72075186224037892 not found 2025-04-07T13:56:41.855623Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:41.855698Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [23:1233:2609], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2025-04-07T13:56:41.879222Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:41.879347Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.879832Z node 23 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T13:56:41.881740Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-07T13:56:41.886008Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:41.886132Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.886915Z node 23 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:56:41.887119Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-04-07T13:56:41.887221Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:56:41.887356Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-07T13:56:41.887430Z node 23 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:41.888068Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:56:41.899127Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:56:41.899368Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:56:41.899731Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:41.899851Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.900037Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:56:41.900401Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1284:3002] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|e13b04b1-883ff5c-dc16cedd-2f74b7cf_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-04-07T13:56:41.900611Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][23:1281:3002] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:56:41.900888Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1284:3002] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T13:56:41.901303Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:41.901347Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.901472Z node 23 :PERSQUEUE DEBUG: Answer ok 
topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:56:41.901623Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:41.901659Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:41.901754Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-04-07T13:56:41.902029Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-04-07T13:56:41.902163Z node 23 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-07T13:56:41.902384Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-07T13:56:41.964347Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-04-07T13:56:41.965830Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6504 2025-04-07T13:56:41.966395Z node 23 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:56:41.978695Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:56:41.978910Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:56:41.979109Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-04-07T13:56:41.979599Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:56:41.980016Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1284:3002] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6504 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-07T13:56:41.980196Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][23:1281:3002] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:56:41.980401Z node 23 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-04-07T13:56:41.980487Z node 23 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-04-07T13:56:41.991645Z node 23 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-07T13:56:42.489650Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:56:42.489738Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:56:42.489862Z node 23 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:56:42.489934Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-04-07T13:56:42.489982Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:56:42.490056Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-04-07T13:56:42.490097Z node 23 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:56:42.490273Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartition >> TRtmrTest::CreateWithoutTimeCastBuckets >> TxUsage::TwoSessionOneConsumer [GOOD] >> TxUsage::WriteToTopic_Demo_4 [GOOD] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpFlipJoin::RightOnly_3 >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLEngineFlatTest::TestPureProgram >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TxUsage::WriteToTopic_Demo_37 >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> TMiniKQLEngineFlatTest::TestEraseRow >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TxUsage::WriteToTopic_Two_WriteSession [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> 
TMiniKQLEngineFlatTest::TestSelectRangeToExclusive >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TMiniKQLProtoTestYdb::TestCellsFromTuple >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TxUsage::WriteToTopic_Demo_5 >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 29289, MsgBus: 14247 2025-04-07T13:56:13.508332Z node 1 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569737688280901:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:13.516626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001128/r3tmp/tmpVC86LQ/pdisk_1.dat 2025-04-07T13:56:14.015234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:14.029448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:14.029542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:14.034204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29289, node 1 2025-04-07T13:56:14.153248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:14.153278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:14.153285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:14.153370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14247 TClient is connected to server localhost:14247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:14.751464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:14.774036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:14.783954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:14.930393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:15.111981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.203399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:17.242514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754868151683:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.255012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.477326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.552857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.592085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.661536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.730047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.804767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.907278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754868152201:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.907347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.907699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754868152206:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.910996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:17.923164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569754868152208:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:18.571963Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569737688280901:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:18.573132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:19.225046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.254884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8734, MsgBus: 29345 2025-04-07T13:56:21.680742Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569774751992192:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:21.686237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001128/r3tmp/tmpS6PxEJ/pdisk_1.dat 2025-04-07T13:56:21.895665Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:21.932244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:21.932330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:21.934039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8734, node 2 2025-04-07T13:56:22.109185Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:22.109207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:22.109213Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:22.109307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29345 TClient is connected to server localhost:29345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:56:22.654245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.689184Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.754601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.889306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:23.203598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESch ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:39.413739Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569847974642684:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.413830Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.456264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.492509Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.518867Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.551034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.587586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.656399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.699461Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569847974643180:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.699536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.699574Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569847974643185:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.703051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:39.717631Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569847974643187:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:40.996357Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:41.027031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:41.052500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:41.141063Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569835089739176:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:41.141160Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13686, MsgBus: 23847 2025-04-07T13:56:42.817922Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569864346943067:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:42.818079Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001128/r3tmp/tmphVy1Zg/pdisk_1.dat 2025-04-07T13:56:42.918065Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:42.943744Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:42.943844Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:42.945329Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13686, node 5 2025-04-07T13:56:42.994988Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:42.995010Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:42.995018Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:42.995120Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23847 TClient is connected to server localhost:23847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:43.470634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:43.486743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:43.579448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:43.718476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:43.792045Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:45.936718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569877231846663:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:45.936793Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:45.980318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.009443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.039530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.066276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.094197Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.125761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.166551Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569881526814457:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.166605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569881526814462:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.166644Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.169867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:46.178974Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569881526814464:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:47.272566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:56:48.527950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:56:48.528070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:48.528109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:56:48.528142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:56:48.528187Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:56:48.528213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:56:48.532812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:48.534756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:56:48.674540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:56:48.674596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:48.757023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:56:48.757206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:56:48.765353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:56:48.803073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:56:48.814006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:56:48.835968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:48.853491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:56:48.903536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:48.953784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:48.953873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:48.954014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:56:48.954062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:48.954099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:56:48.964769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:56:48.971125Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:56:49.121918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:56:49.136924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.157443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:56:49.168816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:56:49.168939Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.171451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:49.171552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:56:49.194636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.194752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:56:49.194788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:56:49.194819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:56:49.197125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.197172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:56:49.197203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:56:49.198892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.198935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.198970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:49.199048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:56:49.212411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:56:49.214781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:56:49.214977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:56:49.224850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:49.225021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:56:49.225081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:49.225331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:56:49.225410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:49.253119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:49.253285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:56:49.255864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:49.255928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:49.256111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:49.256152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:56:49.261286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.261374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:56:49.261498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:56:49.261548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:49.261603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:56:49.261649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:49.261689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:56:49.261728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:56:49.261817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:56:49.261851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:56:49.261903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:56:49.263759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:49.263883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:49.263923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:56:49.263960Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2025-04-07T13:56:49.263991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:49.264139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... alPathId: 1] was 2 2025-04-07T13:56:49.394115Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:56:49.394413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.394521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.394547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:56:49.394572Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-04-07T13:56:49.394596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:56:49.394676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-07T13:56:49.397455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.397508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId#100:0 at tablet72057594046678944 2025-04-07T13:56:49.397556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-07T13:56:49.399183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T13:56:49.399286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T13:56:49.400848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.400918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:56:49.400963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-07T13:56:49.403673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:56:49.405450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T13:56:49.405578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 
for txId: 100 at step: 5000002 2025-04-07T13:56:49.405913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:49.406039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:56:49.406091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-04-07T13:56:49.406194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T13:56:49.406355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:49.406405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:56:49.408231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:49.408279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:49.408411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:56:49.408481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:49.408526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T13:56:49.408559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T13:56:49.408772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T13:56:49.408810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T13:56:49.408888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T13:56:49.409003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:56:49.409050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T13:56:49.409086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T13:56:49.409116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T13:56:49.409142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T13:56:49.409205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:56:49.409250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T13:56:49.409298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, 
LocalPathId: 1], 5 2025-04-07T13:56:49.409324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:56:49.409935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.410023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.410055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:56:49.410089Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:56:49.410129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:56:49.411135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.411199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T13:56:49.411221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T13:56:49.411246Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:56:49.411267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:56:49.411331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T13:56:49.413990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T13:56:49.414187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T13:56:49.414407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T13:56:49.414444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T13:56:49.414864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T13:56:49.414931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T13:56:49.414958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 100 2025-04-07T13:56:49.429358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:56:49.429583Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 251us result status StatusSuccess 2025-04-07T13:56:49.429841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BsControllerConfig::DeleteStoragePool [GOOD] |87.9%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:191:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:191:2075] 2025-04-07T13:56:29.183687Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:29.188177Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:29.189970Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:29.190445Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:29.191098Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:29.191141Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:29.191362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:29.199765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:29.199915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:29.200084Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:29.200172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:29.200254Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:29.200323Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2025-04-07T13:56:29.212006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:29.212160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:29.224239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:29.224374Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:29.224445Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:29.224527Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:29.224620Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:29.224709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:29.224760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:29.224811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:29.235522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:29.235670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:29.236767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:29.236817Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:29.236943Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:29.247249Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is 
[0:0:0] sender: [11:204:2066] recipient: [11:193:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:193:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:193:2075] 2025-04-07T13:56:31.077413Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:31.078020Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:31.079050Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:31.079191Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:31.079616Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:31.079633Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:31.079752Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:31.087661Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:31.087784Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:31.087893Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:31.087972Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:31.088051Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:31.088115Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2025-04-07T13:56:31.099387Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:31.099633Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:31.110892Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:31.111011Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:31.111088Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:31.111166Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:31.111265Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:31.111303Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:31.111335Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:31.111370Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:31.121995Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:31.122116Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:31.123117Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:31.123177Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:31.123296Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:31.123569Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2864:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2864:2115] Leader for TabletID 72057594037932033 is [21:2966:2117] sender: [21:2967:2106] recipient: [21:2864:2115] 2025-04-07T13:56:33.148619Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:33.149549Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:33.151021Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:33.151393Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:33.152411Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:33.152447Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:33.152700Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:33.163192Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:33.163318Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:33.163441Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:33.163546Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:33.163639Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx ... 
pp:355} Create new pdisk PDiskId# 87:1002 Path# /dev/disk1 2025-04-07T13:56:41.646779Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1000 Path# /dev/disk1 2025-04-07T13:56:41.646803Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1000 Path# /dev/disk3 2025-04-07T13:56:41.646826Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-04-07T13:56:41.646849Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1000 Path# /dev/disk1 2025-04-07T13:56:41.646873Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-04-07T13:56:41.646895Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1000 Path# /dev/disk1 2025-04-07T13:56:41.646919Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-04-07T13:56:41.646941Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1000 Path# /dev/disk1 2025-04-07T13:56:41.646966Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk2 2025-04-07T13:56:41.646988Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1000 Path# /dev/disk3 2025-04-07T13:56:41.647011Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-04-07T13:56:41.647034Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk1 2025-04-07T13:56:41.647058Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1000 Path# /dev/disk3 2025-04-07T13:56:41.647081Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-04-07T13:56:41.647101Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk1 2025-04-07T13:56:41.647125Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk2 2025-04-07T13:56:41.647148Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1001 Path# /dev/disk2 2025-04-07T13:56:41.647169Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1000 Path# /dev/disk3 2025-04-07T13:56:41.647193Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-04-07T13:56:41.647215Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1001 Path# /dev/disk1 2025-04-07T13:56:41.647238Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2025-04-07T13:56:41.647261Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1001 Path# /dev/disk3 2025-04-07T13:56:41.647283Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1001 Path# /dev/disk2 2025-04-07T13:56:41.647305Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 114:1002 
Path# /dev/disk3 2025-04-07T13:56:41.647328Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk1 2025-04-07T13:56:41.647351Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk2 2025-04-07T13:56:41.647376Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1001 Path# /dev/disk2 2025-04-07T13:56:41.647398Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1000 Path# /dev/disk3 2025-04-07T13:56:41.647420Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1002 Path# /dev/disk3 2025-04-07T13:56:41.647444Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1001 Path# /dev/disk3 2025-04-07T13:56:41.647467Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1001 Path# /dev/disk2 2025-04-07T13:56:41.647496Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1001 Path# /dev/disk2 2025-04-07T13:56:41.647519Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2025-04-07T13:56:41.647544Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1002 Path# /dev/disk2 2025-04-07T13:56:41.647566Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1000 Path# /dev/disk3 2025-04-07T13:56:41.647590Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1002 Path# /dev/disk3 2025-04-07T13:56:41.647613Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1001 Path# /dev/disk2 2025-04-07T13:56:41.647636Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk2 2025-04-07T13:56:41.647661Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1002 Path# /dev/disk2 2025-04-07T13:56:41.647683Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1000 Path# /dev/disk3 2025-04-07T13:56:41.647706Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1001 Path# /dev/disk3 2025-04-07T13:56:41.647729Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1001 Path# /dev/disk2 2025-04-07T13:56:41.647751Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1001 Path# /dev/disk2 2025-04-07T13:56:41.647772Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 94:1002 Path# /dev/disk2 2025-04-07T13:56:41.647795Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1000 Path# /dev/disk3 2025-04-07T13:56:41.647817Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1001 Path# /dev/disk3 2025-04-07T13:56:41.647841Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1001 Path# /dev/disk2 2025-04-07T13:56:41.647863Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1002 Path# /dev/disk2 2025-04-07T13:56:41.647884Z node 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1000 Path# /dev/disk3 2025-04-07T13:56:41.647908Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1001 Path# /dev/disk2 2025-04-07T13:56:41.647929Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1000 Path# /dev/disk3 2025-04-07T13:56:41.647951Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1001 Path# /dev/disk1 2025-04-07T13:56:41.647973Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-04-07T13:56:41.647996Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk1 2025-04-07T13:56:41.648018Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1002 Path# /dev/disk3 2025-04-07T13:56:41.648040Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1002 Path# /dev/disk3 2025-04-07T13:56:41.648061Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1000 Path# /dev/disk2 2025-04-07T13:56:41.648083Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-04-07T13:56:41.648105Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk1 2025-04-07T13:56:41.648127Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1002 Path# /dev/disk2 2025-04-07T13:56:41.648151Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1001 Path# /dev/disk3 2025-04-07T13:56:41.648174Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2025-04-07T13:56:41.648197Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-04-07T13:56:41.648222Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk1 2025-04-07T13:56:41.648246Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-04-07T13:56:41.648269Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk1 2025-04-07T13:56:41.648293Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 93:1002 Path# /dev/disk3 2025-04-07T13:56:41.648316Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk2 2025-04-07T13:56:41.648341Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2025-04-07T13:56:41.648365Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-04-07T13:56:41.648390Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk1 2025-04-07T13:56:41.648413Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-04-07T13:56:41.648438Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk1 2025-04-07T13:56:41.648460Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1002 Path# /dev/disk3 2025-04-07T13:56:41.648485Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2025-04-07T13:56:41.663361Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T13:56:41.734694Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-04-07T13:56:41.788212Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Describe::Statistics [GOOD] >> Describe::Location >> TxUsage::WriteToTopic_Demo_28 [GOOD] >> TCdcStreamTests::VirtualTimestamps |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCdcStreamTests::Basic |88.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> TxUsage::WriteToTopic_Demo_29 >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27676, MsgBus: 22504 2025-04-07T13:56:14.414506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569742652708676:2114];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:14.431048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001119/r3tmp/tmpYdEQsK/pdisk_1.dat 2025-04-07T13:56:15.039339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:15.039440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:15.049471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:15.051351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27676, node 1 2025-04-07T13:56:15.297162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:15.297182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:15.297190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:15.297292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22504 TClient is connected to server localhost:22504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:16.183150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:56:16.211848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:16.363487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:16.564353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:16.675679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:18.652038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569759832579502:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:18.729860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.000688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.053503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.089690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.133565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.171932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.211486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.278779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569764127547294:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.278874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.279169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569764127547299:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.283516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:19.301266Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:56:19.301463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569764127547301:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:19.415833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569742652708676:2114];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:19.415905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:20.711932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.751477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.791555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.828824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.869755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.897664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25735, MsgBus: 7296 2025-04-07T13:56:22.951550Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569776924323819:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:22.951600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001119/r3tmp/tmpQqot2P/pdisk_1.dat 2025-04-07T13:56:23.115924Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:23.134629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:23.134710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:23.136266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25735, node 2 2025-04-07T13:56:23.280420Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:23.280443Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:23.280450Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:23.280547Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7296 TClient is connected to server localhost:7296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:56:23.779891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.151421Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.195343Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.228422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.277299Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.310667Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.342781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.375406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:39.413820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569851948735109:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.413921Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.414100Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490569851948735114:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:39.417545Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:39.427526Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490569851948735116:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:40.543162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:40.575526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:40.835593Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490569834768863738:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:40.835648Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61591, MsgBus: 20348 2025-04-07T13:56:43.299447Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490569868669968518:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:43.299523Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001119/r3tmp/tmpSK6uYD/pdisk_1.dat 2025-04-07T13:56:43.417122Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:43.439853Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:43.439944Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:43.441187Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61591, node 5 2025-04-07T13:56:43.483531Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:43.483570Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:43.483582Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:43.483713Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20348 TClient is connected to server localhost:20348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:56:43.976312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:43.994580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:44.053693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:44.216383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:44.286471Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:46.812217Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569881554872128:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.812325Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.831760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.866241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.896486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.927093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.960407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:47.028295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:47.070187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569885849839915:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:47.070291Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:47.070639Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569885849839920:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:47.074830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:47.085823Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569885849839922:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:48.264361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:48.299571Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569868669968518:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:48.299642Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:48.337559Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit >> KqpFlipJoin::RightOnly_3 [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit [GOOD] >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionWithRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 13993, MsgBus: 30955 2025-04-07T13:56:13.793918Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569737592826534:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:13.794165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001124/r3tmp/tmpm3CtBB/pdisk_1.dat 2025-04-07T13:56:14.287285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:14.287405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:14.299788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:14.303147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13993, node 1 2025-04-07T13:56:14.401249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:14.401274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:14.401281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:14.401385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30955 TClient is connected to 
server localhost:30955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:15.064067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.093789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:15.104254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.315727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.533516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.615678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:17.513772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754772697302:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.521196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.556499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.599116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.653208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.702995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.779378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.859518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.932385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754772697805:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.932459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.932657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569754772697810:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.937096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:17.948972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569754772697812:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:18.769983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569737592826534:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:18.770135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:19.102492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.187513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.239607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.280539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13030, MsgBus: 10496 2025-04-07T13:56:21.027898Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569774784380534:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:21.027951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001124/r3tmp/tmpqctEdk/pdisk_1.dat 2025-04-07T13:56:21.265563Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:21.295481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:21.295582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:21.300824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13030, node 2 2025-04-07T13:56:21.421286Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:21.421317Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:21.421325Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:21.421419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10496 TClient is connected to server localhost:10496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:22.060309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:22.066286Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:56:22.077478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, op ... e, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:44.594017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:44.625859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:44.700287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:44.756655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569870872689863:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:44.756748Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569870872689869:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:44.757936Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:44.759922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:44.767738Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569870872689871:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:45.848560Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569853692818468:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:45.848777Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:45.912510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:45.942514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:45.975224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.009479Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25273, MsgBus: 8368 2025-04-07T13:56:47.946842Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490569883376323093:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:47.946998Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001124/r3tmp/tmp4Gm1sO/pdisk_1.dat 2025-04-07T13:56:48.156752Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:48.183012Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:48.183101Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:48.184987Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25273, node 6 2025-04-07T13:56:48.230257Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:48.230280Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:48.230293Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:48.230409Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8368 TClient is connected to server localhost:8368 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:48.770026Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:48.783905Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:48.869816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:49.087404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:49.174608Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.372412Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569904851161291:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:52.372515Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:52.423159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.453951Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.485039Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.515557Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.539937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.638466Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:52.712620Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569904851161796:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:52.712724Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:52.712778Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569904851161801:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:52.717038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:52.728826Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490569904851161803:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:52.945565Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490569883376323093:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:52.945643Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:53.742561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:53.775438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:53.805578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:53.838281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> BasicUsage::WriteRead [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction [GOOD] >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> KqpFlipJoin::LeftSemi_3 [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> TxUsage::WriteToTopic_Demo_1 |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> TxUsage::WriteToTopic_Demo_37 [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TxUsage::WriteToTopic_Demo_38 >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::Negative >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 9616, MsgBus: 17247 2025-04-07T13:56:15.029376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569743362993008:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:15.029443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001118/r3tmp/tmppFHLiB/pdisk_1.dat 2025-04-07T13:56:15.718129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:15.718204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:15.740569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:15.767650Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9616, node 1 2025-04-07T13:56:15.993213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:15.993233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:15.993238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:15.993307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17247 TClient is connected to server localhost:17247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:56:16.709285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:16.735275Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:56:16.759727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:16.909897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.063202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:17.129795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:19.138924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569760542863740:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.156491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.190420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.256116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.292837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.367287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.406234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.493617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.553171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569764837831540:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.553242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.553435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569764837831545:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:19.557078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:19.570985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569764837831547:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:20.024818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569743362993008:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:20.069488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:20.828704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.869300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.928217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.953751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Key","Value"],"E-Rows":"4","Table":"FJ_Table_1","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1744034181715,"Host":"ghrun-lqtfcktism","ResultRows":2,"ResultBytes":26,"OutputRows":2,"StartTimeMs":1744034181714,"IngressRows":2,"ComputeTimeUs":79,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":0,"Bytes":26}],"WaitInputTimeUs":1060,"TaskId":1,"OutputBytes":26}],"PeakMemoryUsageBytes":65536,"DurationUs":1000,"CpuTimeUs":1036}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":26,"Max":26,"Min":26}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"WaitTimeUs":{"Count":1,"Sum":1076,"Max":1076,"Min":1076},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"BaseTimeMs":1744034181713,"WaitInputTimeUs":{"Count":1,"Sum":1060,"Max":1060,"Min":1060},"OutputBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"CpuTimeUs":{"Count":1,"Sum":473,"Max":473,"Min":473},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64},"WaitTimeUs":{"Count":1,"Sum":1076,"Max":1076,"Min":1076},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":26,"FinishTimeMs":1744034181719,"Host":"ghrun-lqtfcktism","OutputRows":2,"StartTimeMs":1744034181715,"InputRows":2,"ComputeTime ... 
suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.630601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.669902Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.733083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:46.791299Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569881677799088:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.791382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.791483Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490569881677799093:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:46.795211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:46.804376Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490569881677799095:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:48.004956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:48.098287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:48.137392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:48.175760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:48.309883Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490569868792894979:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:48.309956Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25200, MsgBus: 24791 2025-04-07T13:56:50.471217Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490569897646148604:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:50.471370Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001118/r3tmp/tmp5hvGHW/pdisk_1.dat 2025-04-07T13:56:50.577555Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:50.613851Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:50.613963Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:50.615630Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25200, node 6 2025-04-07T13:56:50.670583Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:50.670612Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:50.670622Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:50.670743Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24791 TClient is connected to server localhost:24791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:51.275317Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:51.293849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:51.371468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:51.552097Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:51.629265Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:54.360131Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569914826019495:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:54.360246Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:54.394365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.424766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.456012Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.490910Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.530297Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.577355Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:54.632199Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569914826019992:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:54.632287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490569914826019997:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:54.632286Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:54.635908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:54.645790Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490569914826019999:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:56:55.469521Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490569897646148604:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:55.469663Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:55.610970Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:55.680499Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:55.711378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:55.748509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::RebootSchemeShard |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> KqpJoin::IdxLookupSelf |88.0%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-04-07T13:54:36.480395Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:36.594164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:36.613536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:36.613628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:36.613900Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:36.621429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:36.621631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:36.621873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:36.621996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:36.622118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:36.622255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:36.622355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:36.622462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:36.622561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:36.622675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.622801Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:36.622901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:36.645632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:36.645730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:36.651334Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:36.651498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:36.651562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:36.651763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.651896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:36.651960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:36.652011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:36.652097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:36.652154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:36.652206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:36.652253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:36.652440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:36.652505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:36.652543Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:36.652572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:36.652730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:36.652793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:36.652854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:36.652908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:36.652993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:36.653047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:36.653097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:36.653146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:36.653187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:36.653231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:36.653433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-07T13:54:36.653519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-07T13:54:36.653614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T13:54:36.653696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-07T13:54:36.653837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:36.653899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:36.653933Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:36.654139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:36.654182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.654212Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:36.654354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:36.654398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:36.654430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:36.654607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:36.654653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:36.654680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
e_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24778; 2025-04-07T13:56:58.090624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51671; 2025-04-07T13:56:58.090891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=190; 2025-04-07T13:56:58.092272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=207; 2025-04-07T13:56:58.092354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1405; 2025-04-07T13:56:58.092497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2025-04-07T13:56:58.092639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:56:58.092717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=175; 2025-04-07T13:56:58.092902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=143; 2025-04-07T13:56:58.093118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=168; 2025-04-07T13:56:58.093958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=772; 2025-04-07T13:56:58.095144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1123; 2025-04-07T13:56:58.095230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=30; 2025-04-07T13:56:58.095312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=44; 2025-04-07T13:56:58.095357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T13:56:58.095412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-07T13:56:58.095461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-07T13:56:58.095539Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-07T13:56:58.095598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T13:56:58.095697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2025-04-07T13:56:58.095740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T13:56:58.095802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-07T13:56:58.095843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=63135; 2025-04-07T13:56:58.096041Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=62;blobs=124;rows=1845000;bytes=117738232;raw_bytes=191860690; inactive portions=123;blobs=246;rows=3204603;bytes=206446212;raw_bytes=330422736; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:56:58.096164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:56:58.096229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:56:58.096276Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:56:58.096365Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:56:58.096416Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:58.096481Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:58.096784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:58.096865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:58.096919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:58.096986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:58.097044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:58.097115Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:58.097187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:58.097235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:58.098221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:58.098339Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2318:4280];tablet_id=9437184;parent=[1:2283:4252];fline=manager.h:99;event=ask_data;request=request_id=343;1={portions_count=185};; 2025-04-07T13:56:58.101382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:56:58.101567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:56:58.101608Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:56:58.101648Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:56:58.101702Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:56:58.101787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:56:58.102070Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:56:58.102147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:56:58.102212Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:56:58.102281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:58.102326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:56:58.102380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:56:58.102467Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:56:58.102528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:56:58.103285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=185;path_id=1; 2025-04-07T13:56:58.178125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=185;path_id=1; 2025-04-07T13:56:58.188704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:56:58.188820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2283:4252];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> KqpJoin::ExclusionJoin >> KqpIndexLookupJoin::LeftOnly-StreamLookup >> KqpIndexLookupJoin::MultiJoins >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpJoin::RightSemiJoin_FullScan >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore >> KqpJoinOrder::Chain65Nodes >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2025-04-07T13:54:38.696492Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T13:54:38.779313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:38.801284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T13:54:38.801355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:38.801567Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T13:54:38.809024Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:38.809205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:38.809429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:38.809536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:54:38.809676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:38.809804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:38.809913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:38.810010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:38.810102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:38.810215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.810345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:38.810438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:38.833671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T13:54:38.833737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T13:54:38.835874Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T13:54:38.835990Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:38.836032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:38.836202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.836383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:38.836450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:38.836488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:38.836570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:38.836629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:38.836690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:38.836732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:38.836915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:38.836985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:38.837025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:54:38.837053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T13:54:38.837150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T13:54:38.837203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:54:38.837264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:54:38.837308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T13:54:38.837383Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:54:38.837422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:54:38.837448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T13:54:38.837512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:54:38.837554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:54:38.837581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T13:54:38.837764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T13:54:38.837838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T13:54:38.837927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-07T13:54:38.837997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-07T13:54:38.838179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:54:38.838235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:54:38.838266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T13:54:38.838446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:54:38.838494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.838526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T13:54:38.838673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:54:38.838715Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:54:38.838746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T13:54:38.838927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:54:38.838973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:54:38.839000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... te_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=29478; 2025-04-07T13:57:00.412170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=57664; 2025-04-07T13:57:00.412405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=168; 2025-04-07T13:57:00.414064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=189; 2025-04-07T13:57:00.414147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1692; 2025-04-07T13:57:00.414287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=80; 2025-04-07T13:57:00.414410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T13:57:00.414471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=139; 2025-04-07T13:57:00.414642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=130; 2025-04-07T13:57:00.414808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=125; 2025-04-07T13:57:00.415545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=687; 2025-04-07T13:57:00.416651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1047; 2025-04-07T13:57:00.416813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=85; 
2025-04-07T13:57:00.416904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=45; 2025-04-07T13:57:00.416950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T13:57:00.416994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-07T13:57:00.417054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T13:57:00.417152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-07T13:57:00.417195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-07T13:57:00.417284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-04-07T13:57:00.417323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T13:57:00.417410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2025-04-07T13:57:00.417452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69843; 2025-04-07T13:57:00.417648Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=62;blobs=124;rows=1845000;bytes=117738232;raw_bytes=191860690; inactive portions=123;blobs=246;rows=3204603;bytes=206446212;raw_bytes=330422736; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T13:57:00.417766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2289:4258];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T13:57:00.417842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2289:4258];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T13:57:00.417905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T13:57:00.418011Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T13:57:00.418059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:57:00.418130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:57:00.418388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:57:00.418457Z 
node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:57:00.418517Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:57:00.418571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:57:00.418614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:57:00.418662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:57:00.418749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:57:00.418801Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:57:00.419595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:57:00.419699Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2324:4286];tablet_id=9437184;parent=[1:2289:4258];fline=manager.h:99;event=ask_data;request=request_id=343;1={portions_count=185};; 2025-04-07T13:57:00.421432Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T13:57:00.425303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T13:57:00.425354Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T13:57:00.425391Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T13:57:00.425453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:57:00.425532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:57:00.425794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=21; 2025-04-07T13:57:00.425873Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T13:57:00.425922Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:57:00.426000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:57:00.426048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:57:00.426104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:57:00.426188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:57:00.426248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:57:00.427360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=185;path_id=1; 2025-04-07T13:57:00.532386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=185;path_id=1; 2025-04-07T13:57:00.545112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T13:57:00.545210Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:2289:4258];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TxUsage::WriteToTopic_Demo_29 [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TxUsage::WriteToTopic_Demo_6 >> TxUsage::WriteToTopic_Demo_30 >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.0%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Describe::Location [GOOD] >> Describe::DescribePartitionPermissions |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> KqpJoin::IdxLookupSelf [GOOD] >> KqpJoin::JoinAggregateSingleRow >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> KqpJoin::ExclusionJoin [GOOD] >> KqpJoin::FullOuterJoin >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemi >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> TxUsage::WriteToTopic_Demo_11 [GOOD] >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> 
TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit [GOOD] >> KqpIndexLookupJoin::MultiJoins [GOOD] >> KqpIndexLookupJoin::RightSemi >> TxUsage::WriteToTopic_Demo_12 >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-04-07T13:53:37.921023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569069857705950:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:37.921069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00135f/r3tmp/tmpOPezBm/pdisk_1.dat 2025-04-07T13:53:38.593605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:38.593720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:38.601318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:38.624999Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61780, node 1 2025-04-07T13:53:38.893065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:38.893092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:38.893098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:38.893184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:38.973299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:38.989317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:39.011865Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490569078447641135:2284] 2025-04-07T13:53:39.012137Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:39.029116Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:39.029193Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:53:39.036970Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:53:39.037035Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:53:39.037080Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:53:39.037385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:53:39.100650Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:53:39.100830Z node 1 :TX_DATASHARD DEBUG: 
72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:53:39.100886Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490569078447641151:2285] 2025-04-07T13:53:39.100908Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:39.100916Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:53:39.100928Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:39.101080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:53:39.101151Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:53:39.101190Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:39.101209Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:39.101223Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:39.101235Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:39.107841Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569074152673831:2296], serverId# [1:7490569078447641150:2307], sessionId# [0:0:0] 2025-04-07T13:53:39.108015Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:39.108308Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:39.108444Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T13:53:39.109751Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:39.109831Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:39.109891Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:39.115276Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569078447641165:2315], serverId# [1:7490569078447641167:2317], sessionId# [0:0:0] 2025-04-07T13:53:39.115342Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:39.127885Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744034019161 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490569074152673704 RawX2: 4294969518 } } Step: 1744034019161 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:39.127934Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:39.128545Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:39.128570Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:53:39.128595Z node 1 :TX_DATASHARD DEBUG: Found ready operation 
[1744034019161:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-07T13:53:39.128882Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744034019161:281474976710657 keys extracted: 0 2025-04-07T13:53:39.129005Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:39.130273Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:39.130318Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:39.132713Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:39.133079Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:39.134163Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744034019161 txid# 281474976710657} 2025-04-07T13:53:39.134187Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744034019161} 2025-04-07T13:53:39.134225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:39.135367Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744034019168 2025-04-07T13:53:39.135389Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:39.135418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:39.135432Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:39.135451Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:39.135495Z node 1 :TX_DATASHARD DEBUG: Complete [1744034019161 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490569074152673647:2191], exec latency: 3 ms, propose latency: 6 ms 2025-04-07T13:53:39.135527Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T13:53:39.135567Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:39.135661Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744034019168 2025-04-07T13:53:39.137164Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7490569078447641151:2285][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-07T13:53:39.143740Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T13:53:39.143786Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:39.156859Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:39.156981Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:53:39.157021Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme 
version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-07T13:53:39.157031Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-07T13:53:39.174521Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:39.216767Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:53:39.249441Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-07T13:53:39.257444Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:53:39.257718Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-07T13:53:39.257739Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:53:39.257753Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-07T13:53:39.257770Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:53:39.257798Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:53:39.257828Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2025-04-07T13:53:39.257840Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-07T13:53:39.262813Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:39.262876Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7490569078447641261:2287], now have 1 active actors on pipe 2025-04-07T13:53:39.262889Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:39.262912Z node 1 :PERSQUEUE DEBUG: [PQ: 720751862240378 ... : 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 9 size 52 offset: -1 2025-04-07T13:57:06.372237Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 5 partNo 0 2025-04-07T13:57:06.373249Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 5 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 167 count 1 nextOffset 5 batches 1 2025-04-07T13:57:06.374664Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 0 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000| size 155 WTime 8969 2025-04-07T13:57:06.375150Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:57:06.387716Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 70 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:57:06.387923Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:57:06.388120Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-07T13:57:06.388581Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2025-04-07T13:57:06.388942Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:907:2674] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2025-04-07T13:57:06.389141Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:845:2674] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:57:06.389343Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-07T13:57:06.389417Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2025-04-07T13:57:06.390062Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T13:57:06.514876Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-04-07T13:57:06.515037Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:06.515195Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-04-07T13:57:06.515450Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T13:57:06.519056Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-07T13:57:06.519211Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-07T13:57:06.519331Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:06.519454Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2025-04-07T13:57:06.519707Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:648:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-07T13:57:06.519961Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:845:2674] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-07T13:57:06.520701Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-07T13:57:06.521056Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:845:2674], at tablet# 72075186224037888 2025-04-07T13:57:06.521174Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-07T13:57:06.521443Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:845:2674] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T13:57:06.521789Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:907:2674] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T13:57:06.522184Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:06.522301Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client 
message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:06.522485Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-04-07T13:57:06.522713Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:06.522753Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:06.522867Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-04-07T13:57:06.523102Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-04-07T13:57:06.523257Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-04-07T13:57:06.523498Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-07T13:57:06.524501Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-04-07T13:57:06.525842Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 2025-04-07T13:57:06.526233Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:57:06.538116Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:57:06.538337Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:57:06.538510Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-07T13:57:06.539004Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-04-07T13:57:06.539393Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:907:2674] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-04-07T13:57:06.539611Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:845:2674] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:57:06.539853Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-07T13:57:06.539938Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-04-07T13:57:06.540565Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-07T13:57:06.651534Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:06.651645Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:06.651850Z node 27 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:57:06.651991Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-04-07T13:57:06.652102Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:57:06.652275Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 
2025-04-07T13:57:06.652360Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:57:06.652722Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |88.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin+ColumnStore >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> IndexBuildTest::CancellationNoTable [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit >> TxUsage::WriteToTopic_Demo_23_RestartNo >> BsControllerConfig::MoveGroups [GOOD] |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:55:48.895452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:55:48.895533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.895569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:55:48.895596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:55:48.895633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:55:48.895656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:55:48.895716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:55:48.895983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:55:48.961035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:55:48.961093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:48.972291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T13:55:48.972414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:55:48.972557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:55:48.979503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:55:48.979664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:55:48.980170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:48.980321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:55:48.981813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:48.982927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:48.982975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:48.983088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:55:48.983126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:48.983159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:55:48.983279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:55:48.988619Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:55:49.123009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:55:49.123215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.123408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:55:49.123594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:55:49.123641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.125698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.125808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:55:49.125942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.125987Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:55:49.126035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:55:49.126079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:55:49.127534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.127577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:55:49.127644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:55:49.128903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.128940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.128975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.129015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.132441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:55:49.134026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:55:49.134183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:55:49.135005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:55:49.135124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:55:49.135173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.135391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:55:49.135432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:55:49.135586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.135649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:55:49.137369Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:55:49.137407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:55:49.137549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:55:49.137585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:55:49.137804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:55:49.137843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:55:49.137934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:55:49.137971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.138012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:55:49.138051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:55:49.138088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:55:49.138116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:55:49.138176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:55:49.138211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:55:49.138237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:55:49.140034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.140121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:55:49.140153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:55:49.140191Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:55:49.140237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:55:49.140329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
57:09.841201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:57:09.841280Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:57:09.841330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:57:09.841535Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.858281Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:234:2058] recipient: [2:15:2062] 2025-04-07T13:57:09.877133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:57:09.877374Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.877606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:57:09.877821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:57:09.877870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.887010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:57:09.887150Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:57:09.887375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.887440Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:57:09.887490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:57:09.887530Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:57:09.890035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.890118Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:57:09.890161Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:57:09.892141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.892190Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.892234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:57:09.892282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:57:09.892435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:57:09.894032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:57:09.894231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:57:09.895170Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:57:09.895299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936745 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:57:09.895348Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:57:09.895584Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:57:09.895645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:57:09.895856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:57:09.895939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:57:09.897655Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:57:09.897705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:57:09.897969Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:57:09.898042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:57:09.898416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:57:09.898471Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:57:09.898611Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:57:09.898647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:57:09.898704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:57:09.898748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:57:09.898790Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:57:09.898824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:57:09.898890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:57:09.898927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:57:09.898957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:57:09.899462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:57:09.899550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:57:09.899587Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:57:09.899637Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:57:09.899730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:57:09.899860Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:57:09.902756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:57:09.903264Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-07T13:57:09.903938Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 } 2025-04-07T13:57:09.904158Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 2025-04-07T13:57:09.904311Z node 2 :TX_PROXY DEBUG: actor# [2:264:2256] Bootstrap 2025-04-07T13:57:09.925997Z node 2 :TX_PROXY DEBUG: actor# [2:264:2256] Become StateWork (SchemeCache [2:269:2261]) 2025-04-07T13:57:09.926608Z node 2 :TX_PROXY DEBUG: actor# [2:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:57:09.933797Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check 
failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-04-07T13:57:09.934260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:57:09.934313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T13:57:09.934844Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:57:09.934972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:57:09.935020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:276:2268] TestWaitNotification: OK eventTxId 101 2025-04-07T13:57:09.935548Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-04-07T13:57:09.935662Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2879:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2879:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2879:2115] 2025-04-07T13:56:33.704969Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:33.710168Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:33.712023Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:33.712397Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:33.713016Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:33.713047Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:33.713274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:33.721027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:33.721209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:33.721342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:33.721447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:33.721550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:33.721646Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2025-04-07T13:56:33.733886Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:33.734061Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:33.744945Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:33.745094Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:33.745176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:33.745260Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:33.745336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:33.745371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:33.745411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:33.745453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:33.756282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:33.756442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:33.757681Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:33.757735Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:33.757860Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:33.771165Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } 
Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T13:56:33.772110Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1000 Path# /dev/disk2 2025-04-07T13:56:33.772203Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1001 Path# /dev/disk1 2025-04-07T13:56:33.772230Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1000 Path# /dev/disk3 2025-04-07T13:56:33.772247Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1001 Path# /dev/disk2 2025-04-07T13:56:33.772262Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1002 Path# /dev/disk1 2025-04-07T13:56:33.772281Z node 1 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1000 Path# /dev/disk2 2025-04-07T13:56:33.772296Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1001 Path# /dev/disk1 2025-04-07T13:56:33.772315Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1000 Path# /dev/disk2 2025-04-07T13:56:33.772329Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1001 Path# /dev/disk1 2025-04-07T13:56:33.772344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1000 Path# /dev/disk2 2025-04-07T13:56:33.772371Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1001 Path# /dev/disk1 2025-04-07T13:56:33.772396Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1000 Path# /dev/disk2 2025-04-07T13:56:33.772412Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1001 Path# /dev/disk1 2025-04-07T13:56:33.772426Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1000 Path# /dev/disk2 2025-04-07T13:56:33.772440Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1001 Path# /dev/disk1 2025-04-07T13:56:33.772453Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1000 Path# /dev/disk2 2025-04-07T13:56:33.772466Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1001 Path# /dev/disk1 2025-04-07T13:56:33.772497Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1000 Path# /dev/disk2 2025-04-07T13:56:33.772516Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1001 Path# /dev/disk1 2025-04-07T13:56:33.772532Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1000 Path# /dev/disk2 2025-04-07T13:56:33.772572Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1001 Path# /dev/disk1 2025-04-07T13:56:33.772590Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1000 Path# /dev/disk2 2025-04-07T13:56:33.772607Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1001 Path# /dev/disk1 2025-04-07T13:56:33.772620Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 39:1000 Path# /dev/disk2 2025-04-07T13:56:33.772633Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 39:1001 ... 
69:1000 Path# /dev/disk1 2025-04-07T13:57:02.474879Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 171:1001 Path# /dev/disk1 2025-04-07T13:57:02.474904Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 168:1000 Path# /dev/disk2 2025-04-07T13:57:02.474931Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 169:1001 Path# /dev/disk2 2025-04-07T13:57:02.474954Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 168:1001 Path# /dev/disk1 2025-04-07T13:57:02.474978Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1000 Path# /dev/disk1 2025-04-07T13:57:02.475002Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1000 Path# /dev/disk2 2025-04-07T13:57:02.475026Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 165:1000 Path# /dev/disk2 2025-04-07T13:57:02.475054Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1001 Path# /dev/disk1 2025-04-07T13:57:02.475077Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 156:1000 Path# /dev/disk1 2025-04-07T13:57:02.475100Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1000 Path# /dev/disk3 2025-04-07T13:57:02.475124Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1000 Path# /dev/disk1 2025-04-07T13:57:02.475150Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-04-07T13:57:02.475174Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1001 Path# /dev/disk2 2025-04-07T13:57:02.475200Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-04-07T13:57:02.475223Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 160:1000 Path# /dev/disk2 2025-04-07T13:57:02.475247Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1002 Path# /dev/disk1 2025-04-07T13:57:02.475272Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-04-07T13:57:02.475297Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1001 Path# /dev/disk2 2025-04-07T13:57:02.475320Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1000 Path# /dev/disk3 2025-04-07T13:57:02.475343Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 160:1001 Path# /dev/disk1 2025-04-07T13:57:02.475368Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-04-07T13:57:02.475393Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1001 Path# /dev/disk2 2025-04-07T13:57:02.475419Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-04-07T13:57:02.475442Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
166:1000 Path# /dev/disk2 2025-04-07T13:57:02.475467Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1002 Path# /dev/disk1 2025-04-07T13:57:02.475490Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 157:1000 Path# /dev/disk1 2025-04-07T13:57:02.475514Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 160:1002 Path# /dev/disk3 2025-04-07T13:57:02.475537Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 159:1000 Path# /dev/disk3 2025-04-07T13:57:02.475563Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 159:1001 Path# /dev/disk2 2025-04-07T13:57:02.475590Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 153:1000 Path# /dev/disk1 2025-04-07T13:57:02.475615Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1002 Path# /dev/disk3 2025-04-07T13:57:02.475642Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 155:1000 Path# /dev/disk1 2025-04-07T13:57:02.475668Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 166:1001 Path# /dev/disk3 2025-04-07T13:57:02.475694Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1002 Path# /dev/disk3 2025-04-07T13:57:02.475720Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 159:1002 Path# /dev/disk1 2025-04-07T13:57:02.475746Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 158:1000 Path# /dev/disk3 2025-04-07T13:57:02.475772Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 158:1001 Path# /dev/disk2 2025-04-07T13:57:02.475799Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 157:1001 Path# /dev/disk2 2025-04-07T13:57:02.475823Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 156:1001 Path# /dev/disk2 2025-04-07T13:57:02.475851Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 156:1002 Path# /dev/disk3 2025-04-07T13:57:02.475876Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1001 Path# /dev/disk2 2025-04-07T13:57:02.475902Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 165:1001 Path# /dev/disk1 2025-04-07T13:57:02.475928Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1002 Path# /dev/disk3 2025-04-07T13:57:02.475952Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 169:1002 Path# /dev/disk3 2025-04-07T13:57:02.475978Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 154:1000 Path# /dev/disk1 2025-04-07T13:57:02.476004Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 151:1000 Path# /dev/disk3 2025-04-07T13:57:02.476030Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 153:1001 Path# /dev/disk2 2025-04-07T13:57:02.476053Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
155:1001 Path# /dev/disk3 2025-04-07T13:57:02.476077Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 168:1002 Path# /dev/disk3 2025-04-07T13:57:02.476099Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 165:1002 Path# /dev/disk3 2025-04-07T13:57:02.476124Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 158:1002 Path# /dev/disk1 2025-04-07T13:57:02.476148Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 154:1001 Path# /dev/disk2 2025-04-07T13:57:02.476174Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 152:1000 Path# /dev/disk3 2025-04-07T13:57:02.476197Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 152:1001 Path# /dev/disk1 2025-04-07T13:57:02.476221Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1001 Path# /dev/disk3 2025-04-07T13:57:02.476246Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 152:1002 Path# /dev/disk2 2025-04-07T13:57:02.476271Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 157:1002 Path# /dev/disk3 2025-04-07T13:57:02.476295Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 151:1001 Path# /dev/disk1 2025-04-07T13:57:02.476321Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1002 Path# /dev/disk3 2025-04-07T13:57:02.476346Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 151:1002 Path# /dev/disk2 2025-04-07T13:57:02.476371Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 155:1002 Path# /dev/disk2 2025-04-07T13:57:02.476394Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 153:1002 Path# /dev/disk3 2025-04-07T13:57:02.476419Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 154:1002 Path# /dev/disk3 2025-04-07T13:57:02.476444Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 171:1002 Path# /dev/disk3 2025-04-07T13:57:02.476469Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-04-07T13:57:02.476492Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1002 Path# /dev/disk2 2025-04-07T13:57:02.476515Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 166:1002 Path# /dev/disk1 2025-04-07T13:57:02.476540Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 174:1002 Path# /dev/disk3 2025-04-07T13:57:02.717209Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.246645s 2025-04-07T13:57:02.717458Z node 151 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.246922s 2025-04-07T13:57:02.732115Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-07T13:57:02.810336Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-04-07T13:57:02.825558Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-07T13:57:02.907509Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-04-07T13:57:02.923668Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-07T13:57:03.033998Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-04-07T13:57:03.051644Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> TxUsage::WriteToTopic_Demo_38 [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-04-07T13:53:44.881211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569098444637603:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:44.882013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00134b/r3tmp/tmpPCoENQ/pdisk_1.dat 2025-04-07T13:53:45.679628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:45.721157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:45.721253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:45.730763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9811, node 1 2025-04-07T13:53:46.053288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:46.053306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:46.053312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:46.053392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:46.148192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:53:46.195454Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:53:46.224370Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490569107034572657:2284] 2025-04-07T13:53:46.224619Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:53:46.264804Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:53:46.264907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:53:46.266486Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:53:46.266531Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:53:46.266563Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:53:46.266857Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:53:46.350821Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:53:46.350979Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:53:46.351027Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490569107034572673:2285] 2025-04-07T13:53:46.351047Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:46.351056Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:53:46.351065Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:46.351294Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:53:46.351355Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:53:46.351373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:46.360856Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:46.360903Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:53:46.360959Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:46.361039Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569107034572653:2297], serverId# [1:7490569107034572672:2306], sessionId# [0:0:0] 2025-04-07T13:53:46.361153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:46.361395Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:53:46.361498Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T13:53:46.372813Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:46.374410Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:46.374485Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T13:53:46.382877Z node 1 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490569107034572688:2315], serverId# [1:7490569107034572689:2316], sessionId# [0:0:0] 2025-04-07T13:53:46.387783Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744034026427 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490569102739605226 RawX2: 4294969518 } } Step: 1744034026427 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:53:46.387818Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:46.387976Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:53:46.388038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:46.388054Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:53:46.388098Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744034026427:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-07T13:53:46.388431Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744034026427:281474976710657 keys extracted: 0 2025-04-07T13:53:46.388554Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:53:46.388646Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:53:46.405999Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T13:53:46.408310Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T13:53:46.408716Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:53:46.412065Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744034026426 2025-04-07T13:53:46.412089Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:46.412117Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744034026427 2025-04-07T13:53:46.417017Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744034026427 txid# 281474976710657} 2025-04-07T13:53:46.417060Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744034026427} 2025-04-07T13:53:46.417106Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:46.417164Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:53:46.417181Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:53:46.417201Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T13:53:46.417243Z node 1 :TX_DATASHARD DEBUG: Complete [1744034026427 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490569102739605172:2194], exec latency: 19 ms, 
propose latency: 28 ms 2025-04-07T13:53:46.417270Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T13:53:46.417309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:53:46.418638Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7490569107034572673:2285][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-07T13:53:46.521719Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T13:53:46.521771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:53:46.551419Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:53:46.551549Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-07T13:53:46.551592Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-07T13:53:46.551601Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-07T13:53:46.564751Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:53:46.797674Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:53:46.798646Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-07T13:53:46.798824Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:53:46.798992Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-07T13:53:46.799024Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:53:46.799043Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-07T13:53:46.799056Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:53:46.799077Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:53:46.799214Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2025-04-07T13:53:46.799238Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-07T13:53:46.800062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:46.800108Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7490569107034572773:2287], now have 1 active actors on pipe 2025-04-07T13:53:46.800118Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:53:46.800142Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037 ... 
SHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-04-07T13:57:09.236516Z node 27 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:57:09.236719Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-04-07T13:57:09.431195Z node 27 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715662 at step 7500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 517 RawX2: 115964119447 } } Step: 7500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T13:57:09.431308Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:09.431624Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:57:09.431670Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:57:09.431727Z node 27 :TX_DATASHARD DEBUG: Found ready operation [7500:281474976715662] in PlanQueue unit at 72075186224037888 2025-04-07T13:57:09.432071Z node 27 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 7500:281474976715662 keys extracted: 0 2025-04-07T13:57:09.432282Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T13:57:09.432553Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:57:09.433820Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-04-07T13:57:09.434406Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:57:09.453609Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 7500 txid# 281474976715662} 2025-04-07T13:57:09.453787Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-04-07T13:57:09.453983Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:57:09.454070Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:57:09.454129Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:09.454348Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T13:57:09.454477Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-07T13:57:09.454688Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:09.455717Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-07T13:57:09.456001Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 
2025-04-07T13:57:09.459784Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-07T13:57:09.462680Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-07T13:57:09.462825Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:57:09.477462Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-07T13:57:09.477646Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-07T13:57:09.477787Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:09.477920Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-04-07T13:57:09.478018Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-04-07T13:57:09.478109Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:57:09.478345Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:648:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-07T13:57:09.478485Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:948:2742] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-07T13:57:09.479032Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-07T13:57:09.479254Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:948:2742], at tablet# 72075186224037888 2025-04-07T13:57:09.479308Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-07T13:57:09.479470Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:948:2742] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T13:57:09.479802Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1029:2742] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T13:57:09.480331Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:09.480390Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:09.480494Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 
2025-04-07T13:57:09.480626Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:09.482985Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:09.483108Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-04-07T13:57:09.483371Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-04-07T13:57:09.483560Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-07T13:57:09.483845Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-07T13:57:09.484800Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-04-07T13:57:09.485448Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-04-07T13:57:09.485715Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:57:09.498366Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:57:09.498499Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:57:09.498566Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-07T13:57:09.498783Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-07T13:57:09.499095Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1029:2742] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-04-07T13:57:09.499249Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:948:2742] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-07T13:57:09.499460Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-07T13:57:09.499505Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-07T13:57:09.511109Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-07T13:57:09.713514Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:57:09.713591Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-07T13:57:09.713745Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-04-07T13:57:09.713846Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:57:09.714319Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-07T13:57:09.714443Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:57:09.715338Z node 27 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:57:09.715486Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull >> KqpJoin::JoinAggregateSingleRow [GOOD] >> KqpJoin::JoinAggregate >> TxUsage::WriteToTopic_Demo_39 |88.1%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> KqpJoin::FullOuterJoin [GOOD] >> KqpJoin::FullOuterJoin2 >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |88.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_30 [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> TxUsage::WriteToTopic_Demo_31 >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_1 [GOOD] >> KqpIndexLookupJoin::LeftSemi [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |88.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |88.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_10 >> Describe::DescribePartitionPermissions [GOOD] >> LocalPartition::Basic >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2879:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2879:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2879:2115] 2025-04-07T13:56:34.095017Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 
2025-04-07T13:56:34.100339Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:34.102824Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:34.103371Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:34.104142Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:34.104185Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:34.104459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:34.113434Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:34.113596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:34.113749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:34.113882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:34.113976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:34.114080Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2025-04-07T13:56:34.126198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:34.126385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:34.137285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:34.137440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:34.137533Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:34.137615Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:34.137713Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:34.137774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:34.137845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:34.137900Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:34.148793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:34.148959Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:34.150176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:34.150237Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:34.150372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:34.168837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-04-07T13:56:34.169803Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2025-04-07T13:56:34.169862Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-04-07T13:56:34.169888Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk1 2025-04-07T13:56:34.169920Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk2 2025-04-07T13:56:34.169943Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2025-04-07T13:56:34.169967Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# 
/dev/disk3 2025-04-07T13:56:34.169986Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-04-07T13:56:34.170010Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk1 2025-04-07T13:56:34.170043Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2025-04-07T13:56:34.170075Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-04-07T13:56:34.170112Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk1 2025-04-07T13:56:34.170141Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk3 2025-04-07T13:56:34.170165Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-04-07T13:56:34.170202Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk1 2025-04-07T13:56:34.170228Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk2 2025-04-07T13:56:34.170263Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2025-04-07T13:56:34.170291Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-04-07T13:56:34.170316Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2025-04-07T13:56:34.170345Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2025-04-07T13:56:34.170368Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk2 2025-04-07T13:56:34.170416Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2025-04-07T13:56:34.170443Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk1 2025-04-07T13:56:34.170469Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2025-04-07T13:56:34.170494Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-07T13:56:34.170530Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2025-04-07T13:56:34.170555Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2025-04-07T13:56:34.170579Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-07T13:56:34.170601Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2025-04-07T13:56:34.170625Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2025-04-07T13:56:34.170652Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk1 2025-04-07T13:56:34.170675Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2025-04-07T13:56:34.170709Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-07T13:56:34.170739Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2025-04-07T13:56:34.170773Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk2 2025-04-07T13:56:34.170808Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2025-04-07T13:56:34.170837Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-04-07T13:56:34.170860Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/ ... ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 174:1002 Path# /dev/disk2 2025-04-07T13:57:04.911773Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1002 Path# /dev/disk3 2025-04-07T13:57:04.911796Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1002 Path# /dev/disk3 2025-04-07T13:57:04.911819Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1002 Path# /dev/disk3 2025-04-07T13:57:04.911848Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1000 Path# /dev/disk1 2025-04-07T13:57:04.911872Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1000 Path# /dev/disk1 2025-04-07T13:57:04.911896Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-04-07T13:57:04.911919Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1001 Path# /dev/disk2 2025-04-07T13:57:04.911940Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 172:1002 Path# /dev/disk1 2025-04-07T13:57:04.911961Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1001 Path# /dev/disk2 2025-04-07T13:57:04.911986Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1002 Path# /dev/disk1 2025-04-07T13:57:04.912008Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1002 Path# /dev/disk3 2025-04-07T13:57:04.912030Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 192:1002 Path# /dev/disk2 2025-04-07T13:57:04.912054Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1001 Path# /dev/disk3 2025-04-07T13:57:04.912077Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 189:1002 Path# /dev/disk1 2025-04-07T13:57:04.912101Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-04-07T13:57:04.912124Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1002 Path# /dev/disk2 2025-04-07T13:57:04.912166Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 175:1002 Path# 
/dev/disk3 2025-04-07T13:57:04.912191Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-04-07T13:57:04.912213Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1001 Path# /dev/disk2 2025-04-07T13:57:04.912236Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1002 Path# /dev/disk3 2025-04-07T13:57:05.157809Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.251368s 2025-04-07T13:57:05.158013Z node 161 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.251594s 2025-04-07T13:57:05.184562Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 
4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-04-07T13:57:05.186392Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1000 Path# /dev/disk2 2025-04-07T13:57:05.186481Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1001 Path# /dev/disk1 2025-04-07T13:57:05.186508Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1000 Path# /dev/disk2 2025-04-07T13:57:05.186534Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1001 Path# /dev/disk1 2025-04-07T13:57:05.186560Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1000 Path# /dev/disk2 2025-04-07T13:57:05.186587Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1001 Path# /dev/disk1 2025-04-07T13:57:05.186613Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1000 Path# /dev/disk2 2025-04-07T13:57:05.186639Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1001 Path# /dev/disk1 2025-04-07T13:57:05.186666Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1000 Path# /dev/disk2 2025-04-07T13:57:05.186690Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1001 Path# /dev/disk1 2025-04-07T13:57:05.186715Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1000 Path# /dev/disk2 2025-04-07T13:57:05.186739Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1001 Path# /dev/disk1 2025-04-07T13:57:05.186764Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1000 Path# /dev/disk2 2025-04-07T13:57:05.186788Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1001 Path# /dev/disk1 2025-04-07T13:57:05.186810Z node 161 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1000 Path# /dev/disk3 2025-04-07T13:57:05.186836Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-04-07T13:57:05.186860Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1002 Path# /dev/disk1 2025-04-07T13:57:05.186903Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1000 Path# /dev/disk3 2025-04-07T13:57:05.186931Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-04-07T13:57:05.186956Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1002 Path# /dev/disk1 2025-04-07T13:57:05.186979Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1000 Path# /dev/disk3 2025-04-07T13:57:05.187002Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-04-07T13:57:05.187026Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1002 Path# /dev/disk1 2025-04-07T13:57:05.187180Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-04-07T13:57:05.187216Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 2025-04-07T13:57:05.187246Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-04-07T13:57:05.187293Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2025-04-07T13:57:05.187320Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-04-07T13:57:05.187358Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-04-07T13:57:05.187394Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.2%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> KqpIndexLookupJoin::RightSemi [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |88.2%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> TxUsage::WriteToTopic_Demo_6 [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] >> KqpJoin::RightSemiJoin_SecondaryIndex >> TPersQueueTest::TxCounters [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> 
KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup >> TxUsage::WriteToTopic_Demo_7 >> TxUsage::WriteToTopic_Demo_12 [GOOD] >> TxUsage::WriteToTopic_Demo_13 >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> KqpJoin::FullOuterJoin2 [GOOD] >> KqpJoin::CrossJoinCount >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T13:50:32.457508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568275413494104:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:32.457557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:32.724914Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490568272439859458:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:33.173291Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001483/r3tmp/tmp51w0vL/pdisk_1.dat 2025-04-07T13:50:33.278983Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:50:33.340048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:50:33.497398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:50:33.958657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:33.983653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:33.983744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:33.993880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:33.993956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:34.018891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:50:34.019111Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:50:34.025670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7897, node 1 2025-04-07T13:50:34.389241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001483/r3tmp/yandexDwZaBG.tmp 2025-04-07T13:50:34.389267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/u6pr/001483/r3tmp/yandexDwZaBG.tmp 2025-04-07T13:50:34.389408Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001483/r3tmp/yandexDwZaBG.tmp 2025-04-07T13:50:34.389488Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:50:34.600635Z INFO: TTestServer started on Port 12827 GrpcPort 7897 TClient is connected to server localhost:12827 PQClient connected to localhost:7897 === TenantModeEnabled() = 0 === Init PQ - start server on port 7897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:50:35.626815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:50:35.633123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.633370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T13:50:35.633579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:35.633632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.637735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:35.637957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:35.638094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.638126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:35.638139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-07T13:50:35.638151Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:50:35.647460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.647532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:35.647548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:50:35.656857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:35.656882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-07T13:50:35.656898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:50:35.657280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.657301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.657316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:35.657348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-07T13:50:35.661380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:50:35.669373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-07T13:50:35.669553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T13:50:35.675652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033835719, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:35.675805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7490568279708462046 RawX2: 4294969662 } } Step: 1744033835719 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:50:35.675860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:35.676093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:50:35.676126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:50:35.676283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:50:35.676344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:50:35.683123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:35.683167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:50:35.683307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:35.683322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490568284003429367:2388], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-07T13:50:35.683357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:35.683376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:50:35.683465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:50:35.683482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-07T13:50:35.683504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-07T13:50:35.683521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-07T13:50:35.683532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:50:35.683543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-07T13:50:35.683590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, L ... ccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T13:57:16.647232Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7490570008371175180:2441] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T13:57:16.647272Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:57:16.648937Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-07T13:57:16.649206Z node 32 :PERSQUEUE INFO: new Cookie 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 generated for partition 0 topic 'topic' owner 123 2025-04-07T13:57:16.649928Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 2025-04-07T13:57:16.754259Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-07T13:57:16.780082Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-04-07T13:57:16.791682Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7490570008371175191:2444]: Bootstrap 2025-04-07T13:57:16.795359Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7490570008371175191:2444]: Request location 2025-04-07T13:57:16.800495Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7490570008371175200:2445] connected; active server actors: 1 2025-04-07T13:57:16.803451Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-04-07T13:57:16.803791Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7490570008371175191:2444]: Got location 2025-04-07T13:57:16.804252Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7490570008371175200:2445] disconnected; active server actors: 1 2025-04-07T13:57:16.804308Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7490570008371175200:2445] disconnected no session 2025-04-07T13:57:16.810634Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 grpc read done: success: 0 data: 2025-04-07T13:57:16.810678Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 grpc read failed 2025-04-07T13:57:16.810733Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 grpc closed 2025-04-07T13:57:16.810763Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|e42cc193-79ebfb47-5998a722-f0ba2181_0 is DEAD 2025-04-07T13:57:16.812303Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:57:16.816617Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T13:57:16.816680Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-07T13:57:16.818230Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-04-07T13:57:16.818417Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:52212 2025-04-07T13:57:16.818448Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:52212 proto=topic topic=topic durationSec=0 2025-04-07T13:57:16.818462Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:57:16.818504Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-04-07T13:57:16.820140Z node 32 
:PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-07T13:57:16.820398Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T13:57:16.820428Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:57:16.820452Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T13:57:16.820473Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7490570008371175207:2447] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-07T13:57:16.820492Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-07T13:57:16.822195Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-07T13:57:16.822335Z node 32 :PERSQUEUE INFO: new Cookie 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 generated for partition 0 topic 'topic' owner 123 2025-04-07T13:57:16.823038Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 2025-04-07T13:57:16.850348Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:57:16.858848Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:57:16.865487Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:57:16.865590Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-07T13:57:16.869200Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:57:16.918941Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=MzczYmFhNzktNGFlMjAyYTEtNmE5YzFiNzctZGRlZTJjMjk= TxId: 01jr88n1s0djcjhrq4gxtrh5tf WriteId: {32, 281474976710672} 2025-04-07T13:57:16.956817Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710672}, 100000}, State: StateInit] bootstrapping 
{0, {32, 281474976710672}, 100000} [32:7490570008371175221:2449] 2025-04-07T13:57:16.993471Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976710672}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T13:57:16.993570Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710672}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976710672}, 100000} generation 1 [32:7490570008371175221:2449] 2025-04-07T13:57:16.995206Z node 32 :PERSQUEUE INFO: new Cookie 123|b4d83430-cfc53190-64032e0c-b3c9157a_0 generated for partition {0, {32, 281474976710672}, 100000} topic 'topic' owner 123 2025-04-07T13:57:16.997885Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:16.997948Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:16.997999Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:16.998033Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:17.009331Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:17.048277Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:17.048341Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:17.048370Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:57:17.202034Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read done: success: 0 data: 2025-04-07T13:57:17.202088Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc read failed 2025-04-07T13:57:17.202128Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 grpc closed 2025-04-07T13:57:17.202156Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|e3d40068-324f38ce-e8093369-38a9c4cb_0 is DEAD 2025-04-07T13:57:17.203578Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:57:17.203649Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:57:17.212841Z node 32 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:57:17.212880Z node 32 :IMPORT WARN: Table profiles were not loaded Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=api.grpc.topic.stream_write.uncommitted_bytes: 20796
name=api.grpc.topic.stream_write.uncommitted_messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncommitted_bytes: 20796
name=topic.write.uncommitted_messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=200: 0
    bin=500: 0
    bin=1000: 0
    bin=2000: 3
    bin=5000: 1
    bin=10000: 0
    bin=30000: 0
    bin=60000: 0
    bin=180000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=5120: 0
    bin=10240: 2
    bin=20480: 1
    bin=51200: 0
    bin=102400: 0
    bin=204800: 0
    bin=524288: 0
    bin=1048576: 0
    bin=2097152: 0
    bin=5242880: 0
    bin=10485760: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=5: 0
    bin=10: 0
    bin=20: 0
    bin=50: 0
    bin=100: 0
    bin=500: 0
    bin=1000: 0
    bin=2500: 0
    bin=5000: 0
    bin=10000: 0
    bin=999999: 0
2025-04-07T13:57:17.421744Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 281474976710673 >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup >> TxUsage::WriteToTopic_Demo_23_RestartNo [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionDeadNode >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> KqpJoin::JoinAggregate [GOOD] >> KqpJoin::JoinConvert >> TxUsage::WriteToTopic_Demo_39 [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> TxUsage::WriteToTopic_Demo_31 [GOOD] >> LocalPartition::Basic [GOOD] >> LocalPartition::DescribeBadPartition >> TxUsage::WriteToTopic_Demo_40 |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |88.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:56:52.046021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:56:52.046113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:52.046157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:56:52.046192Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:56:52.046238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:56:52.046263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:56:52.046321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:52.046647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:56:52.111355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:56:52.111413Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:52.121592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:56:52.121737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:56:52.121910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:56:52.128443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:56:52.128624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:56:52.129233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.129434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:56:52.137574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.138834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:52.138894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.139009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:56:52.139053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:52.139126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:56:52.139275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.151516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:56:52.262367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:56:52.262607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.262828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:56:52.263040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:56:52.263095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.265579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.265720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:56:52.265876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.265926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:56:52.265958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:56:52.266011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:56:52.267991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.268044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:56:52.268078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:56:52.269877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.269922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.269976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.270017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.273400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:56:52.275374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:56:52.275588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:56:52.276487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.276626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-07T13:56:52.276699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.276946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:56:52.276992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.277176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:52.277267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:56:52.279365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:52.279403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:52.279558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.279596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:56:52.279817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.279861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:56:52.279962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:56:52.280014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.280054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:56:52.280091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.280125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:56:52.280162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:56:52.280229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:56:52.280286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:56:52.280319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:56:52.287453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:52.287616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:52.287653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:56:52.287699Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:56:52.287747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:52.287852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.153484Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2025-04-07T13:57:23.154294Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2025-04-07T13:57:23.154338Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:57:23.154422Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-04-07T13:57:23.154453Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-04-07T13:57:23.154485Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-04-07T13:57:23.155564Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.155734Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.155804Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-07T13:57:23.155872Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2025-04-07T13:57:23.155961Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2025-04-07T13:57:23.160369Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.160490Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.160528Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-07T13:57:23.160564Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2025-04-07T13:57:23.160601Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2025-04-07T13:57:23.160734Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is 
published: true 2025-04-07T13:57:23.171677Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.173104Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-07T13:57:23.191221Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1400 } } 2025-04-07T13:57:23.191303Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-07T13:57:23.191451Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1400 } } 2025-04-07T13:57:23.191592Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1400 } } 2025-04-07T13:57:23.193472Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 743 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-07T13:57:23.193581Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-07T13:57:23.193828Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 743 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-07T13:57:23.193930Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2025-04-07T13:57:23.194107Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 743 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-07T13:57:23.194217Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.194290Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.194371Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack 
message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-04-07T13:57:23.194449Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2025-04-07T13:57:23.208751Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.211956Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.212575Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-07T13:57:23.212643Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-04-07T13:57:23.212908Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-04-07T13:57:23.212979Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-07T13:57:23.213078Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-04-07T13:57:23.213176Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-07T13:57:23.213261Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:57:23.213321Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-07T13:57:23.213446Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2025-04-07T13:57:23.213541Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2025-04-07T13:57:23.213567Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2025-04-07T13:57:23.213665Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2025-04-07T13:57:23.213700Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2025-04-07T13:57:23.213723Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2025-04-07T13:57:23.213754Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-07T13:57:26.012854Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-07T13:57:26.013297Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 485us result status StatusNameConflict 2025-04-07T13:57:26.013567Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: 
EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-07T13:57:28.423837Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-07T13:57:28.424256Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 446us result status StatusNameConflict 2025-04-07T13:57:28.424466Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> TxUsage::WriteToTopic_Demo_32 >> TxUsage::ReadRuleGeneration |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |88.2%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |88.2%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> KqpJoin::CrossJoinCount [GOOD] >> KqpJoin::ComplexJoin >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup >> KqpFlipJoin::Right_1 >> TxUsage::WriteToTopic_Demo_13 [GOOD] >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> TxUsage::WriteToTopic_Demo_14 >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter 2025-04-07 13:57:23,781 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 13:57:24,746 WARNING 
devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 23069 75.4M 75.5M 21.2M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/001526/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args 24141 3.4G 3.4G 3.4G └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/u6pr/001526/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/ytest.report.trace Test command err: 2025-04-07T13:49:27.164611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:2715:2332], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:49:27.233279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:2728:2389], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.234660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.235354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.238407Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2742:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.240058Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2738:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.240262Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2740:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.240361Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.241088Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.241596Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.241673Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.242408Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2734:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.242532Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.242588Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.243431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2730:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.243692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2736:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.244965Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.245186Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.245228Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.245956Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.246081Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.246173Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.247023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2732:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:49:27.248341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.249109Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:49:27.249164Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:49:27.249198Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:49:27.672415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:49:27.901482Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:49:27.918654Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:49:28.459222Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 11991, node 1 TClient is connected to server localhost:1873 2025-04-07T13:49:28.870997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:49:28.871066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:49:28.871104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:49:28.871820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:51:07.050732Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1077:2194], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.052770Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:07.053665Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.055414Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2327:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.056005Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2329:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.056734Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.056990Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2331:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.057639Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:07.057729Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.058547Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:07.058608Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.059241Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:2749:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.059398Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:07.060594Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:51:07.062196Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:51:07.062975Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2333:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.063726Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2335:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.063925Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2337:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:51:07.064587Z node 15 :METADATA_PROVIDER ERROR: fline=acce ... etadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:06.925258Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:06.925987Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:06.926130Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:06.926177Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:06.926277Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:06.926326Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:53:06.926872Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:53:07.652426Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:07.996238Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:53:08.094375Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:53:09.575957Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 9894, node 19 TClient is connected to server localhost:21436 2025-04-07T13:53:10.586481Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:53:10.586621Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:53:10.586708Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:53:10.587309Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:49.031927Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:2746:2389], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.033723Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.034026Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.037434Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:2307:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.037721Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:2309:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.040725Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.040879Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.041226Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:2313:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.041849Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:2740:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.041984Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.042067Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.042264Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:2311:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.042620Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:2315:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.043811Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.043983Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.044079Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.044133Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.044837Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.044924Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.044978Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.045570Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.048614Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:2319:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.050124Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.050647Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:55:49.051679Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:2317:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.053835Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.053906Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-07T13:55:49.631146Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:50.069180Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-07T13:55:50.099965Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-07T13:55:51.348509Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1642, node 28 TClient is connected to server localhost:17250 2025-04-07T13:55:52.429952Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:52.430074Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:52.430153Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:52.430996Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001526/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001526/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_7 [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin |88.3%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_8 >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 22566, MsgBus: 18767 2025-04-07T13:55:49.120003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569633329170480:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:49.120742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dff/r3tmp/tmpWorMel/pdisk_1.dat 2025-04-07T13:55:49.509479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:49.529858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:49.529986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:49.546553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22566, node 1 2025-04-07T13:55:49.615191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:49.615220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:49.615226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:49.615313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18767 TClient is connected to server localhost:18767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:50.108072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:50.124279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:55:50.139705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:55:50.335837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:50.556743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:50.658039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:53.153436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569646214074060:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:53.166016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:53.198472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.274161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.324015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.374215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.446003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.531362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:53.602757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569650509041860:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:53.602837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:53.603031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569650509041865:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:53.606760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:53.622836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569650509041867:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:55:54.187227Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569633329170480:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:54.187350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:55.166993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.218236Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr88jkneearpkbmjwm8hygda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVkYzgyZjctNzc0ZDU5YmQtMWE0ZjQ4YjItYzYyN2M5MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.221885Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr88jknk5dr6k4k9gmkrqtxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAxMjI4NmQtMTI3YTAyOC1lNjFkZmRiNy01YzJiYjVmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.222904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr88jknk0xmh54dfxhrx30k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI2YjU2NTMtZWJkMTgyMmMtNTM3MThkY2EtYWQ0MTU0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.258073Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr88jknk5dr6k4k9gmkrqtxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAxMjI4NmQtMTI3YTAyOC1lNjFkZmRiNy01YzJiYjVmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.260221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr88jknk0xmh54dfxhrx30k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI2YjU2NTMtZWJkMTgyMmMtNTM3MThkY2EtYWQ0MTU0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.266647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr88jkneearpkbmjwm8hygda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVkYzgyZjctNzc0ZDU5YmQtMWE0ZjQ4YjItYzYyN2M5MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.270673Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr88jknk5dr6k4k9gmkrqtxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAxMjI4NmQtMTI3YTAyOC1lNjFkZmRiNy01YzJiYjVmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.273137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr88jknk0xmh54dfxhrx30k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI2YjU2NTMtZWJkMTgyMmMtNTM3MThkY2EtYWQ0MTU0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.284949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. 
Ctx: { TraceId: 01jr88jkp1e2e3f454mad33eaf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFlNzMxYzUtNTU5MjE1YzYtZTdiODdkM2ItZjlhNGMwZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.286079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr88jkneearpkbmjwm8hygda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVkYzgyZjctNzc0ZDU5YmQtMWE0ZjQ4YjItYzYyN2M5MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.286650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr88jknycgm3ht26gempzp2d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE0Y2M3YTktMzU3MzQ1MjQtNDM5YzliNDAtYmE4OTJmMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:56.309414Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710686. Ctx: { TraceId: 01jr88jkpp9pgz8140gk53exhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M2ZDcyYjItMTRjNGNkZjYtZDFlNjhjMi0zZDI0YmEyNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data ... sion/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.442126Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jr88nfn61209nyew1hdmxbgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.477226Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jr88nfn97ptcvn317vrcqsf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.478076Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jr88nfn61209nyew1hdmxbgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.478903Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jr88nffs28kqhz2cvfwybpsh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWU1NzYxN2ItNDYyNzhmYTktY2UyNWJhN2YtZWVmY2RhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.570870Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jr88nfn97ptcvn317vrcqsf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.614785Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jr88nfrb5f2mrjpvt0ngmnqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk5OTg2ZWMtYjFhMDljNzctZjM4NDE4OWQtNTI3ZjFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.636970Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. 
Ctx: { TraceId: 01jr88nfn97ptcvn317vrcqsf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.647355Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jr88nfr859yn267t21egmf4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.652856Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jr88nfrb848bhtwrm74ga9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.664935Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jr88nfrb5f2mrjpvt0ngmnqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk5OTg2ZWMtYjFhMDljNzctZjM4NDE4OWQtNTI3ZjFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.698598Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jr88nfn97ptcvn317vrcqsf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.733720Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. Ctx: { TraceId: 01jr88nfr859yn267t21egmf4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.743128Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jr88nfwy8nrzz5dxz1egb7py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.747258Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. Ctx: { TraceId: 01jr88nfrb848bhtwrm74ga9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.748256Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721667. Ctx: { TraceId: 01jr88nfr859yn267t21egmf4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.780989Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721668. Ctx: { TraceId: 01jr88nfrb848bhtwrm74ga9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.820739Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721670. 
Ctx: { TraceId: 01jr88nfrb848bhtwrm74ga9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.820899Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721669. Ctx: { TraceId: 01jr88ng0522vg405jdyqd2jw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWU1NzYxN2ItNDYyNzhmYTktY2UyNWJhN2YtZWVmY2RhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.823822Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721671. Ctx: { TraceId: 01jr88nfwy8nrzz5dxz1egb7py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.832163Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721672. Ctx: { TraceId: 01jr88nfwy8nrzz5dxz1egb7py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.832483Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721673. Ctx: { TraceId: 01jr88ng0522vg405jdyqd2jw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWU1NzYxN2ItNDYyNzhmYTktY2UyNWJhN2YtZWVmY2RhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.841741Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721674. Ctx: { TraceId: 01jr88ng0522vg405jdyqd2jw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWU1NzYxN2ItNDYyNzhmYTktY2UyNWJhN2YtZWVmY2RhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.851073Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721675. Ctx: { TraceId: 01jr88ng2tf8fcamv0rsv35sq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.860167Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721677. Ctx: { TraceId: 01jr88ng2tf8fcamv0rsv35sq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTZkMDA5YjEtMzRlMzYzODktZmFlNzRiNmMtZjFmNmMzY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.864513Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721676. Ctx: { TraceId: 01jr88ng310vb06j501hg6bhyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk5OTg2ZWMtYjFhMDljNzctZjM4NDE4OWQtNTI3ZjFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T13:57:30.901884Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721679. Ctx: { TraceId: 01jr88ng3r7sjfg61ad0gwgx6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.906887Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721678. 
Ctx: { TraceId: 01jr88ng3k0e70v6rkzn8vcvj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.910772Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721681. Ctx: { TraceId: 01jr88ng3r7sjfg61ad0gwgx6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0NDkwOGYtM2E2MGY2Y2EtZTlkZjI4YzQtNjRkNDY1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.910945Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721680. Ctx: { TraceId: 01jr88ng310vb06j501hg6bhyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk5OTg2ZWMtYjFhMDljNzctZjM4NDE4OWQtNTI3ZjFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.916202Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721683. Ctx: { TraceId: 01jr88ng310vb06j501hg6bhyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk5OTg2ZWMtYjFhMDljNzctZjM4NDE4OWQtNTI3ZjFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T13:57:30.931710Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721684. Ctx: { TraceId: 01jr88ng3k0e70v6rkzn8vcvj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.970454Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721682. Ctx: { TraceId: 01jr88ng4sc9bsq8b7kgbc6mn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.978652Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721685. Ctx: { TraceId: 01jr88ng3k0e70v6rkzn8vcvj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYwZWRhMDUtYjVhYjE4ZWQtODBkMGQ0NzgtOTFkMTNlY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:57:30.981152Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721686. Ctx: { TraceId: 01jr88ng4sc9bsq8b7kgbc6mn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-07T13:57:30.989658Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721687. Ctx: { TraceId: 01jr88ng4sc9bsq8b7kgbc6mn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NzUzNzAtNDhhZTdlMDEtYmM4MmM4MzUtMTI3NmM2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore >> KqpJoin::JoinConvert [GOOD] >> KqpJoin::JoinDupColumnRight >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_32 [GOOD] >> KqpJoin::ComplexJoin [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> TxUsage::WriteToTopic_Demo_33 >> TxUsage::WriteToTopic_Demo_20_RestartNo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 31275, MsgBus: 24752 2025-04-07T13:57:01.011477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569942747639171:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.011530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ea/r3tmp/tmpMxGPqO/pdisk_1.dat 2025-04-07T13:57:01.375632Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31275, node 1 2025-04-07T13:57:01.423540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:01.427675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.460761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:01.483672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:01.483690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:01.483695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:01.483757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24752 TClient is connected to server localhost:24752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.131519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.151540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:02.302549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:57:02.475505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.561976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.147170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955632542766:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.231064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.570757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.624791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.657903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.716110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.774313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.870428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.930509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955632543265:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.930583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.930940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955632543270:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.934881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:04.951508Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:57:04.951816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569955632543272:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:05.946645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.971988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.003659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.012861Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569942747639171:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.012928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10680, MsgBus: 24786 2025-04-07T13:57:07.444062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569971105594601:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:07.444105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ea/r3tmp/tmp0ntjZz/pdisk_1.dat 2025-04-07T13:57:07.556348Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10680, node 2 2025-04-07T13:57:07.581596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:07.581675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:07.589823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:07.623625Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:07.623654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:07.623663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:07.623756Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24786 TClient is connected to server localhost:24786 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:08.093128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:08.107387Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:08.121264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:08.186758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCrea ... suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.045972Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.110162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.177622Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490570034768083863:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:27.177789Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:27.223206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.320472Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570056242922557:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:27.320577Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:27.321181Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570056242922562:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:27.328502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:27.366401Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570056242922564:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:29.185926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:29.255088Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:29.348139Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24378, MsgBus: 12993 2025-04-07T13:57:32.880774Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570075996868760:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:32.884170Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ea/r3tmp/tmpRrMAHJ/pdisk_1.dat 2025-04-07T13:57:33.066062Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:33.102666Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:33.102797Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:33.104498Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24378, node 5 2025-04-07T13:57:33.177183Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:33.177210Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:33.177220Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:33.177378Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12993 TClient is connected to server localhost:12993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:57:33.882363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.912085Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:34.001896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:34.197887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:34.299533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:36.867740Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570093176739622:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:36.867846Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:36.935313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.992398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.059188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.106135Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.193984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.311578Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.403288Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570097471707428:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:37.403395Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:37.403599Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570097471707433:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:37.407499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:37.421336Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570097471707435:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:37.886587Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570075996868760:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:37.900398Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:38.804591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.881749Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.929696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.007213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.043710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2991, MsgBus: 24646 2025-04-07T13:57:00.329273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569939434026257:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:00.329327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f5/r3tmp/tmpsmskmF/pdisk_1.dat 2025-04-07T13:57:00.692510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:00.726106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:00.726252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.727722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2991, node 1 2025-04-07T13:57:00.788357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.788383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.788390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.788482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24646 TClient is connected to server localhost:24646 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:01.395864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.425936Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:01.445657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.657131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.847297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.949444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.853494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569952318929850:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.862250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.894793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.929929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.968042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.009520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.054371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.102235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.258009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569956613897642:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.258096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.258351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569956613897648:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.263332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:04.282040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569956613897650:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:05.335549Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569939434026257:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:05.336958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:05.426125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.455562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.491187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.520184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.544318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.572268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4250, MsgBus: 20759 2025-04-07T13:57:07.579650Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569968870745520:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:07.579699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f5/r3tmp/tmpdwRalJ/pdisk_1.dat 2025-04-07T13:57:07.716903Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:07.731575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:07.731659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:07.736636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4250, node 2 2025-04-07T13:57:07.833249Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:07.833273Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:07.833282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:07.833378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20759 TClient is connected to server localhost:20759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:08.318464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.315471Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570070134986574:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:30.315595Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:30.316007Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570070134986579:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:30.320201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:30.346796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570070134986581:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:31.871844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:31.921226Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:31.994008Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.073596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.151002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.237510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29304, MsgBus: 61580 2025-04-07T13:57:34.186963Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570084912691293:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:34.187124Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f5/r3tmp/tmpVx4kKa/pdisk_1.dat 2025-04-07T13:57:34.378053Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:34.383500Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:34.383600Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:34.385453Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29304, node 5 2025-04-07T13:57:34.451313Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:34.451340Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:34.451351Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:34.451495Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61580 TClient is connected to server localhost:61580 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:35.029951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:35.049037Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:35.067652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:35.224013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:35.484052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:35.715527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:38.420945Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570102092562022:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.421072Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.467211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.551139Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.627217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.662924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.702855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.743468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.859335Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570102092562526:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.859417Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.859682Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570102092562531:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.865200Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:38.877037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570102092562533:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:39.188928Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570084912691293:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:39.189437Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:40.580266Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.629515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.684805Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.775592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.832963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.871838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TxUsage::WriteToTopic_Demo_14 [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> TxUsage::WriteToTopic_Demo_15 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore >> TxUsage::ReadRuleGeneration [GOOD] >> BsControllerConfig::MergeBoxes [GOOD] >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> TxUsage::WriteToTopic_Demo_40 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 20456, MsgBus: 9196 2025-04-07T13:57:01.037242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569942734557671:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.037312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e2/r3tmp/tmpt3NS5T/pdisk_1.dat 2025-04-07T13:57:01.340926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20456, node 1 2025-04-07T13:57:01.424695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-07T13:57:01.424800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.440481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:01.469258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:01.469309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:01.469318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:01.469437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9196 TClient is connected to server localhost:9196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.044532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.060733Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:02.081028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.207048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.361200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:02.442900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.133491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955619461264:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.133625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.458507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.631370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.663735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.706295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.742204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.818303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.858581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955619461762:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.858643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.858872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955619461767:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.862828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:04.874878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569955619461769:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:05.902553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.978263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.009817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.037597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.040904Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569942734557671:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.041016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.067263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.121128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6911, MsgBus: 9588 2025-04-07T13:57:08.047418Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569976645365886:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e2/r3tmp/tmpQ1ST9d/pdisk_1.dat 2025-04-07T13:57:08.110211Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:57:08.162712Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:08.192584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:08.192747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:08.194308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6911, node 2 2025-04-07T13:57:08.273160Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:08.273181Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:08.273189Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:08.273292Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9588 TClient is connected to server localhost:9588 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:08.775864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, ... 474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.357082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.403162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.439425Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.482728Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.526082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.589165Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570076883300319:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.589270Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.589681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570076883300324:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.594817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:32.609770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570076883300326:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:33.854132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.911922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.994304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.048054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.104856Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.153868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25541, MsgBus: 4769 2025-04-07T13:57:36.962449Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570096054850172:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:36.962515Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e2/r3tmp/tmph5EsOD/pdisk_1.dat 2025-04-07T13:57:37.111455Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:37.123418Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:37.123502Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:37.124362Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25541, node 5 2025-04-07T13:57:37.249245Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:37.249274Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:37.249283Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:37.249411Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4769 TClient is connected to server localhost:4769 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:37.848204Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:37.879410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:37.995067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:38.208597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:38.298910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.999300Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570113234721063:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.999402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:41.056627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.102401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.151493Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.197372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.240847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.314521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.387261Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570117529688856:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:41.387366Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:41.388374Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570117529688861:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:41.392167Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:41.407812Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570117529688863:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:41.964882Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570096054850172:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:41.964962Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:43.569886Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.654602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 4315, MsgBus: 31528 2025-04-07T13:57:00.258032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569939498864439:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:00.258186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001101/r3tmp/tmp9fJcvl/pdisk_1.dat 2025-04-07T13:57:00.587208Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4315, node 1 2025-04-07T13:57:00.642515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:00.642848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.649171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:00.669507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.669530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.669543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.669640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31528 TClient is connected to server localhost:31528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:01.275637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.310167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.438194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.635207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.700856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.506708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569952383768044:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.506835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.766741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.792541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.824567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.859395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.894831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.982574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.060077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569956678735837:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.060159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.060422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569956678735842:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.063953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:04.076347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569956678735844:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:05.258473Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569939498864439:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:05.258537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:05.432401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.459408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.486339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.577544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.611765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.638561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16706, MsgBus: 21379 2025-04-07T13:57:07.121391Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569968279204041:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:07.121462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001101/r3tmp/tmpPdQ6Y8/pdisk_1.dat 2025-04-07T13:57:07.293693Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:07.321390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:07.321468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:07.322868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16706, node 2 2025-04-07T13:57:07.385977Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:07.386004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:07.386011Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:07.386113Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21379 TClient is connected to server localhost:21379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:07.938345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:07.945954Z node 2 :FL ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:31.797886Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570073131973036:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:31.797990Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:31.798298Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570073131973041:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:31.803535Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:31.825532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570073131973043:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:33.254726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.310328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.398977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.446687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.530123Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.574926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27053, MsgBus: 29729 2025-04-07T13:57:35.838592Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570090915231796:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:35.838635Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001101/r3tmp/tmp14f5pW/pdisk_1.dat 2025-04-07T13:57:36.012381Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:36.030356Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:36.030451Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:36.037935Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27053, node 5 2025-04-07T13:57:36.225846Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:36.225869Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:36.225878Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:36.226007Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29729 TClient is connected to server localhost:29729 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:36.888715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:36.898431Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:36.905365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:36.986043Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:37.196619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:37.283374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.466030Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570112390069969:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.466148Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.518011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.576974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.635390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.691440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.757347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.842251Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570090915231796:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:40.842523Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:40.854671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.940167Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570112390070472:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.940279Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.944956Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570112390070477:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:40.950504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:40.968476Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570112390070479:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:42.366719Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.451979Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.534593Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.609589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.667162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.723765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10714:2165] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10714:2165] Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10917:2156] recipient: [1:10714:2165] 2025-04-07T13:56:36.249110Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-07T13:56:36.254071Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-07T13:56:36.256455Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:56:36.256975Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-07T13:56:36.257900Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1981} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:36.257958Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2025-04-07T13:56:36.258284Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-07T13:56:36.267186Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-07T13:56:36.267341Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2025-04-07T13:56:36.267490Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2025-04-07T13:56:36.267594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:36.267682Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-07T13:56:36.267777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10942:2156] recipient: [1:110:2157] 2025-04-07T13:56:36.282811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-07T13:56:36.282976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:36.296783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-07T13:56:36.296927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:36.297062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-07T13:56:36.297176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:36.297288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-07T13:56:36.297348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:36.297385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-07T13:56:36.297433Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:36.308288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-07T13:56:36.308440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2025-04-07T13:56:36.309548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2025-04-07T13:56:36.309597Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2161} LoadFinished 2025-04-07T13:56:36.309731Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2025-04-07T13:56:36.321590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: 
"/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { 
Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12097 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12098 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12099 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12100 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: SSD } } } } Command { QueryBaseConfig { ... 
5} Create new pdisk PDiskId# 295:1001 Path# /dev/disk1 2025-04-07T13:57:38.402423Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1000 Path# /dev/disk2 2025-04-07T13:57:38.402447Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1002 Path# /dev/disk2 2025-04-07T13:57:38.402472Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1001 Path# /dev/disk1 2025-04-07T13:57:38.402496Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1000 Path# /dev/disk3 2025-04-07T13:57:38.402521Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1000 Path# /dev/disk3 2025-04-07T13:57:38.402546Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-04-07T13:57:38.402571Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-04-07T13:57:38.402607Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1002 Path# /dev/disk1 2025-04-07T13:57:38.402639Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1000 Path# /dev/disk3 2025-04-07T13:57:38.402665Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-04-07T13:57:38.402690Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1001 Path# /dev/disk1 2025-04-07T13:57:38.402718Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1000 Path# /dev/disk3 2025-04-07T13:57:38.402744Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1002 Path# /dev/disk1 2025-04-07T13:57:38.402769Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-04-07T13:57:38.402813Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1002 Path# /dev/disk2 2025-04-07T13:57:38.402841Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1002 Path# /dev/disk1 2025-04-07T13:57:38.402867Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1000 Path# /dev/disk3 2025-04-07T13:57:38.402893Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-04-07T13:57:38.402921Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-04-07T13:57:38.402947Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1002 Path# /dev/disk1 2025-04-07T13:57:38.402971Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1000 Path# /dev/disk3 2025-04-07T13:57:38.402998Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-04-07T13:57:38.403038Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1001 Path# /dev/disk1 2025-04-07T13:57:38.403070Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} 
Create new pdisk PDiskId# 285:1000 Path# /dev/disk3 2025-04-07T13:57:38.403097Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-04-07T13:57:38.403126Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-04-07T13:57:38.403154Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1002 Path# /dev/disk1 2025-04-07T13:57:38.403179Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1000 Path# /dev/disk3 2025-04-07T13:57:38.403209Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-04-07T13:57:38.403236Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-04-07T13:57:38.403279Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1002 Path# /dev/disk1 2025-04-07T13:57:38.403307Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1000 Path# /dev/disk3 2025-04-07T13:57:38.403339Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-04-07T13:57:38.403367Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-04-07T13:57:38.403393Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1002 Path# /dev/disk1 2025-04-07T13:57:38.403417Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1000 Path# /dev/disk3 2025-04-07T13:57:38.403454Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1000 Path# /dev/disk3 2025-04-07T13:57:38.403489Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-04-07T13:57:38.403518Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1002 Path# /dev/disk1 2025-04-07T13:57:38.403545Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1000 Path# /dev/disk3 2025-04-07T13:57:38.403573Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-04-07T13:57:38.403598Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-04-07T13:57:38.403624Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1002 Path# /dev/disk1 2025-04-07T13:57:38.403650Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1001 Path# /dev/disk2 2025-04-07T13:57:38.403678Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1000 Path# /dev/disk3 2025-04-07T13:57:38.403705Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-04-07T13:57:38.403764Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-04-07T13:57:38.403794Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} 
Create new pdisk PDiskId# 280:1002 Path# /dev/disk1 2025-04-07T13:57:38.403821Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1000 Path# /dev/disk3 2025-04-07T13:57:38.403858Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-04-07T13:57:38.403901Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1002 Path# /dev/disk1 2025-04-07T13:57:38.403936Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1000 Path# /dev/disk3 2025-04-07T13:57:38.403976Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-04-07T13:57:38.404007Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1000 Path# /dev/disk1 2025-04-07T13:57:38.404038Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1000 Path# /dev/disk3 2025-04-07T13:57:38.404069Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-04-07T13:57:38.404094Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1000 Path# /dev/disk3 2025-04-07T13:57:38.404120Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1000 Path# /dev/disk1 2025-04-07T13:57:38.404149Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1000 Path# /dev/disk3 2025-04-07T13:57:38.404190Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1001 Path# /dev/disk2 2025-04-07T13:57:38.404233Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1000 Path# /dev/disk1 2025-04-07T13:57:38.404267Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1001 Path# /dev/disk3 2025-04-07T13:57:38.404310Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1002 Path# /dev/disk1 2025-04-07T13:57:38.404339Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1001 Path# /dev/disk3 2025-04-07T13:57:38.404365Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1001 Path# /dev/disk3 2025-04-07T13:57:38.404391Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1002 Path# /dev/disk1 2025-04-07T13:57:38.404417Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1002 Path# /dev/disk2 2025-04-07T13:57:38.404446Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-04-07T13:57:38.404473Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1002 Path# /dev/disk1 2025-04-07T13:57:38.404499Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1002 Path# /dev/disk2 2025-04-07T13:57:38.404528Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1001 Path# /dev/disk1 2025-04-07T13:57:38.404569Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} 
Create new pdisk PDiskId# 286:1002 Path# /dev/disk2 2025-04-07T13:57:38.404619Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1002 Path# /dev/disk1 2025-04-07T13:57:38.404744Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1002 Path# /dev/disk2 2025-04-07T13:57:38.404790Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1002 Path# /dev/disk2 2025-04-07T13:57:38.603745Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.203498s 2025-04-07T13:57:38.603926Z node 251 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.203707s 2025-04-07T13:57:38.618709Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-04-07T13:57:38.640293Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> KqpJoin::JoinDupColumnRight [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull |88.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 8743, MsgBus: 16101 2025-04-07T13:57:01.255317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569942786687420:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.255379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d3/r3tmp/tmpRvgERe/pdisk_1.dat 2025-04-07T13:57:01.626424Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8743, node 1 2025-04-07T13:57:01.673689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:01.677740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.685794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:01.813222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:01.813251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:01.813263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:01.813351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16101 TClient is connected to server localhost:16101 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.509494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.529715Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:02.542866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:02.707220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:57:02.862784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.940177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.797192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955671590807:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.797296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.030156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.058795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.086373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.111623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.140211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.210805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.290394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959966558606:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.290463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.290724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959966558611:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.293791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.304486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569959966558613:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.255497Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569942786687420:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.255643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.306596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.334648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.372021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.413682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.478599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.521405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19625, MsgBus: 29654 2025-04-07T13:57:08.416835Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569973989476685:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:08.416891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d3/r3tmp/tmpVTHebV/pdisk_1.dat 2025-04-07T13:57:08.570301Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:08.585153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:08.585224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:08.586668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19625, node 2 2025-04-07T13:57:08.649555Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:08.649570Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:08.649574Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:08.649650Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29654 TClient is connected to server localhost:29654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:57:09.096445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itsel ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.917006Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.962203Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.007632Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.098401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.196109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570086389863712:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.196212Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.197430Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570086389863717:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.201768Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:34.215073Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570086389863719:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:35.468566Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.530532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.587929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.637497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.745037Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.836205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61465, MsgBus: 20741 2025-04-07T13:57:38.218674Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570101883984007:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:38.225665Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d3/r3tmp/tmpwTdvrU/pdisk_1.dat 2025-04-07T13:57:38.432022Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:38.446300Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:38.446367Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:38.448412Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61465, node 5 2025-04-07T13:57:38.584750Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:38.584780Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:38.584795Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:38.584932Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20741 TClient is connected to server localhost:20741 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:39.230249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:39.241339Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:39.251307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:39.364699Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.652767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:39.807940Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.941116Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570119063854873:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.941214Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.983138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.022321Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.074633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.124435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.187432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.194369Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570101883984007:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:43.194423Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:43.280784Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.377273Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570123358822669:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:43.377419Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:43.378666Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570123358822674:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:43.387440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:43.411912Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570123358822676:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:45.147089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.231062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> LocalPartition::DescribeBadPartition [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2025-04-07T13:56:27.162778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569797045948528:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:27.162849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00165b/r3tmp/tmpLfacQ9/pdisk_1.dat 2025-04-07T13:56:27.345231Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:56:27.544274Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:27.548891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:27.548993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:27.555576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4296, node 1 2025-04-07T13:56:27.632982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00165b/r3tmp/yandexP7jIkE.tmp 2025-04-07T13:56:27.633018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00165b/r3tmp/yandexP7jIkE.tmp 2025-04-07T13:56:27.633229Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00165b/r3tmp/yandexP7jIkE.tmp 2025-04-07T13:56:27.633354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:27.678059Z INFO: TTestServer started on Port 22950 GrpcPort 4296 TClient is connected to server localhost:22950 PQClient connected to localhost:4296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:27.959150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:27.986874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:28.128981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:29.972553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569805635883781:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.972562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569805635883768:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.972658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:29.976778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:29.998621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569805635883783:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:30.329741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:30.348571Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569809930851153:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:30.348772Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc2NTAxZmEtMTBkMzE1OGQtNTAzODNjOWEtZDc5MDMyMWI=, ActorId: [1:7490569805635883764:2307], ActorState: ExecuteState, TraceId: 01jr88kmkya8ff4nc85nh9z7yz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:30.350882Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:30.399416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:30.488929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569809930851425:2617] 2025-04-07T13:56:32.140832Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569797045948528:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:32.140903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:36.693010Z :SessionAbort INFO: TTopicSdkTestSetup started 2025-04-07T13:56:36.707006Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:36.723376Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569835700655508:2787] connected; active server actors: 1 2025-04-07T13:56:36.723605Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:36.724070Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:36.724282Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:36.725695Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:36.727973Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:36.728757Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:36.728980Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:36.729160Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:36.729178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:36.729193Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:36.729218Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:36.729239Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:36.729278Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:36.729292Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:36.736124Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:36.736222Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.736256Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569835700655532:2436], now have 1 active actors on pipe 2025-04-07T13:56:36.767018Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:36.767064Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569835700655507:2786], now have 1 active actors on pipe 2025-04-07T13:56:36.781101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569797045948790 RawX2: 4294969483 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true 
FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" Y ... Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-07T13:57:44.396289Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid f8478ef9-5d285cfd-f9b2efae-42dbe0db has messages 1 2025-04-07T13:57:44.396346Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 read done: guid# f8478ef9-5d285cfd-f9b2efae-42dbe0db, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 201 2025-04-07T13:57:44.396368Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 response to read: guid# f8478ef9-5d285cfd-f9b2efae-42dbe0db 2025-04-07T13:57:44.396524Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 Process answer. Aval parts: 0 2025-04-07T13:57:44.397343Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Got ReadResponse, serverBytesSize = 201, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2025-04-07T13:57:44.397437Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2025-04-07T13:57:44.402166Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-04-07T13:57:44.402239Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Returning serverBytesSize = 201 to budget 2025-04-07T13:57:44.402273Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] In ContinueReadingDataImpl, ReadSizeBudget = 201, ReadSizeServerDelta = 52428599 2025-04-07T13:57:44.402584Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T13:57:44.404353Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (3-3) 2025-04-07T13:57:44.404433Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] The application data is transferred to the client. Number of messages 1, size 9 bytes 2025-04-07T13:57:44.404481Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Returning serverBytesSize = 0 to budget 0 1 2025-04-07T13:57:44.404591Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Commit offsets [3, 4). 
Partition stream id: 1 2025-04-07T13:57:44.405145Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 grpc read done: success# 1, data# { read_request { bytes_size: 201 } } 2025-04-07T13:57:44.405297Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 got read request: guid# e818710d-25f353bd-295cb51d-29cc7a32 2025-04-07T13:57:44.405609Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2025-04-07T13:57:44.405784Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) committing to position 4 prev 3 end 4 by cookie 2 2025-04-07T13:57:44.406081Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:57:44.406113Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:57:44.406315Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 offset is set to 4 (startOffset 0) session consumer-1_6_2_769917807419993690_v1 2025-04-07T13:57:44.406456Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:57:44.407468Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 readTimeStamp for offset 4 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2025-04-07T13:57:44.407522Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:57:44.407566Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-07T13:57:44.407642Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-07T13:57:44.407673Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) commit done to position 4 endOffset 4 with cookie 2 2025-04-07T13:57:44.407709Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 4 2025-04-07T13:57:44.408421Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 4 } } 2025-04-07T13:57:45.204972Z node 6 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:57:45.206820Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0 describe result for acl check 2025-04-07T13:57:45.368846Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2025-04-07T13:57:45.368909Z :INFO: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] Counters: { Errors: 0 
CurrentSessionLifetimeMs: 1003 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:57:45.388244Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 checking auth because of timeout 2025-04-07T13:57:45.388330Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 auth for : consumer-1 2025-04-07T13:57:45.388984Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 Handle describe topics response 2025-04-07T13:57:45.389084Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 auth is DEAD 2025-04-07T13:57:45.389185Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 auth ok: topics# 1, initDone# 1 2025-04-07T13:57:46.369127Z :INFO: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] Closing read session. Close timeout: 0.000000s 2025-04-07T13:57:46.369196Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2025-04-07T13:57:46.369238Z :INFO: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2004 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:57:46.369348Z :NOTICE: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:57:46.369401Z :DEBUG: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] [] Abort session to cluster 2025-04-07T13:57:46.369934Z :NOTICE: [/Root] [/Root] [eba0890f-ea67e564-6460a161-84929bbe] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:57:46.370974Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 grpc read done: success# 0, data# { } 2025-04-07T13:57:46.371003Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 grpc read failed 2025-04-07T13:57:46.371028Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 grpc closed 2025-04-07T13:57:46.371066Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_6_2_769917807419993690_v1 is DEAD 2025-04-07T13:57:46.371844Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7490570128781186239:2506] disconnected; active server actors: 1 2025-04-07T13:57:46.371874Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7490570128781186239:2506] client consumer-1 disconnected session consumer-1_6_2_769917807419993690_v1 2025-04-07T13:57:46.371957Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:57:46.371976Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session consumer-1_6_2_769917807419993690_v1 2025-04-07T13:57:46.372005Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7490570128781186242:2509] destroyed 2025-04-07T13:57:46.372043Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer-1_6_2_769917807419993690_v1 2025-04-07T13:57:46.376078Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T13:57:46.376121Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:57:46.376162Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:57:46.376562Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:57:46.376598Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:57:46.377614Z node 6 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0 grpc read done: success: 0 data: 2025-04-07T13:57:46.377641Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0 grpc read failed 2025-04-07T13:57:46.377666Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0 grpc closed 2025-04-07T13:57:46.377682Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|fed2a216-442ff282-d336e55b-da151858_0 is DEAD 2025-04-07T13:57:46.378417Z node 6 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:57:46.380854Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:57:46.380902Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] 
server disconnected, pipe [6:7490570120191251444:2460] destroyed 2025-04-07T13:57:46.380953Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TxUsage::WriteToTopic_Demo_41 |88.3%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpFlipJoin::Right_2 [GOOD] >> KqpFlipJoin::Right_3 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] |88.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 21269, MsgBus: 18605 2025-04-07T13:57:01.051481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569942656193738:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.051527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010db/r3tmp/tmp97Yryf/pdisk_1.dat 2025-04-07T13:57:01.470256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:01.486519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:01.486625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.491862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21269, node 1 2025-04-07T13:57:01.559676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:01.559704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:01.559714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:01.559802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18605 TClient is connected to server localhost:18605 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.113771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.144420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.291401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.454374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.516798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.705470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955541097315:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.717913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.748730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.784856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.864739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.892057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.930700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.968974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.026457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959836065107:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.026533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.026769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959836065112:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.030362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.040335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569959836065114:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.051864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569942656193738:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.051924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.054026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.081553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.106726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.138699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.172325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.209565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19007, MsgBus: 26162 2025-04-07T13:57:09.150549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569978524700569:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010db/r3tmp/tmp7kz8kB/pdisk_1.dat 2025-04-07T13:57:09.224741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:57:09.276725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:09.290979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:09.291074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:09.293246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19007, node 2 2025-04-07T13:57:09.364065Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:09.364091Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:09.364097Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:09.364198Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26162 TClient is connected to server localhost:26162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:09.904842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:09.915810Z node 2 : ... s undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.366204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.428366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.490161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.548878Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.661061Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570085682462341:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.661180Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.661361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570085682462346:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:34.666002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:34.676239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570085682462348:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:36.480733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.529747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.582398Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.648778Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.697313Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.766201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7258, MsgBus: 32092 2025-04-07T13:57:40.153743Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570110587769378:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:40.153808Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010db/r3tmp/tmpDBuqpv/pdisk_1.dat 2025-04-07T13:57:40.393978Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:40.400795Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:40.400894Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:40.402275Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7258, node 5 2025-04-07T13:57:40.549320Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:40.549349Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:40.549360Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:40.549497Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32092 TClient is connected to server localhost:32092 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:41.210827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:41.219583Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:41.235505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:41.387766Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:41.660126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:41.766928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:44.700851Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570127767640248:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.700960Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.763222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.817661Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.879048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.932994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.080237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.156197Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570110587769378:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:45.156368Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:45.206027Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.307773Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570132062608047:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:45.307871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:45.308222Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570132062608052:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:45.312485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:45.331859Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570132062608054:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:46.805141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.850532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 16338, MsgBus: 30147 2025-04-07T13:56:59.968265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569937859070016:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:59.968339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001110/r3tmp/tmpc8jeBD/pdisk_1.dat 2025-04-07T13:57:00.275565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16338, node 1 2025-04-07T13:57:00.351254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:00.351516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.357427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:00.403237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.403266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.403274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.403363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30147 TClient is connected to server localhost:30147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:00.875915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:00.901806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.059390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.224540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.295935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.093671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955038940891:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.093829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.380068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.408280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.442786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.476839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.553322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.606144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.688841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955038941388:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.688939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.696146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955038941393:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.701603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:03.718418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569955038941395:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:04.930780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.972072Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569937859070016:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:04.972162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:05.000948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.072954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:29: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 18100, MsgBus: 7151 2025-04-07T13:57:06.760710Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569963905789314:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.760752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001110/r3tmp/tmpUoFQQM/pdisk_1.dat 2025-04-07T13:57:06.885793Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:06.909559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:06.909635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:06.911431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18100, node 2 2025-04-07T13:57:06.997248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:06.997274Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:06.997281Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:06.997375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7151 TClient is connected to server localhost:7151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:57:07.454955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.464164Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:07.470629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:07.558789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 
s undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.780195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.826321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.902305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.953131Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.011503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.087167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570090758122259:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:35.087258Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:35.087623Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570090758122264:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:35.094026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:35.112363Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570090758122266:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:36.587639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.639266Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.723682Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.279738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5169, MsgBus: 16360 2025-04-07T13:57:39.296380Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570109252652957:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:39.296427Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001110/r3tmp/tmpAVnlvV/pdisk_1.dat 2025-04-07T13:57:39.608337Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:39.640970Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:39.641070Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:39.644591Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5169, node 5 2025-04-07T13:57:39.857353Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:39.857375Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:39.857384Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:39.857512Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16360 TClient is connected to server localhost:16360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:57:40.613908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.620467Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:40.639223Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.733651Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.954638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:41.100986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:44.300783Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570109252652957:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:44.300900Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:44.412168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570130727491150:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.412328Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.444461Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.534600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.590715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.630598Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.710401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.766126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.845408Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570130727491647:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.845512Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.845566Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570130727491652:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:44.849780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:44.863262Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570130727491654:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:46.377784Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.441846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.503887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 63439, MsgBus: 21499 2025-04-07T13:57:02.697323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569949652884071:2232];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:02.697568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001079/r3tmp/tmpAyeFc5/pdisk_1.dat 2025-04-07T13:57:03.074665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63439, node 1 2025-04-07T13:57:03.105357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:03.105477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:03.127865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:03.268475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:03.268497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:03.268501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:03.268572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21499 TClient is connected to server localhost:21499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:03.900847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.926460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.947115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.120680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.439989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.661706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:06.459115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569966832754773:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:06.459254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:06.738326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.814348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.846986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.914368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.951792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.997217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.033483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569971127722568:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:07.033537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:07.033679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569971127722573:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:07.037022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:07.053148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569971127722575:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:07.697864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569949652884071:2232];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:07.698039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:08.203507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.231260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.262399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.294126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.326522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.354992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20001, MsgBus: 9634 2025-04-07T13:57:10.388118Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569983723926867:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:10.388165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001079/r3tmp/tmppAwhyV/pdisk_1.dat 2025-04-07T13:57:10.626272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:10.648405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:10.648553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:10.650027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20001, node 2 2025-04-07T13:57:10.720629Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:10.720654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:10.720676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:10.720776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9634 TClient is connected to server localhost:9634 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:11.330088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.138631Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570094762911689:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:36.138715Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:36.138940Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570094762911694:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:36.144633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:36.169114Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570094762911696:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:37.668545Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.710610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.743286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.789073Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.862118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:37.899906Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28171, MsgBus: 14514 2025-04-07T13:57:40.905097Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570112165486037:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:41.021822Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001079/r3tmp/tmpZ9ZsW1/pdisk_1.dat 2025-04-07T13:57:41.233309Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:41.252197Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:41.252315Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:41.258250Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28171, node 5 2025-04-07T13:57:41.433377Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:41.433419Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:41.433431Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:41.433573Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14514 TClient is connected to server localhost:14514 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:42.357775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.370758Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:42.388044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.504067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.773319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.910995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:45.863434Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570112165486037:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:45.863503Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:46.205098Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570137935291383:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.205190Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.251768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.337716Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.377484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.459554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.519867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.580891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.660384Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570137935291882:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.660492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.661319Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570137935291887:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.665061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:46.676294Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570137935291889:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:48.032467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:48.077845Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:48.120781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:48.165371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:48.241245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:48.276710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::ReadConstant >> TTxDataShardMiniKQL::Write >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore >> KqpWorkloadServiceActors::TestPoolFetcher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 25652, MsgBus: 28584 2025-04-07T13:56:59.897186Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569933941868654:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:59.898116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001107/r3tmp/tmpJpgqqW/pdisk_1.dat 2025-04-07T13:57:00.276765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:00.281670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:00.281773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.284382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25652, node 1 2025-04-07T13:57:00.347864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.347887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.347895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.347976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28584 TClient is connected to server localhost:28584 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:00.911629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:00.953401Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:00.971243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.154926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:01.302280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:01.376377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.012510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569951121739381:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.012610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.310655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.343978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.379378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.412605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.483953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.572051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.682440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569951121739889:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.682511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.682859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569951121739895:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.687427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:03.711718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569951121739897:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:04.903723Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569933941868654:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:04.904026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:04.958104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.012432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7625, MsgBus: 28144 2025-04-07T13:57:06.729987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490569966149499477:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.730118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001107/r3tmp/tmpFdaMlp/pdisk_1.dat 2025-04-07T13:57:06.926966Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:06.930355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:06.930436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:06.933015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7625, node 2 2025-04-07T13:57:06.998991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:06.999018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:06.999027Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:06.999138Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28144 TClient is connected to server localhost:28144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:57:07.442007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:07.458727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.533080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:07.744926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.828753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation ... ions } 2025-04-07T13:57:38.232952Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.282437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.337641Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.388411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.463580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.650114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.738538Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570102842891163:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.738639Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.739158Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570102842891168:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.743548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:38.760868Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T13:57:38.761089Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570102842891170:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:39.060796Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570085663019747:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:39.086880Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:40.362694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.442781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12242, MsgBus: 13782 2025-04-07T13:57:42.696126Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490570122068825792:2153];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001107/r3tmp/tmpVSB07b/pdisk_1.dat 2025-04-07T13:57:42.827881Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:57:42.912285Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:42.952471Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:42.952585Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:42.956824Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12242, node 6 2025-04-07T13:57:43.141289Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:43.141315Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:43.141325Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:43.141461Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13782 TClient is connected to server localhost:13782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:57:43.782068Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.789869Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:43.795510Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:43.880201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:44.100687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:44.183107Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:47.459943Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570143543663865:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:47.460036Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:47.513911Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.588924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.637907Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.681677Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490570122068825792:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:47.681751Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:47.700388Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.753376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.824387Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.925800Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570143543664369:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:47.925914Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:47.926184Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570143543664374:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:47.931260Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:47.953099Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570143543664376:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:49.261151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:49.369432Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> TxUsage::WriteToTopic_Demo_8 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> TxUsage::WriteToTopic_Demo_33 [GOOD] |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |88.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> TxUsage::WriteToTopic_Demo_9 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> TxUsage::WriteToTopic_Demo_34 >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TxUsage::WriteToTopic_Demo_15 [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 29086, MsgBus: 27315 2025-04-07T13:57:01.453925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569942368835107:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.454122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c4/r3tmp/tmpTCb5hX/pdisk_1.dat 2025-04-07T13:57:01.909889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:01.913677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:01.913738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.915173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29086, node 1 2025-04-07T13:57:02.031054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:02.031080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:02.031091Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:02.031181Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27315 TClient is connected to server localhost:27315 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.621713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.644440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.800044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.933990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.032070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.784514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569955253738544:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.784645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.963588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:04.991223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.025531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.069344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.101939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.140801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.188208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959548706334:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.188303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.188592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569959548706339:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.193780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.213504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569959548706341:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.272956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.305021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.394104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.451930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569942368835107:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.452033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.454293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.483747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 20693, MsgBus: 3619 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c4/r3tmp/tmpAhcUeJ/pdisk_1.dat 2025-04-07T13:57:09.175697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:57:09.218188Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20693, node 2 2025-04-07T13:57:09.233198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:09.233271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:09.234761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:09.283237Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:09.283259Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:09.283266Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:09.283387Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3619 TClient is connected to server localhost:3619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:57:09.742015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.756597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: EScheme ... n type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.660147Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570105800655267:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.660291Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.660593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490570105800655272:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.669108Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:39.724776Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490570105800655274:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:57:41.339860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.425717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.471063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.524201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:41.614765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:47: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 62182, MsgBus: 19426 2025-04-07T13:57:46.424933Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490570135685105000:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:46.424994Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c4/r3tmp/tmpZtfWfv/pdisk_1.dat 2025-04-07T13:57:46.587563Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:46.611310Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:46.611409Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:46.615227Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62182, node 5 2025-04-07T13:57:46.668219Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:46.668248Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:46.668256Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:46.668390Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19426 TClient is connected to server localhost:19426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:47.218756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:47.242051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:47.329506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:47.531236Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:47.629795Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:50.376793Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570152864975887:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:50.376901Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:50.422074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.475489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.513700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.588535Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.659194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.732503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:50.802311Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570152864976394:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:50.802410Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:50.802653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570152864976399:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:50.806308Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:50.822475Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570152864976401:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:51.425192Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570135685105000:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:51.425261Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:52.182406Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:52.264428Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:52.303109Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:52.353145Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:52.418418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-04-07T13:57:53.730200Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:57:54.113508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:57:54.113563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:54.125609Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:57:54.144843Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T13:57:54.145144Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:57:54.203141Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:57:54.329544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:57:54.330134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:57:54.369464Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T13:57:54.369659Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T13:57:54.369733Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T13:57:54.396246Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:57:54.427324Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T13:57:54.436540Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:57:54.436784Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T13:57:54.436822Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T13:57:54.436862Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T13:57:54.436894Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.437680Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.437717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.449077Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T13:57:54.449235Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T13:57:54.449292Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.449332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:57:54.460754Z node 1 
:TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T13:57:54.460840Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T13:57:54.460876Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T13:57:54.460926Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T13:57:54.460983Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T13:57:54.778481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.778558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.778618Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T13:57:54.792977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T13:57:54.793061Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:57:54.793170Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:57:54.820847Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T13:57:54.820955Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T13:57:54.821023Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T13:57:54.834256Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.834337Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T13:57:54.834392Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T13:57:54.834446Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.834736Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T13:57:54.834797Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T13:57:54.834841Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T13:57:54.834884Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.845250Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T13:57:54.845324Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T13:57:54.845373Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T13:57:54.845432Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.845467Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 
2025-04-07T13:57:54.878158Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:57:54.878236Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.878309Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.878376Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T13:57:54.878466Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T13:57:54.879015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.879067Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.879114Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T13:57:54.879261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T13:57:54.879292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T13:57:54.879438Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.879485Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.879529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T13:57:54.879573Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T13:57:54.884338Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T13:57:54.895424Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.895855Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.895906Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.895984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.896034Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:57:54.896108Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T13:57:54.896153Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T13:57:54.896196Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T13:57:54.896243Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.896288Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T13:57:54.896327Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T13:57:54.896361Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T13:57:54.896578Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T13:57:54.896631Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.896697Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T13:57:54.896734Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T13:57:54.896763Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T13:57:54.896830Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.896874Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T13:57:54.896916Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:57:54.896954Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T13:57:54.897013Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T13:57:54.897048Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T13:57:54.897082Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T13:57:54.897132Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.897154Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:57:54.897194Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
ARD TRACE: StateInit, received event# 268828672, Sender [3:226:2223], Recipient [3:229:2224]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:57:57.389719Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [3:226:2223], Recipient [3:229:2224]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:57:57.389935Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:226:2223], Recipient [3:229:2224]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:57:57.396426Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:229:2224] 2025-04-07T13:57:57.396693Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:57:57.405317Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-07T13:57:57.445455Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:57:57.445612Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:57:57.447660Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T13:57:57.447732Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T13:57:57.447780Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T13:57:57.448159Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:57:57.448237Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-04-07T13:57:57.448325Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-07T13:57:57.448457Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-07T13:57:57.448650Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:273:2261] 2025-04-07T13:57:57.448712Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T13:57:57.448760Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-07T13:57:57.448795Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:57.448953Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-07T13:57:57.449048Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-07T13:57:57.449323Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:229:2224], Recipient [3:229:2224]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:57.449382Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:57.449550Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T13:57:57.449653Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T13:57:57.449838Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:229:2224]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-07T13:57:57.449866Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T13:57:57.449911Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-07T13:57:57.449949Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:57.450039Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 
9437184 2025-04-07T13:57:57.450073Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:57:57.450105Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T13:57:57.450138Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T13:57:57.450172Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T13:57:57.450208Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T13:57:57.450245Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T13:57:57.450386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 229 RawX2: 12884904112 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-07T13:57:57.450503Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:229:2224]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-07T13:57:57.450540Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T13:57:57.450581Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-07T13:57:57.450622Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 9437184: waitStep# 0 readStep# 0 observedStep# 1000001 2025-04-07T13:57:57.450677Z node 3 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 9437184 promoting UnprotectedReadEdge to v0/18446744073709551615 2025-04-07T13:57:57.450772Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:271:2259], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T13:57:57.450806Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T13:57:57.450886Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:121:2147], Recipient [3:229:2224]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-07T13:57:57.450919Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T13:57:57.450961Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-07T13:57:57.451004Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-07T13:57:57.489831Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:271:2259], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:57:57.489904Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T13:57:57.546748Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2025-04-07T13:57:57.546825Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T13:57:57.547071Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:281:2267], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:57.547107Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:57.547153Z node 3 :TX_DATASHARD DEBUG: 
Server connected at leader tablet# 9437184, clientId# [3:279:2266], serverId# [3:281:2267], sessionId# [0:0:0] 2025-04-07T13:57:57.547386Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-07T13:57:57.547425Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:57:57.547513Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:57:57.548207Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-07T13:57:57.548306Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:57.548349Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-07T13:57:57.548387Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:57:57.548424Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T13:57:57.548460Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:57:57.548518Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-07T13:57:57.548561Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:57.548587Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:57:57.548612Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-07T13:57:57.548638Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T13:57:57.550111Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-07T13:57:57.550195Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T13:57:57.550260Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:57.550294Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-07T13:57:57.550326Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-07T13:57:57.550359Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2025-04-07T13:57:57.550401Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T13:57:57.550470Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2025-04-07T13:57:57.550521Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-07T13:57:57.550582Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-07T13:57:57.550614Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-07T13:57:57.550665Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:57.550689Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-07T13:57:57.550715Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-07T13:57:57.550817Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:57:57.550859Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-07T13:57:57.550904Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> TxUsage::WriteToTopic_Demo_16 >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit [GOOD] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> KqpWorkloadService::TestQueueSizeSimple >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> TxUsage::WriteToTopic_Demo_24 >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull >> TxUsage::WriteToTopic_Demo_41 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-04-07T13:57:53.724162Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:57:54.111194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:57:54.111245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:54.121453Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:57:54.141072Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T13:57:54.141434Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-04-07T13:57:54.203432Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:57:54.330260Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:57:54.338427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:57:54.370087Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T13:57:54.370261Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T13:57:54.370346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T13:57:54.400946Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:57:54.435308Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T13:57:54.436750Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:57:54.436871Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T13:57:54.436919Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T13:57:54.436953Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T13:57:54.436998Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.437272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.437320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.448221Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T13:57:54.448360Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T13:57:54.448431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.448495Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:57:54.458939Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T13:57:54.459033Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T13:57:54.459070Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T13:57:54.459104Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T13:57:54.459167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T13:57:54.777992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.778080Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.778140Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T13:57:54.789009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ 
#\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T13:57:54.789098Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:57:54.789219Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:57:54.820196Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T13:57:54.820311Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T13:57:54.820366Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T13:57:54.832720Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.832823Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T13:57:54.832874Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T13:57:54.832917Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.833292Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T13:57:54.835018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T13:57:54.835082Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T13:57:54.835134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.845916Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T13:57:54.845996Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T13:57:54.846053Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T13:57:54.846101Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.846160Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T13:57:54.869790Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:57:54.869873Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.869930Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.869989Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T13:57:54.870063Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T13:57:54.870644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.870705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.870748Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T13:57:54.870916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], 
Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T13:57:54.870950Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T13:57:54.871100Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.871149Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.871185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T13:57:54.871222Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T13:57:54.884191Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T13:57:54.895447Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.895825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.895901Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.895965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.896044Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:57:54.896089Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T13:57:54.896138Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T13:57:54.896182Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T13:57:54.896237Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.896272Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T13:57:54.896332Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T13:57:54.896383Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T13:57:54.896585Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T13:57:54.896628Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.896692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T13:57:54.896724Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T13:57:54.896750Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T13:57:54.896823Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.896853Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T13:57:54.896899Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:57:54.896937Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 
2025-04-07T13:57:54.896998Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T13:57:54.897039Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T13:57:54.897079Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T13:57:54.897125Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.897188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:57:54.897245Z node 1 :TX_DATASHARD TRACE: ... ated operation [0:2] at 9437184 2025-04-07T13:57:58.990906Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:58.990930Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:57:58.990971Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-07T13:57:58.990996Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T13:57:58.991048Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:57:58.991101Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-04-07T13:57:58.991139Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-07T13:57:58.991283Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:57:58.991326Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T13:57:58.991372Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T13:57:59.020961Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-07T13:57:59.021068Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T13:57:59.021161Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:59.021221Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-07T13:57:59.021267Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-07T13:57:59.021307Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-07T13:57:59.021423Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T13:57:59.021459Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-07T13:57:59.021494Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-07T13:57:59.021534Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-07T13:57:59.021596Z node 3 
:TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T13:57:59.021622Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-07T13:57:59.021657Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-07T13:57:59.103627Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:57:59.103706Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-07T13:57:59.103771Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-04-07T13:57:59.103877Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:58:00.193913Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2025-04-07T13:58:00.194001Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T13:58:00.194386Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:290:2275], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:00.194426Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:00.194480Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:289:2274], serverId# [3:290:2275], sessionId# [0:0:0] 2025-04-07T13:58:00.409493Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-04-07T13:58:00.419789Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:58:00.419989Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:58:00.477277Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-04-07T13:58:00.477418Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-07T13:58:00.477467Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-04-07T13:58:00.477514Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:58:00.477557Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on 
unit BuildAndWaitDependencies 2025-04-07T13:58:00.477612Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:00.477680Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-04-07T13:58:00.477727Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-07T13:58:00.477758Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:58:00.477787Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-04-07T13:58:00.477816Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-07T13:58:00.477868Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:00.477923Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-04-07T13:58:00.477968Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-07T13:58:00.478140Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:58:00.478201Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-07T13:58:00.478259Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:00.516893Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-04-07T13:58:00.519896Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-04-07T13:58:00.520003Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-07T13:58:00.533225Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:58:00.533298Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-07T13:58:00.600951Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-04-07T13:58:00.601082Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:00.697671Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-04-07T13:58:00.697784Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T13:58:00.697865Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:58:00.697908Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 
2025-04-07T13:58:00.697959Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-04-07T13:58:00.698008Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-04-07T13:58:00.698057Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-04-07T13:58:00.698091Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-04-07T13:58:00.698128Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-04-07T13:58:00.698163Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 2025-04-07T13:58:00.698211Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-07T13:58:00.698243Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-04-07T13:58:00.698285Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-04-07T13:58:00.919516Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:58:00.919592Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-04-07T13:58:00.919642Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-07T13:58:00.919733Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:58:01.089247Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:58:01.089319Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-07T13:58:01.105568Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:226:2223], Recipient [3:229:2224]: NKikimr::TEvTablet::TEvFollowerGcApplied >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> TxUsage::WriteToTopic_Demo_42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-04-07T13:55:01.303744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:01.304214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:01.304415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140d/r3tmp/tmpMTAqb8/pdisk_1.dat 2025-04-07T13:55:01.860123Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23632, node 1 2025-04-07T13:55:02.323247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:02.323309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:02.323343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:02.323972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:02.332278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:02.441767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:02.441925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:02.462296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25416 2025-04-07T13:55:03.206410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:06.582254Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:06.651325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:06.651457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:06.700634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:06.708542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:07.245983Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:07.483923Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:07.484019Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:07.545882Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:07.546071Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:07.546288Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:07.546361Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:07.546424Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:07.546480Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:07.546529Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:07.546583Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:07.546973Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:07.827808Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.827947Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.846930Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T13:55:07.865444Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T13:55:07.866388Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:07.867548Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:07.911769Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:07.911839Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:07.911905Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:07.922300Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:07.922405Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:07.948650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:07.979846Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:07.979989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:08.097676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:08.120249Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:08.167595Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:08.538027Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:08.727280Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:10.010299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.010435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.029079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:10.777205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.777342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.778482Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:10.778643Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:55:10.778704Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T13:55:10.778760Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T13:55:10.779209Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T13:55:10.779444Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T13:55:10.779646Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T13:55:10.779697Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T13:55:10.779875Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:55:10.779926Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T13:55:10.798834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.798925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.799259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.806037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T13:55:11.007922Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T13:55:11.008000Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T13:55:11.117053Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T13:55:11.424761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T13:55:11.626996Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2584:3168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:55:11.627196Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:55:11.627233Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2584:3168], StatRequests.size() = 1 2025-04-07T13:55:11.701276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88h79ecb5r26saz5gw5vd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M1OGE3ZjEtMjA3MTliMjUtM2FlMTkxOWEtODRlNjIwODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:55:11.782599Z node 1 :STATISTICS DEBUG: [TStatService::TEvGet ... EvPropagateStatistics, node id = 1 2025-04-07T13:56:23.137266Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:25.977181Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:25.977491Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:56:30.032181Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:32.888149Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:32.888450Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:56:36.779754Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:39.091587Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:39.091942Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:56:42.870069Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:45.623544Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:45.623913Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:56:49.163767Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:51.487183Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:51.487495Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:56:55.507895Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:56:58.916550Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:56:58.916924Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:03.909662Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:57:07.414093Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:07.414400Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:12.597331Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:57:16.454835Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:16.455509Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:21.826746Z node 2 :STATISTICS DEBUG: [72075186224037897] 
EvPropagateTimeout 2025-04-07T13:57:25.665204Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:25.665513Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:31.179977Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:57:34.897002Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:34.897326Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:40.287839Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:57:44.041234Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:44.041540Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:49.239491Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T13:57:51.241661Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T13:57:51.241750Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T13:57:51.241801Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T13:57:51.241849Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T13:57:52.953109Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:57:52.953448Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T13:57:53.045290Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2025-04-07T13:57:53.045373Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 219.000000s, at schemeshard: 72075186224037889 2025-04-07T13:57:53.045674Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2025-04-07T13:57:53.069811Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T13:57:54.724307Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:57:54.724387Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:57:54.724429Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T13:57:54.724481Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T13:57:54.724532Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T13:57:54.724957Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:57:54.773503Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T13:57:54.777189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6553:4641], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:54.777301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6562:4646], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:54.777436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:54.789080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T13:57:54.874487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6567:4649], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T13:57:55.168627Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6684:4711]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:57:55.169023Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T13:57:55.169111Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6686:4713] 2025-04-07T13:57:55.169187Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6686:4713] 2025-04-07T13:57:55.169481Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:6687:4714] 2025-04-07T13:57:55.169585Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:6687:4714], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T13:57:55.169658Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T13:57:55.169745Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6686:4713], server id = [2:6687:4714], tablet id = 72075186224037897, status = OK 2025-04-07T13:57:55.169791Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T13:57:55.169837Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6684:4711], StatRequests.size() = 1 2025-04-07T13:57:56.598656Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjZkYTc5OGMtYjFjZjNjZDItN2YzY2JhYzctODY2NzRjZDA=, TxId: 2025-04-07T13:57:56.598723Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjZkYTc5OGMtYjFjZjNjZDItN2YzY2JhYzctODY2NzRjZDA=, TxId: 2025-04-07T13:57:56.599240Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:57:56.619601Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T13:57:56.619674Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T13:57:56.748171Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T13:57:56.748245Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T13:57:56.833208Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6686:4713], schemeshard count = 1 2025-04-07T13:57:58.188440Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:57:58.188536Z node 2 :STATISTICS ERROR: [72075186224037897] IsColumnTable. traversal path [OwnerId: 72075186224037889, LocalPathId: 4] is not known to schemeshard 2025-04-07T13:57:58.188915Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:57:58.191272Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T13:57:58.199566Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODRkZTk3YjEtMjEyZGQ4ZDUtZTMzZWFiODgtNjM5ZmFiOGI=, TxId: 2025-04-07T13:57:58.199622Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODRkZTk3YjEtMjEyZGQ4ZDUtZTMzZWFiODgtNjM5ZmFiOGI=, TxId: 2025-04-07T13:57:58.200031Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:57:58.213657Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:57:58.213715Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2733:3213] 2025-04-07T13:57:58.214295Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6778:4767]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:57:58.216529Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:57:58.216583Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-04-07T13:57:58.216623Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-04-07T13:55:00.449471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:00.449900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:00.450097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001418/r3tmp/tmpO3iC0J/pdisk_1.dat 2025-04-07T13:55:01.000918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9504, node 1 2025-04-07T13:55:01.450809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:01.450870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:01.450903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:01.451581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:01.454193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:01.595450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:01.595594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:01.611991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15933 2025-04-07T13:55:02.613446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:07.521515Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:07.592603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:07.601189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:07.655393Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:07.666185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:08.040628Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:08.172212Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:08.172320Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:08.218405Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:08.218601Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:08.218853Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:08.218916Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:08.218964Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:08.219035Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:08.219086Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:08.219139Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:08.219537Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:08.514670Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:08.514776Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:08.528792Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T13:55:08.548374Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T13:55:08.549383Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:08.550422Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:08.591764Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:08.591827Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:08.591913Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:08.598061Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:08.598182Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:08.629226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:08.659519Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:08.659666Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:08.679304Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:08.695835Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:08.749604Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:09.245819Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:09.425469Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:10.349792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.349974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.371047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:10.654865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:10.655141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:10.655468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:10.655592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:10.655705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:10.655858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:55:10.655991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:55:10.656117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:55:10.656276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:55:10.656402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:55:10.656525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:55:10.656774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:55:10.725192Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:10.725294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:10.725563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:10.725699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:10.725849Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:10.725982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... Execute 2025-04-07T13:57:59.652538Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:57:59.652701Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-07T13:57:59.763309Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8222:6111] 2025-04-07T13:57:59.766743Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8222:6111], schemeshard id = 72075186224037889 2025-04-07T13:57:59.766963Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8176:6086], server id = [2:8223:6112], tablet id = 72075186224037897, status = OK 2025-04-07T13:57:59.767168Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8223:6112] 2025-04-07T13:57:59.767258Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8223:6112], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-04-07T13:57:59.913609Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:57:59.913716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:57:59.926436Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T13:57:59.952935Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:57:59.953114Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T13:57:59.954084Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8230:6117], server id = [2:8235:6122], tablet id = 72075186224037899, status = OK 2025-04-07T13:57:59.961149Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8230:6117], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.961396Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8231:6118], server id = [2:8236:6123], tablet id = 72075186224037900, status = OK 2025-04-07T13:57:59.961468Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8231:6118], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.977406Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8232:6119], server id = [2:8237:6124], tablet id = 72075186224037901, status = OK 2025-04-07T13:57:59.977536Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8232:6119], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.977822Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T13:57:59.978385Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T13:57:59.978845Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8233:6120], server id = [2:8238:6125], tablet id = 72075186224037902, status = OK 2025-04-07T13:57:59.978935Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8233:6120], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.979598Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8234:6121], server id = [2:8239:6126], tablet id = 72075186224037903, status = OK 2025-04-07T13:57:59.979656Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8234:6121], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.979884Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8230:6117], server id = [2:8235:6122], tablet id = 72075186224037899 2025-04-07T13:57:59.979916Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.980387Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T13:57:59.980524Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8231:6118], server id = [2:8236:6123], tablet id = 72075186224037900 2025-04-07T13:57:59.980546Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.986380Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T13:57:59.986969Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T13:57:59.987149Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8240:6127], server id = [2:8242:6129], tablet id = 72075186224037904, status = OK 2025-04-07T13:57:59.987258Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8240:6127], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.987581Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8232:6119], server id = [2:8237:6124], tablet id = 72075186224037901 2025-04-07T13:57:59.987608Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.988281Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8241:6128], server id = [2:8243:6130], tablet id = 72075186224037905, status = OK 2025-04-07T13:57:59.988351Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8241:6128], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.988508Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8233:6120], server id = [2:8238:6125], tablet id = 72075186224037902 2025-04-07T13:57:59.988532Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.988994Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8234:6121], server id = [2:8239:6126], tablet id = 72075186224037903 2025-04-07T13:57:59.989020Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.989135Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T13:57:59.989287Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8244:6131], server id = [2:8248:6135], tablet id = 72075186224037906, status = OK 2025-04-07T13:57:59.989345Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8244:6131], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.989394Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8245:6132], server id = [2:8247:6134], tablet id = 72075186224037907, status = OK 2025-04-07T13:57:59.989442Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8245:6132], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.990192Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8246:6133], server id = [2:8249:6136], tablet id = 72075186224037908, status = OK 2025-04-07T13:57:59.990256Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8246:6133], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:57:59.990359Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T13:57:59.990552Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8240:6127], server id = [2:8242:6129], tablet id = 72075186224037904 2025-04-07T13:57:59.990577Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.991014Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T13:57:59.991158Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T13:57:59.991251Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8241:6128], server id = [2:8243:6130], tablet id = 72075186224037905 2025-04-07T13:57:59.991274Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:57:59.991445Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T13:57:59.991489Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:57:59.991735Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:00.002286Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:00.002655Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:00.005307Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8245:6132], server id = [2:8247:6134], tablet id = 72075186224037907 2025-04-07T13:58:00.005350Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:00.005926Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8244:6131], server id = [2:8248:6135], tablet id = 72075186224037906 2025-04-07T13:58:00.005956Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:00.006191Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:00.006634Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8246:6133], server id = [2:8249:6136], tablet id = 72075186224037908 2025-04-07T13:58:00.006656Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:00.143702Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8266:6153]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:00.143982Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:58:00.144034Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8266:6153], StatRequests.size() = 1 2025-04-07T13:58:00.532485Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmM1NjBlNGItYTU1YTYyOGQtZWM1OGMwYWUtODAyM2VmYzY=, TxId: 2025-04-07T13:58:00.532567Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmM1NjBlNGItYTU1YTYyOGQtZWM1OGMwYWUtODAyM2VmYzY=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T13:58:00.533275Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8274:6159]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:00.533555Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:00.533991Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T13:58:00.534042Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T13:58:00.537156Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T13:58:00.537232Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T13:58:00.537288Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T13:58:00.548779Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpWorkloadServiceDistributed::TestDistributedQueue >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TxUsage::WriteToTopic_Demo_34 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] Leader for TabletID 9437184 is [1:161:2139] sender: [1:162:2058] recipient: [1:153:2135] Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] Leader for TabletID 9437185 is [2:177:2096] sender: [2:179:2049] recipient: [2:156:2093] Leader for TabletID 9437184 is [1:161:2139] sender: [1:205:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [1:207:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:209:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:210:2049] recipient: [2:150:2092] Leader for TabletID 9437185 is [2:177:2096] sender: [1:213:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:215:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:216:2049] recipient: 
[2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [2:218:2049] recipient: [2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [1:247:2058] recipient: [1:15:2062] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> TxUsage::WriteToTopic_Demo_35 >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |88.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> TTabletPipeTest::TestSendWithoutWaitOpen >> TxUsage::WriteToTopic_Demo_9 [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TxUsage::WriteToTopic_Demo_46 >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore >> TTabletResolver::NodeProblem >> TTabletResolver::NodeProblem [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-04-07T13:58:12.320541Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.320802Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:201:2134] CurrentLeaderTablet: [1:202:2135] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T13:58:12.320847Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry 
leader tabletId: 123 followers: 0 2025-04-07T13:58:12.320901Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2025-04-07T13:58:12.321152Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.321418Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:207:2138] CurrentLeaderTablet: [1:208:2139] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T13:58:12.321468Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-07T13:58:12.321517Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2025-04-07T13:58:12.322728Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.322789Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2025-04-07T13:58:12.322987Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.323033Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2025-04-07T13:58:12.323231Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2025-04-07T13:58:12.323279Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:201:2134] by NodeId 2025-04-07T13:58:12.323353Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.323548Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:217:2092] CurrentLeaderTablet: [2:218:2093] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 0, 9}} 2025-04-07T13:58:12.323585Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-07T13:58:12.323650Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2025-04-07T13:58:12.323885Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:207:2138] by NodeId 2025-04-07T13:58:12.323957Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.324154Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false 
LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T13:58:12.324196Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-07T13:58:12.324243Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-07T13:58:12.325969Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-04-07T13:58:12.326032Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.326127Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2025-04-07T13:58:12.326376Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.326434Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-07T13:58:12.326734Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-04-07T13:58:12.326795Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:217:2092] by NodeId 2025-04-07T13:58:12.326871Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.327134Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:235:2092] CurrentLeaderTablet: [3:236:2093] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T13:58:12.327178Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-07T13:58:12.327226Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2025-04-07T13:58:12.327499Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.327553Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-07T13:58:12.327776Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-04-07T13:58:12.327843Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-07T13:58:12.327886Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2025-04-07T13:58:12.328117Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:223:2094] by NodeId 2025-04-07T13:58:12.328180Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 
1:2:0} 2025-04-07T13:58:12.328424Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T13:58:12.328466Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-07T13:58:12.328510Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> TxUsage::WriteToTopic_Demo_16 [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TxUsage::WriteToTopic_Demo_24 [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TExternalTableTest::CreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-04-07T13:55:03.054340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:03.054642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:03.054801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001406/r3tmp/tmp5rl5mz/pdisk_1.dat 2025-04-07T13:55:03.620253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27671, node 1 2025-04-07T13:55:04.129864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:04.129922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:04.129952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:04.130537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:04.135936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:04.241211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:04.241339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:04.256570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21481 2025-04-07T13:55:05.072341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:10.163481Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:10.196898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:10.197008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:10.243451Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:10.259707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:10.542749Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:10.600218Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:10.600309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:10.637470Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:10.637620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:10.637802Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:10.637873Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:10.637921Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:10.637963Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:10.638030Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:10.638082Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:10.638443Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:10.945079Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:10.945189Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:10.954210Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T13:55:10.962842Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T13:55:10.963702Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:10.964762Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:10.995207Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:10.995279Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:10.995363Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:10.997492Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:10.997601Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:11.018229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:11.044955Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:11.045137Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:11.060278Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:11.078105Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:11.130296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:11.497069Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:11.689339Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:12.683218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.683403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:12.708889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:13.110084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:13.110511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:13.110890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:13.111067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:13.111199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:13.111333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:55:13.111465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:55:13.111582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:55:13.111723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:55:13.111868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:55:13.112027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:55:13.112162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:55:13.195953Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:13.196047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:13.196301Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:13.196434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:13.196620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:13.196793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTx ... = [2:8186:6097], tablet id = 72075186224037900, status = OK 2025-04-07T13:58:06.642255Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8181:6092], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.642892Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8180:6091], server id = [2:8185:6096], tablet id = 72075186224037899 2025-04-07T13:58:06.642933Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.643086Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8182:6093], server id = [2:8188:6099], tablet id = 72075186224037901, status = OK 2025-04-07T13:58:06.643157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8182:6093], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.643274Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8183:6094], server id = [2:8187:6098], tablet id = 72075186224037902, status = OK 2025-04-07T13:58:06.643329Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8183:6094], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.643388Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8184:6095], server id = [2:8190:6101], tablet id = 72075186224037903, status = OK 2025-04-07T13:58:06.643439Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8184:6095], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.643917Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T13:58:06.647228Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8189:6100], server id = [2:8191:6102], tablet id = 72075186224037904, status = OK 2025-04-07T13:58:06.647324Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8189:6100], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.647510Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T13:58:06.648394Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8181:6092], server id = [2:8186:6097], tablet id = 72075186224037900 
2025-04-07T13:58:06.648429Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.648485Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T13:58:06.648766Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T13:58:06.649234Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8182:6093], server id = [2:8188:6099], tablet id = 72075186224037901 2025-04-07T13:58:06.649264Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.649309Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T13:58:06.649661Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8192:6103], server id = [2:8194:6105], tablet id = 72075186224037905, status = OK 2025-04-07T13:58:06.649744Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8192:6103], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.649900Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8183:6094], server id = [2:8187:6098], tablet id = 72075186224037902 2025-04-07T13:58:06.649924Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.649997Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8184:6095], server id = [2:8190:6101], tablet id = 72075186224037903 2025-04-07T13:58:06.650018Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.650456Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8193:6104], server id = [2:8197:6108], tablet id = 72075186224037906, status = OK 2025-04-07T13:58:06.650525Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8193:6104], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.650661Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8189:6100], server id = [2:8191:6102], tablet id = 72075186224037904 2025-04-07T13:58:06.650686Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.651079Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8195:6106], server id = [2:8198:6109], tablet id = 72075186224037907, status = OK 2025-04-07T13:58:06.651148Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8195:6106], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.651304Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8196:6107], server id = [2:8199:6110], tablet id = 72075186224037908, status = OK 2025-04-07T13:58:06.651360Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8196:6107], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:06.651823Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T13:58:06.652417Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T13:58:06.652559Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T13:58:06.652647Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T13:58:06.652706Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:06.652894Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:06.653481Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8192:6103], server id = 
[2:8194:6105], tablet id = 72075186224037905 2025-04-07T13:58:06.653512Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.653581Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8193:6104], server id = [2:8197:6108], tablet id = 72075186224037906 2025-04-07T13:58:06.653603Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.653652Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8195:6106], server id = [2:8198:6109], tablet id = 72075186224037907 2025-04-07T13:58:06.653672Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.653727Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8196:6107], server id = [2:8199:6110], tablet id = 72075186224037908 2025-04-07T13:58:06.653750Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:06.670703Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:06.670924Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T13:58:07.505493Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 2025-04-07T13:58:07.505573Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:09.985479Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:58:09.985747Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T13:58:11.697636Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-04-07T13:58:11.697743Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:11.697930Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:11.698006Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:11.698148Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T13:58:11.698183Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:11.698603Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:11.724688Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:11.724944Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-07T13:58:11.725571Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8320:6174], server id = [2:8321:6175], tablet id = 72075186224037900, status = OK 2025-04-07T13:58:11.725692Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8320:6174], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:11.726449Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T13:58:11.726541Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:11.726742Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:11.726934Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:11.727370Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:11.729574Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8320:6174], server id = [2:8321:6175], tablet id = 72075186224037900 2025-04-07T13:58:11.729610Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:11.730554Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:11.761850Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8338:6192]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:11.762107Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:58:11.762157Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8338:6192], StatRequests.size() = 1 2025-04-07T13:58:11.909593Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTQ1ZDVkNS1hY2RhMmFkMy1kZmVjNTJlZS0xZjM4ZWU0NA==, TxId: 2025-04-07T13:58:11.909669Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTQ1ZDVkNS1hY2RhMmFkMy1kZmVjNTJlZS0xZjM4ZWU0NA==, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T13:58:11.910268Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8347:6198]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:11.910729Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T13:58:11.910787Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T13:58:11.911015Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:11.915633Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T13:58:11.915710Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T13:58:11.915774Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T13:58:11.925400Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> TxUsage::WriteToTopic_Demo_17 >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:15.840413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:15.840526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:15.840566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:15.840599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:15.840652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:15.840715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:15.840780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:15.842174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:15.923357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:15.923421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:15.942649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:15.942805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:15.942975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:15.948907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:15.949085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:15.949745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:15.949950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:15.952552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:15.977525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:15.977624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:15.977805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:15.977862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:15.977909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:15.978104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.007500Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:16.198905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:16.199155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.199359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:16.199559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:16.199614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.208933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:16.209086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:16.209305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.209355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:16.209389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:16.209422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:16.211491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.211551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:16.211596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:16.215868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.215948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.215995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.216094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.219760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:16.227014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:16.227261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:16.228287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:16.228442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:16.228500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.228765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:16.228833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.229014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:16.229091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:16.231211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:16.231270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:16.231436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:16.231496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:16.231709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.231772Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:16.231878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:16.231909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.231948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:16.231982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.232019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:16.232045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:16.232106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:16.232156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-07T13:58:16.232188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:16.234013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:16.247992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:16.248088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:16.248130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:16.248186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:16.248320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 78944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:16.296909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:16.296968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:16.297002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:58:16.297026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:58:16.297039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:58:16.297079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.297121Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:58:16.297230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:58:16.297258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:16.297301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:58:16.297334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:16.297378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:58:16.297410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:58:16.297469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:16.297503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T13:58:16.297535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 5 2025-04-07T13:58:16.297553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:58:16.298323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:16.298383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:16.298424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:16.298459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:58:16.298492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:16.299475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:16.299547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:16.299572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:16.299605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:58:16.299633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:16.299705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:58:16.301341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:16.302345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:58:16.302533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:58:16.302567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T13:58:16.302928Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:58:16.303044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:58:16.303077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:298:2290] TestWaitNotification: OK eventTxId 101 2025-04-07T13:58:16.303468Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:16.303632Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 189us result status StatusSuccess 2025-04-07T13:58:16.303935Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-07T13:58:16.306264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:16.306472Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-07T13:58:16.306523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-07T13:58:16.306554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-07T13:58:16.308279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable 
request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:16.308418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:58:16.308587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:58:16.308633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:58:16.309000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:58:16.309062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:58:16.309087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 102 2025-04-07T13:58:16.309490Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:16.309661Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 155us result status StatusPathDoesNotExist 2025-04-07T13:58:16.309813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-04-07T13:57:57.084247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:57:57.085286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:57:57.085665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a11/r3tmp/tmpMO72SL/pdisk_1.dat 2025-04-07T13:57:58.142045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:57:58.263857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:58.368325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:58.376853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:58.393578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:58.626126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:59.280896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T13:57:59.780780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:59.780906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:59.780998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:59.853848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:58:00.097119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:58:03.021261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88pcawc8vj1prykjq68xqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ2MmFmOC03ZTM0MjYxNS0zNzVjODgyLTczMzNiZGMy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:58:03.305922Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88pfnxefjsp3n7q4fw4rb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU5YTJmOTktYjQzODhkYTEtMjMxZDRjNDEtODE2ZjVkNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-04-07T13:58:05.406399Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88pg5595k4ddsrtapynjde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2Y2Y1NjItOTc2MmVjM2EtMWI4ZTc2ZTAtOGQ5M2YyM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-07T13:58:06.056310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr88pj2mc7xp6pc8cde1fmzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQwZDY3OWItNGU1YzExNTktYmUzOTcyODMtZDgzOTEwMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-04-07T13:58:06.188580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88pjfnbp7st0snm0ncp1mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQwZDY3OWItNGU1YzExNTktYmUzOTcyODMtZDgzOTEwMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-04-07T13:58:06.726681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr88pk1w8xen7bfna8pka3nt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAwY2EyM2EtZWEyYWYwMDEtYmMyOTNmZDAtZjAzNDRmMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-04-07T13:58:06.845871Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88pk4pcxpyxzq6bpfggt9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2Y2Y1NjItOTc2MmVjM2EtMWI4ZTc2ZTAtOGQ5M2YyM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-04-07T13:58:06.971650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88pk8z31t719p6w8jp3rd5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2Y2Y1NjItOTc2MmVjM2EtMWI4ZTc2ZTAtOGQ5M2YyM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-04-07T13:58:07.054300Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmM2Y2Y1NjItOTc2MmVjM2EtMWI4ZTc2ZTAtOGQ5M2YyM2U=, ActorId: [1:929:2746], ActorState: ExecuteState, TraceId: 01jr88pkbzfytxnjkm06wa8kkk, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T13:58:07.073575Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88pkbzfytxnjkm06wa8kkk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2Y2Y1NjItOTc2MmVjM2EtMWI4ZTc2ZTAtOGQ5M2YyM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:58:11.457576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:58:11.457982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:11.458060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a11/r3tmp/tmpk7qFkL/pdisk_1.dat 2025-04-07T13:58:11.777507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:11.816992Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:11.874722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:11.874861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:11.888914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:12.018069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:58:12.043106Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:623:2531], Recipient [2:631:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:58:12.044257Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:623:2531], Recipient [2:631:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:58:12.044733Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:631:2537] 2025-04-07T13:58:12.044946Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:12.096481Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:623:2531], Recipient [2:631:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:58:12.097267Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:12.097388Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:58:12.099024Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:58:12.099113Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:58:12.099176Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:58:12.099510Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:58:12.099569Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:58:12.100868Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:58:12.101024Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:648:2546] 2025-04-07T13:58:12.101077Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:58:12.101115Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T13:58:12.101153Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:58:12.101580Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:631:2537], Recipient [2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:58:12.101634Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:58:12.102168Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:58:12.102267Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:58:12.102356Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:639:2541], Recipient [2:631:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:12.102391Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:12.102441Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# [0:0:0] 2025-04-07T13:58:12.102545Z node 2 :TX_DATASHA ... rtbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T13:58:14.296446Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:58:14.300563Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:631:2537]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-07T13:58:14.300719Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:720:2600]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-07T13:58:14.312868Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T13:58:14.313079Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:631:2537], Recipient [2:720:2600]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T13:58:14.313121Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:58:14.313170Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-07T13:58:14.313281Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-07T13:58:14.313360Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:720:2600], Recipient [2:631:2537]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-07T13:58:14.313387Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T13:58:14.313414Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-04-07T13:58:15.090385Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jr88ptxzfemfct8grskfty9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWM5YWM2MmQtMTBkMTRlZi0yYzAxZjQzNy03YWUxZjA0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:58:15.094316Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1052:2842], Recipient [2:631:2537]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2025-04-07T13:58:15.094451Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T13:58:15.094530Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-07T13:58:15.094619Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T13:58:15.094662Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-07T13:58:15.094721Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T13:58:15.094758Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T13:58:15.094811Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-07T13:58:15.094860Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T13:58:15.094890Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T13:58:15.094912Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T13:58:15.094933Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-07T13:58:15.095043Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2025-04-07T13:58:15.095323Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T13:58:15.095381Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-07T13:58:15.095430Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1052:2842], 0} after executionsCount# 1 2025-04-07T13:58:15.095489Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1052:2842], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:58:15.095576Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1052:2842], 0} finished in read 2025-04-07T13:58:15.095656Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T13:58:15.095680Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 
executing on unit ExecuteRead 2025-04-07T13:58:15.095702Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T13:58:15.095726Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-07T13:58:15.095786Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T13:58:15.095812Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T13:58:15.095839Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-07T13:58:15.095885Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T13:58:15.095992Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T13:58:15.096208Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:631:2537]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2025-04-07T13:58:15.096470Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1054:2843], Recipient [2:720:2600]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2025-04-07T13:58:15.096539Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-07T13:58:15.096585Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-04-07T13:58:15.096638Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-07T13:58:15.096686Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-04-07T13:58:15.096710Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T13:58:15.096734Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T13:58:15.096773Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-04-07T13:58:15.096801Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-07T13:58:15.096823Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T13:58:15.096842Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-04-07T13:58:15.096863Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-04-07T13:58:15.096964Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2025-04-07T13:58:15.097188Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 
2025-04-07T13:58:15.097240Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-07T13:58:15.097270Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1054:2843], 0} after executionsCount# 1 2025-04-07T13:58:15.097305Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1054:2843], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T13:58:15.097359Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1054:2843], 0} finished in read 2025-04-07T13:58:15.097418Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-07T13:58:15.097440Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T13:58:15.097462Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T13:58:15.097507Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-04-07T13:58:15.097552Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-07T13:58:15.097587Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T13:58:15.097614Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-04-07T13:58:15.097639Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T13:58:15.097711Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T13:58:15.098318Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:720:2600]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2025-04-07T13:58:15.099011Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1052:2842], Recipient [2:631:2537]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T13:58:15.099076Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T13:58:15.101094Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1054:2843], Recipient [2:720:2600]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T13:58:15.101141Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TExternalTableTest::DropExternalTable >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> TxUsage::WriteToTopic_Demo_25 >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit [GOOD] >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> 
TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:16.131245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:16.131364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:16.131409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:16.131440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:16.131482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:16.131507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:16.131561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:16.131861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:16.225295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:16.225350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:16.239493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:16.239638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:16.239811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:16.258605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:16.258805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:16.259416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:16.259619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:16.265603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:16.266905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:16.266984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:16.267102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:16.267143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:16.267176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:16.267344Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.275795Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:16.378348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:16.378580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.378800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:16.379001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:16.379052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.381589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:16.381723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:16.381915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.381980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:16.382014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:16.382056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:16.384115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.384174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:16.384240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:16.386086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.386132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.386170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.386236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.398824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:16.401068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:16.401278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:16.402356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:16.402509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:16.402558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.402818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:16.402873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:16.403048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:16.403126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:16.408132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:16.408182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:16.408366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:16.408403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:16.408676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:16.408723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:16.408844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:16.408883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.408932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:16.408983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:16.409024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:16.409052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:16.409142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:16.409185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:16.409226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:16.412959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:16.413086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:16.413124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:16.413161Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:16.413230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:16.413339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 70245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T13:58:17.370273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:58:17.370293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T13:58:17.371594Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.371665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.371697Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:17.371737Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:58:17.371776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:58:17.372846Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.372918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.372953Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:17.372988Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T13:58:17.373015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:17.373690Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.373750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:17.373776Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:17.373811Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:58:17.373837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:17.373892Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T13:58:17.375293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:17.377539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:17.377630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:58:17.377842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:58:17.377886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:58:17.378270Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:58:17.378370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:58:17.378406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:328:2320] TestWaitNotification: OK eventTxId 102 2025-04-07T13:58:17.378832Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:17.379018Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 228us result status StatusSuccess 2025-04-07T13:58:17.379273Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-07T13:58:17.381980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:17.382278Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-07T13:58:17.382355Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-04-07T13:58:17.382486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T13:58:17.384775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-04-07T13:58:17.384929Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, 
wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:58:17.385253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:58:17.385301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:58:17.385701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:58:17.385790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:17.385826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:336:2328] TestWaitNotification: OK eventTxId 103 2025-04-07T13:58:17.386293Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:17.386477Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 204us result status StatusSuccess 2025-04-07T13:58:17.386751Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo >> TxUsage::WriteToTopic_Demo_35 [GOOD] >> TExternalTableTest::DropTableTwice >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TExternalTableTest::Decimal [GOOD] >> TxUsage::WriteToTopic_Demo_36 >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::DropTableTwice 
[GOOD] >> AnalyzeColumnshard::AnalyzeServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-04-07T13:55:01.441273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:01.441625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:01.441781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001407/r3tmp/tmp0znw96/pdisk_1.dat 2025-04-07T13:55:01.959197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9236, node 1 2025-04-07T13:55:02.353347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:02.353406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:02.353435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:02.354127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:02.359760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:02.469446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:02.469601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:02.494387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21702 2025-04-07T13:55:03.255057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:07.870055Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:07.925024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:07.925159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:07.977466Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:07.986578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:08.554726Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:08.749254Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:08.749366Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:08.806413Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:08.806621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:08.806867Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:08.806948Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:08.807009Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:08.807079Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:08.807134Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:08.807201Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:08.807696Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:09.121408Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.121528Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1761:2555], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:09.136249Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1766:2560] 2025-04-07T13:55:09.149014Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:09.150179Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1812:2583] 2025-04-07T13:55:09.150641Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1812:2583], schemeshard id = 72075186224037889 2025-04-07T13:55:09.180916Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:09.180988Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:09.181075Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:09.191712Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:09.191819Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:09.201733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:09.220977Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:09.221157Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:09.238461Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:09.261167Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:09.348832Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:09.728635Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:09.897411Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:11.124795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2149:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.124933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.144114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:11.341860Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:11.342118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:11.342616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:11.342813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:11.342947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:11.343096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:55:11.343208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:55:11.343322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:55:11.343458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:55:11.343579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:55:11.343711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:55:11.343835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2232:2813];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:55:11.389988Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:55:11.390100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:55:11.390233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:55:11.390303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:55:11.390521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:55:11.390567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:55:11.390682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... ncNode(), pipe client id = [2:7312:5426] 2025-04-07T13:58:14.405243Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:58:14.405348Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:58:14.405991Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:58:14.406705Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:58:14.407020Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded database: /Root/Database 2025-04-07T13:58:14.407061Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start key 2025-04-07T13:58:14.407094Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table owner id: 72075186224037889 2025-04-07T13:58:14.407125Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table local path id: 4 2025-04-07T13:58:14.407160Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start time: 1744034294317658 2025-04-07T13:58:14.407192Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal IsColumnTable: 1 2025-04-07T13:58:14.407222Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded global traversal round: 2 2025-04-07T13:58:14.407318Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 1 2025-04-07T13:58:14.407397Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:58:14.407495Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2025-04-07T13:58:14.407566Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2025-04-07T13:58:14.407771Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2025-04-07T13:58:14.407840Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:58:14.407988Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. 
PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:14.409124Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:58:14.409721Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:14.409785Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:14.410122Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:58:14.411114Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:14.411171Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:14.412299Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:14.435522Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:14.435750Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T13:58:14.436440Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7359:5453], server id = [2:7360:5454], tablet id = 72075186224037899, status = OK 2025-04-07T13:58:14.436610Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7359:5453], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:14.437405Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T13:58:14.437499Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:14.437773Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:14.437996Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:14.438307Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:14.441416Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7359:5453], server id = [2:7360:5454], tablet id = 72075186224037899 2025-04-07T13:58:14.441463Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:14.442066Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:14.480016Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7377:5471]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:14.480272Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:58:14.480318Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7377:5471], StatRequests.size() = 1 2025-04-07T13:58:14.711658Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-04-07T13:58:14.712200Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzNkOTNmMjYtODI1OGQwMmQtZDM5MzU1ZWQtY2NkMWJjNGQ=, TxId: 2025-04-07T13:58:14.712264Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzNkOTNmMjYtODI1OGQwMmQtZDM5MzU1ZWQtY2NkMWJjNGQ=, TxId: 2025-04-07T13:58:14.712874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:14.726519Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7387:5477] 2025-04-07T13:58:14.726700Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7312:5426], server id = [2:7388:5478], tablet id = 72075186224037897, status = OK 2025-04-07T13:58:14.726756Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7388:5478] 2025-04-07T13:58:14.726855Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7387:5477], schemeshard id = 72075186224037889 2025-04-07T13:58:14.726988Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7388:5478], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T13:58:14.757744Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:14.757816Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T13:58:14.965823Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7396:5483] 2025-04-07T13:58:14.966489Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2655:3161] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T13:58:14.966542Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:2655:3161] 2025-04-07T13:58:14.966604Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T13:58:15.552880Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T13:58:15.552995Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:15.565320Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T13:58:15.565394Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:16.311597Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:58:16.311676Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:16.311722Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T13:58:17.889402Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:17.889564Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T13:58:17.889623Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:17.890359Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:17.910033Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:17.910459Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:17.910530Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:17.910970Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:17.927132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:17.927355Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-07T13:58:17.927865Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7475:5526], server id = [2:7476:5527], tablet id = 72075186224037899, status = OK 2025-04-07T13:58:17.928008Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7475:5526], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:17.928724Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T13:58:17.928832Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:17.929083Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:17.929299Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:17.929624Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:17.932344Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7475:5526], server id = [2:7476:5527], tablet id = 72075186224037899 2025-04-07T13:58:17.932398Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:17.933063Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:17.954769Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDM4NzU4ZWUtNGM4M2YxMjItYmU3ZDBkOS03NWM4MjkwNQ==, TxId: 2025-04-07T13:58:17.954865Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDM4NzU4ZWUtNGM4M2YxMjItYmU3ZDBkOS03NWM4MjkwNQ==, TxId: 2025-04-07T13:58:17.955469Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:17.970505Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:17.970584Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2655:3161] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:18.129891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:18.130021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:18.130090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:18.130128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:18.130179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:18.130210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:18.130268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:18.130615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:18.205195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:18.205270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:18.222604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-07T13:58:18.222766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:18.222928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:18.237852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:18.238054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:18.238687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:18.238900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:18.253871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:18.255196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:18.255254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:18.255366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:18.255478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:18.255520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:18.255670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.273999Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:18.521182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:18.521433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.521648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:18.521888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:18.521939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.533821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:18.534001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:18.534191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T13:58:18.534249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:18.534288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:18.534344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:18.537360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.537441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:18.537498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:18.539744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.539793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.539834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:18.539899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:18.544727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:18.547251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:18.547480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:18.548571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:18.548755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:18.548813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:18.549685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:18.549750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:18.550067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:18.550144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-07T13:58:18.552511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:18.552561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:18.552775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:18.552819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:18.553098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:18.553146Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:18.553262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:18.553303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:18.553363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:18.553410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:18.553443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:18.553472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:18.553554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:18.553605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:18.553634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:18.556320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:18.556433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:18.556478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:18.556514Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:18.556562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:18.556653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-07T13:58:19.729095Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:19.729221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:19.729275Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-07T13:58:19.729425Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T13:58:19.729621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:19.729684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:19.729741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:19.730146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:19.730530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:58:19.731900Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:19.731935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:19.732065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:19.732142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:19.732206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:19.732306Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:19.732334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:58:19.732365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T13:58:19.732386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T13:58:19.732428Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:58:19.732711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:19.732750Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:58:19.732851Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:58:19.732884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:19.732940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T13:58:19.733000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:19.733036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:58:19.733069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:58:19.733135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:58:19.733163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:19.733196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-07T13:58:19.733245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T13:58:19.733275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:58:19.733291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T13:58:19.734290Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.734367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.734396Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:19.734433Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:58:19.734467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:58:19.735468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.735535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.735559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:19.735584Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T13:58:19.735608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:19.736334Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.736417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:19.736445Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:19.736468Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:58:19.736490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:19.736541Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T13:58:19.739681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:19.741046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:19.741128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T13:58:19.741341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T13:58:19.741399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T13:58:19.741803Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T13:58:19.741893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:58:19.741929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:330:2322] TestWaitNotification: OK eventTxId 101 2025-04-07T13:58:19.742354Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:19.742539Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 232us result status StatusSuccess 2025-04-07T13:58:19.742836Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 
PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> TxUsage::WriteToTopic_Demo_42 [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:20.079339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:20.079467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.079508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:20.079558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:20.079602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:20.079634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:20.079692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.080012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:20.149920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:20.149987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:20.165081Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Complete 2025-04-07T13:58:20.165243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:20.165390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:20.177839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:20.178041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:20.178707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.178915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:20.180940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.182172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:20.182243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.182362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:20.182438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:20.182482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:20.182637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.189435Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:20.314596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:20.314841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.315044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:20.315285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:20.315339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.317596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.317735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:20.317897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T13:58:20.317950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:20.317987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:20.318021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:20.319916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.319978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:20.320015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:20.321650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.321694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.321733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:20.321794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:20.330211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:20.332261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:20.332457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:20.333437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.333594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:20.333645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:20.333877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:20.333931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:20.334101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:20.334171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-07T13:58:20.336121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:20.336186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:20.336336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.336378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:20.336592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.336641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:20.336774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:20.336813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:20.336876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:20.336920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:20.336953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:20.336983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:20.337044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:20.337100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:20.337134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:20.338943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:20.339062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:20.339105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:20.339141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:20.339191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:20.339286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:58:20.428693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-07T13:58:20.429471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.429571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:20.429621Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-07T13:58:20.429773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:20.429855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-07T13:58:20.429999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:20.430054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:58:20.430086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:20.430745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:20.432364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:58:20.432702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:20.432740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:20.432872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:20.432995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:20.433104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.433144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T13:58:20.433185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T13:58:20.433230Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:58:20.433421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.433458Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:58:20.433569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:58:20.433606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:20.433652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-07T13:58:20.433689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:20.433722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:58:20.433766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:58:20.433837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:20.433870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:20.433907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-04-07T13:58:20.433941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-07T13:58:20.433970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T13:58:20.434008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-07T13:58:20.434381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.434474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.434529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:20.434564Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T13:58:20.434597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:58:20.435015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:58:20.435064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T13:58:20.435135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 3 2025-04-07T13:58:20.435680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.435753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.435790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:20.435821Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-07T13:58:20.435850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:20.436535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.436600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:20.436625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:20.436650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T13:58:20.436722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:20.436791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T13:58:20.440150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:20.440592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:58:20.440722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:20.441099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:58:20.441427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:58:20.441470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:58:20.441881Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:58:20.441973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:20.442025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy 
waiter [1:360:2352] TestWaitNotification: OK eventTxId 103 2025-04-07T13:58:20.442514Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:20.442724Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 196us result status StatusPathDoesNotExist 2025-04-07T13:58:20.442902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-04-07T13:55:00.625496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:00.625897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:00.626068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001413/r3tmp/tmpvvatSC/pdisk_1.dat 2025-04-07T13:55:01.209159Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4963, node 1 2025-04-07T13:55:01.588008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:01.588071Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:01.588098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:01.588843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:01.593169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:01.728952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:01.729095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:01.760715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31561 2025-04-07T13:55:02.552717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:06.137573Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:06.216605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:06.220894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:06.268323Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:06.283152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:06.877151Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:07.077040Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:07.077150Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:07.154949Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:07.155128Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:07.155350Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:07.155424Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:07.155489Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:07.155545Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:07.155603Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:07.155654Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:07.156090Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:07.434932Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.435085Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.465685Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T13:55:07.482704Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T13:55:07.483711Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:07.484930Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T13:55:07.508106Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:07.508177Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:07.508249Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T13:55:07.510419Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:07.510560Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:07.580797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:07.608407Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:07.608592Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:07.627451Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:07.655004Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:07.710051Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:08.242138Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:08.421432Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T13:55:09.204591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T13:55:10.051369Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:10.268863Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T13:55:10.268959Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T13:55:10.269057Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2486:2904], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T13:55:10.271870Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2494:2908] 2025-04-07T13:55:10.281319Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2494:2908], schemeshard id = 72075186224037899 2025-04-07T13:55:11.566582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2613:3187], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.566758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:11.607886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T13:55:11.771584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:11.771874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:11.772219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:11.772364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:11.772494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:11.772636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:55:11.772872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:55:11.773048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:55:11.773199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:55:11.773311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:55:11.773427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:55:11.773555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2698:3003];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:55:11.805563Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:55:11.805693Z node 2 ... uest id = 1, ReplyToActorId = [2:8303:6161], StatRequests.size() = 1 2025-04-07T13:58:12.454042Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2RhYzIzNTUtNmI0MTY4NjgtMTc3ZjdhMDktOWNjYTE2NjA=, TxId: 2025-04-07T13:58:12.454127Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2RhYzIzNTUtNmI0MTY4NjgtMTc3ZjdhMDktOWNjYTE2NjA=, TxId: 2025-04-07T13:58:12.454718Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:12.474309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T13:58:12.474388Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T13:58:12.603465Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T13:58:12.603565Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T13:58:12.683753Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8305:6163], schemeshard count = 1 2025-04-07T13:58:13.227419Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T13:58:13.227492Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 220.000000s, at schemeshard: 72075186224037899 2025-04-07T13:58:13.227729Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-07T13:58:13.248277Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T13:58:14.121588Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:58:14.121679Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-07T13:58:14.128916Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:14.147850Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:14.148460Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:14.148520Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-04-07T13:58:14.179417Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:15.837611Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:15.837675Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:15.837705Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T13:58:15.837739Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-04-07T13:58:15.837782Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T13:58:15.838561Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:15.852320Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T13:58:15.852454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:15.853106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:15.853176Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:15.854016Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T13:58:15.854113Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T13:58:15.854711Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:15.872951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T13:58:15.873039Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:15.873263Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T13:58:15.873852Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8452:6253], server id = [2:8453:6254], tablet id = 72075186224037905, status = OK 2025-04-07T13:58:15.874004Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8452:6253], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T13:58:15.875281Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T13:58:15.875383Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:15.875573Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:15.875802Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:15.876169Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-07T13:58:15.878689Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8452:6253], server id = [2:8453:6254], tablet id = 72075186224037905 2025-04-07T13:58:15.878737Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:15.879428Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:15.900429Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8470:6271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:15.900637Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:58:15.900699Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8470:6271], StatRequests.size() = 1 2025-04-07T13:58:16.122021Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDhhMzVjYmItYjg5ZWVlYzYtMTZiZjcwYTAtOWU1ZGVkYjk=, TxId: 2025-04-07T13:58:16.122094Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhhMzVjYmItYjg5ZWVlYzYtMTZiZjcwYTAtOWU1ZGVkYjk=, TxId: 2025-04-07T13:58:16.122677Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:16.166069Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T13:58:16.166134Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T13:58:16.997352Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T13:58:16.997444Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:17.785624Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:58:17.785711Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:17.785756Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T13:58:19.423981Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:58:19.424218Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T13:58:19.435514Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:19.435653Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-04-07T13:58:19.435688Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T13:58:19.436432Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:19.467650Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:19.468071Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:19.468147Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:19.468591Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:19.501585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:19.501777Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T13:58:19.502390Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8575:6335], server id = [2:8576:6336], tablet id = 72075186224037905, status = OK 2025-04-07T13:58:19.502522Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8575:6335], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T13:58:19.503202Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T13:58:19.503280Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:19.503440Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:19.503584Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:19.503845Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T13:58:19.505967Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8575:6335], server id = [2:8576:6336], tablet id = 72075186224037905 2025-04-07T13:58:19.506000Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:19.506457Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:19.536120Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzJkODg3NDUtZmZlMzBkZjUtZWI1MDcwOWItYTZmZDE5Nzg=, TxId: 2025-04-07T13:58:19.536186Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzJkODg3NDUtZmZlMzBkZjUtZWI1MDcwOWItYTZmZDE5Nzg=, TxId: 2025-04-07T13:58:19.537002Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:19.562754Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T13:58:19.562841Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3164:3342] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:20.623193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:20.623365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.623411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:20.623443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:20.623487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:20.623514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:20.623579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.623908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:20.781354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:20.781419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:20.812469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:20.812613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:20.812786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:20.828181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:20.828401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:20.829032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.829257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:20.831168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.832361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:20.832412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.832538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:20.832585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-04-07T13:58:20.832634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:20.832826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:20.848098Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:21.021681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:21.021940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.022141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:21.022349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:21.022401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.027633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.027766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:21.027949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.027997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:21.028029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:21.028072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:21.032565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.032634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:21.032686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:21.034617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.034667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.034701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.034765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.038564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:21.040707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:21.040893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:21.041960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.042100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:21.042153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.042405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:21.042453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.042619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:21.042694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:21.050034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:21.050105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:21.050318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:21.050354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:21.050577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.050618Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:21.050721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:21.050756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.050801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:21.050845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.050875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2025-04-07T13:58:21.050917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:21.050988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:21.051033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:21.051064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:21.053007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:21.053141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:21.053178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:21.053228Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:21.053269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:21.053363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... : Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:21.242147Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-07T13:58:21.242194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:58:21.243062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.243148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.243172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:21.243199Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T13:58:21.243244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:58:21.243307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:58:21.245820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-07T13:58:21.245965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at 
step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-07T13:58:21.246697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.246812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:21.246867Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-07T13:58:21.246989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-07T13:58:21.247157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:21.247228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:21.247780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:21.248838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-04-07T13:58:21.250162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:21.250200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:21.250339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:21.250445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:21.250529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:21.250557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T13:58:21.250604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T13:58:21.250626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T13:58:21.250838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.250881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:58:21.250967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:58:21.251008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:21.251052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 103, ready parts: 1/1, is published: false 2025-04-07T13:58:21.251087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:21.251118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:58:21.251172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:58:21.251233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:58:21.251262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:21.251314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-07T13:58:21.251345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-07T13:58:21.251369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T13:58:21.252071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.252133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.252176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:21.252222Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-07T13:58:21.252258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:58:21.253022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.253099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:21.253122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:21.253146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T13:58:21.253187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:21.253287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T13:58:21.255697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:21.256567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:58:21.256877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:58:21.256932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:58:21.257323Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:58:21.257422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:21.257456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:357:2349] TestWaitNotification: OK eventTxId 103 2025-04-07T13:58:21.257928Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:21.258160Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 209us result status StatusSuccess 2025-04-07T13:58:21.258455Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_43 >> TExternalTableTest::SchemeErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:20.890388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:20.890493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.890544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:20.890575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:20.890616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:20.890637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:20.890684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:20.891020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:20.955437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:20.955497Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:20.969898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:20.970070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:20.970248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:20.988828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:20.989008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:20.989627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:20.989829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:20.992405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.993593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:20.993652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:20.993783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:20.993834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:20.993875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:20.994017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.010128Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:21.128027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:21.128258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.128470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:21.128715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:21.128773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.131102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.131241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:21.131440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.131521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:21.131555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:21.131590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:21.133672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.133730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:21.133766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:21.135429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.135470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.135504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.135580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.139177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:21.141105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:21.141363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:21.142428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.142594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:21.142647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.142906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:21.142960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:21.143120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:21.143199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:21.145334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:21.145402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:21.145574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:21.145611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:21.145824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.145871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:21.145987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:21.146027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.146069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:21.146104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:21.146135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:21.146161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:21.146224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:21.146288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:21.146319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:21.148112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:21.148219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:21.148250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:21.148281Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:21.148326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:21.148443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 4 2025-04-07T13:58:21.580653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2025-04-07T13:58:21.580805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:21.581110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T13:58:21.581182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T13:58:21.590010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-04-07T13:58:21.590202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-07T13:58:21.590484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:21.590533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:21.590738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-07T13:58:21.590839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:21.590882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:475:2436], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-07T13:58:21.590925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:475:2436], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-07T13:58:21.591263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.591314Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:21.591375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-04-07T13:58:21.591503Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:21.592283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.592397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.592434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-07T13:58:21.592483Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-07T13:58:21.592529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-07T13:58:21.593430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.593532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.593571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-07T13:58:21.593600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-07T13:58:21.593627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T13:58:21.593689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-04-07T13:58:21.605031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-07T13:58:21.605231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-04-07T13:58:21.607161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T13:58:21.607457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T13:58:21.607580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:21.607714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 
Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:21.607771Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-07T13:58:21.607921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-04-07T13:58:21.608093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T13:58:21.608158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 FAKE_COORDINATOR: Erasing txId 129 2025-04-07T13:58:21.613075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:21.613140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:21.613339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-07T13:58:21.613419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:21.613458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:475:2436], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-07T13:58:21.613500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:475:2436], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-07T13:58:21.613796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-07T13:58:21.613884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-04-07T13:58:21.613995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-07T13:58:21.614032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-07T13:58:21.614077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-07T13:58:21.614120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-07T13:58:21.614153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-07T13:58:21.614184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-07T13:58:21.614274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T13:58:21.614320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-07T13:58:21.614357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-07T13:58:21.614387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-04-07T13:58:21.615163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.615237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.615265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-07T13:58:21.615312Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T13:58:21.615350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-07T13:58:21.618409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.618503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T13:58:21.618542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-07T13:58:21.618573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-07T13:58:21.618619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T13:58:21.618709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-07T13:58:21.624430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T13:58:21.624785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: 2025-04-07T13:54:59.531044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:59.531455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:59.531635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141e/r3tmp/tmp0186KT/pdisk_1.dat 2025-04-07T13:55:00.030061Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3620, node 1 2025-04-07T13:55:00.466701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:00.466762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:00.466796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:00.467380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:00.470312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:00.582686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:00.582840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:00.599332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27350 2025-04-07T13:55:01.462109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:55:06.626171Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T13:55:06.715800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:06.715936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:06.761042Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:55:06.767897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:07.409422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:07.616283Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T13:55:07.616394Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T13:55:07.669178Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T13:55:07.670925Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T13:55:07.671150Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T13:55:07.671211Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T13:55:07.671271Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T13:55:07.671368Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T13:55:07.671442Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T13:55:07.671507Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T13:55:07.672138Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T13:55:07.902638Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.902769Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1760:2552], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T13:55:07.906297Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1763:2554] 2025-04-07T13:55:07.955045Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T13:55:07.961443Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1809:2577] 2025-04-07T13:55:07.961996Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1809:2577], schemeshard id = 72075186224037889 2025-04-07T13:55:08.042527Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T13:55:08.042593Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T13:55:08.042672Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T13:55:08.048055Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:08.048161Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:08.081463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T13:55:08.108150Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T13:55:08.108310Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T13:55:08.161571Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:55:08.187030Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:08.268291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T13:55:08.675514Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T13:55:08.869376Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T13:55:10.586933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2142:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.587093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:10.612195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T13:55:10.763017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:55:10.770075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:55:10.770441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:55:10.770590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:55:10.770721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:55:10.770845Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:55:10.770946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:55:10.771061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:55:10.771192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:55:10.771343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:55:10.771459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:55:10.771558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2225:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:55:10.831943Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:55:10.832047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:55:10.832184Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:55:10.832243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:55:10.832434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:55:10.832469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:55:10.832606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... -07T13:58:10.939438Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T13:58:10.957871Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:12.583605Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:12.583677Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:12.583708Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T13:58:12.583746Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T13:58:12.583799Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:12.584789Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:12.598597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T13:58:12.598744Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:12.599240Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:12.599321Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-07T13:58:12.605094Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T13:58:12.626574Z node 2 :STATISTICS ERROR: [72075186224037897] TEvDeliveryProblem with ColumnShard=72075186224037899 2025-04-07T13:58:12.658512Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:12.658938Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T13:58:12.766236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7315:5425];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T13:58:12.794383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:58:12.797276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7315:5425];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:58:12.813604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7315:5425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T13:58:12.974903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7311:5423], server id = [2:7365:5458], tablet id = 72075186224037899, status = OK 2025-04-07T13:58:12.975080Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7311:5423], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:12.975923Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T13:58:12.976035Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:12.976379Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:12.976597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:12.976856Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7311:5423], server id = [2:7365:5458], tablet id = 72075186224037899 2025-04-07T13:58:12.976923Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:12.977191Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:12.980281Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:13.027187Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7382:5475]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T13:58:13.027513Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T13:58:13.027571Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7382:5475], StatRequests.size() = 1 2025-04-07T13:58:13.266731Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWUwODdmYTAtZjM3YTAzYTYtZTU5ZDFlMjgtMzUxNTcyYQ==, TxId: 2025-04-07T13:58:13.266814Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWUwODdmYTAtZjM3YTAzYTYtZTU5ZDFlMjgtMzUxNTcyYQ==, TxId: 2025-04-07T13:58:13.267382Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:13.287733Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:13.287805Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T13:58:14.205289Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T13:58:14.205372Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T13:58:14.941467Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:58:14.941557Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:14.941601Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T13:58:14.941881Z node 2 :STATISTICS DEBUG: [72075186224037897] Reset DeliveryProblem to ColumnShard=72075186224037899 2025-04-07T13:58:16.492880Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T13:58:16.493160Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T13:58:16.517926Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:16.518029Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:16.518075Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T13:58:16.518307Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T13:58:16.519223Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T13:58:16.519326Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T13:58:16.532822Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 
2025-04-07T13:58:18.201506Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T13:58:18.201573Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T13:58:18.201605Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T13:58:19.840717Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T13:58:19.840853Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T13:58:19.840917Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:19.841650Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T13:58:19.874599Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T13:58:19.875046Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T13:58:19.875130Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T13:58:19.875538Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T13:58:19.891758Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T13:58:19.891937Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T13:58:19.892428Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7549:5567], server id = [2:7550:5568], tablet id = 72075186224037899, status = OK 2025-04-07T13:58:19.892570Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7549:5567], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T13:58:19.893525Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T13:58:19.893634Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T13:58:19.893847Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T13:58:19.894025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T13:58:19.894314Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T13:58:19.906144Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7549:5567], server id = [2:7550:5568], tablet id = 72075186224037899 2025-04-07T13:58:19.906205Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T13:58:19.907395Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T13:58:19.965822Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODc0M2ZjNWItZDE1MmFhMmItNWIyN2QxZmMtYzcxNjViZjQ=, TxId: 2025-04-07T13:58:19.965917Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODc0M2ZjNWItZDE1MmFhMmItNWIyN2QxZmMtYzcxNjViZjQ=, TxId: 2025-04-07T13:58:19.966532Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T13:58:19.982474Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T13:58:19.982549Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2659:3168] >> TxUsage::WriteToTopic_Demo_46 [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore >> TExternalTableTest::SchemeErrors [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |88.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:23.459862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:23.460002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:23.460053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:23.460080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:23.460115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:23.460138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:23.460184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:23.460466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:23.541748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:23.541808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:23.552692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:23.552841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:23.552974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:23.558212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:23.558376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:23.558949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:23.559144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:23.560817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:23.561919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:23.561967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:23.562090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:23.562145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:23.562189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:23.562326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.570001Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:23.693858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.694062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.694247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:23.694426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:23.694474Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.700788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:23.700929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:23.701076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.701119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:23.701141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:23.701162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:23.702871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.702917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:23.702949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:23.705268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.705319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.705358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:23.705440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:23.710320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:23.722628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:23.722841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:23.723765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:23.723913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.723959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-07T13:58:23.724271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:23.724335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:23.724497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:23.724863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:23.732387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:23.732506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:23.732675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:23.732710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:23.733236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:23.733281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:23.733434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:23.733472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:23.733514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:23.733549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:23.733577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:23.733601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:23.733666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:23.733738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:23.733769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:23.736678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:23.736816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:23.736857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:23.736888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:23.736934Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:23.737041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-04-07T13:58:23.840494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.844161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-04-07T13:58:23.844287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.844578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-04-07T13:58:23.847648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.847879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-07T13:58:23.850395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.850676Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-04-07T13:58:23.850791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.850947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2025-04-07T13:58:23.853111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.853301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-07T13:58:23.855676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.855964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-04-07T13:58:23.856045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.856153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-04-07T13:58:23.858308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.858492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-07T13:58:23.860911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.861249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: 
"RowId" } } 2025-04-07T13:58:23.861352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.861483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-04-07T13:58:23.863754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.863916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-04-07T13:58:23.866505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.866841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-04-07T13:58:23.866950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.867139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-04-07T13:58:23.869473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.869651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-04-07T13:58:23.872350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:23.872732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: 
ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-04-07T13:58:23.872814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-04-07T13:58:23.872944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-07T13:58:23.874974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:23.875136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TxUsage::WriteToTopic_Demo_47 >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TTxLocatorTest::TestAllocateAllByPieces |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> TTxLocatorTest::TestAllocateAll >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:24.027586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:24.027691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:24.027738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:24.027769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:24.027809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:24.027833Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:24.027889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:24.028200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:24.141972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:24.142031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:24.162593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:24.162736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:24.162908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:24.168307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:24.168488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:24.169239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:24.169442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:24.171583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:24.172825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:24.172877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:24.172986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:24.173032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:24.173065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:24.173246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.182457Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:24.311506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:24.311754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.311971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:24.312184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:24.312234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.316771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:24.316907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:24.317108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.317158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:24.317215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:24.317249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:24.320720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.320782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:24.320822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:24.325771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.325843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.325908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:24.325974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:24.329041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:24.331198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:24.331336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:24.332098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:24.332233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:24.332270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:24.332439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-07T13:58:24.332473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:24.332600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:24.332698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:24.336514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:24.336548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:24.336709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:24.336750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:24.336991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.337050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:24.337150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:24.337181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:24.337233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:24.337269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:24.337311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:24.337340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:24.337404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:24.337441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:24.337467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:24.339231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:24.339364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:24.339403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:24.339447Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:24.339484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:24.339571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... mentPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:24.420359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:24.420698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:24.421044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:24.422122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:24.422150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:24.422264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:24.422330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:58:24.422412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:24.422434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T13:58:24.422453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-07T13:58:24.422469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:58:24.422643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:24.422686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:58:24.422783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:58:24.422807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:24.422838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T13:58:24.422884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:24.422921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:58:24.422948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:58:24.423000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:58:24.423034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T13:58:24.423058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 
2025-04-07T13:58:24.423076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T13:58:24.423614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:24.423677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:24.423699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:24.423723Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-07T13:58:24.423748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T13:58:24.424620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:24.424700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:24.424726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:24.424762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T13:58:24.424784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:24.424831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T13:58:24.426155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:24.427052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T13:58:24.427263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T13:58:24.427300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T13:58:24.427693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T13:58:24.427818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:58:24.427868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:323:2315] TestWaitNotification: OK eventTxId 102 2025-04-07T13:58:24.428316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:24.428511Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 195us result status StatusSuccess 2025-04-07T13:58:24.428813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-07T13:58:24.431560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:24.431873Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-07T13:58:24.431949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists:1 2025-04-07T13:58:24.432083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-04-07T13:58:24.437657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path 
type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-07T13:58:24.437793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T13:58:24.438069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T13:58:24.438137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T13:58:24.438441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T13:58:24.438517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:24.438541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:331:2323] TestWaitNotification: OK eventTxId 103 |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TTxLocatorTest::TestAllocateAll [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-04-07T13:58:25.255962Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:25.256416Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:25.257704Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:25.261280Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.263296Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:25.275943Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.276053Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.276165Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:25.276254Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.276327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.276398Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:25.276497Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:25.277056Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#8796093022207 2025-04-07T13:58:25.278270Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.278393Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.278483Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-04-07T13:58:25.278520Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-04-07T13:58:25.284045Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#8796093022207 2025-04-07T13:58:25.285336Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.285408Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.285503Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-04-07T13:58:25.285548Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-04-07T13:58:25.285936Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2025-04-07T13:58:25.286275Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-04-07T13:58:25.286339Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.286410Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-04-07T13:58:25.286460Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-04-07T13:58:25.286787Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2025-04-07T13:58:25.287089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.287747Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.288536Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-04-07T13:58:25.288999Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2025-04-07T13:58:25.290912Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2025-04-07T13:58:25.292359Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.292581Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.292720Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-04-07T13:58:25.292996Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-04-07T13:58:25.295063Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2025-04-07T13:58:25.296210Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.296307Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.296390Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-04-07T13:58:25.296437Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-04-07T13:58:25.296841Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2025-04-07T13:58:25.297156Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-04-07T13:58:25.297231Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.297317Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-04-07T13:58:25.297355Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-04-07T13:58:25.297760Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2025-04-07T13:58:25.298012Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.298063Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.298140Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-04-07T13:58:25.298176Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2025-04-07T13:58:25.298601Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2025-04-07T13:58:25.298894Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.298939Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.299481Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-04-07T13:58:25.299529Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-04-07T13:58:25.299980Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2025-04-07T13:58:25.300311Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.300386Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.300460Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-04-07T13:58:25.300496Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-04-07T13:58:25.301014Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2025-04-07T13:58:25.301295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-04-07T13:58:25.301345Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.301436Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-04-07T13:58:25.301472Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:109:2143] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-04-07T13:58:25.301966Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#8796093022207 2025-04-07T13:58:25.302257Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.302301Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:149:2183] requested range size#8796093022207 2025-04-07T13:58:25.311253Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.311327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.311394Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-04-07T13:58:25.311425Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:149:2183] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-04-07T13:58:25.312035Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:153:2187] requested range size#8796093022207 2025-04-07T13:58:25.312300Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.312363Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.312431Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-04-07T13:58:25.312482Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:153:2187] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-04-07T13:58:25.313107Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:157:2191] requested range size#8796093022207 2025-04-07T13:58:25.313397Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.313462Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.313592Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-04-07T13:58:25.313631Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-04-07T13:58:25.314287Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2025-04-07T13:58:25.314582Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.314662Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.314735Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-04-07T13:58:25.314769Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2025-04-07T13:58:25.315371Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2025-04-07T13:58:25.315639Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.315691Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.315774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-04-07T13:58:25.315809Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-04-07T13:58:25.316437Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2025-04-07T13:58:25.316761Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.316808Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.316928Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-04-07T13:58:25.316968Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-04-07T13:58:25.317685Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2025-04-07T13:58:25.317944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.318001Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.318097Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-04-07T13:58:25.318131Z node 1 :TX_ALLOCATOR DEBUG: 
tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-04-07T13:58:25.318785Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2025-04-07T13:58:25.319058Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.319151Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.319242Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-04-07T13:58:25.319276Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2025-04-07T13:58:25.319933Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2025-04-07T13:58:25.320237Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.320285Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.320378Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-04-07T13:58:25.320410Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-04-07T13:58:25.321137Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2025-04-07T13:58:25.321421Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.321516Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.321588Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-04-07T13:58:25.321622Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-04-07T13:58:25.322342Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2025-04-07T13:58:25.322646Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.322748Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.322839Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-04-07T13:58:25.322875Z node 1 :TX_ALLOCATOR 
DEBUG: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-04-07T13:58:25.323621Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2025-04-07T13:58:25.323895Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.323957Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.324021Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-04-07T13:58:25.324051Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2025-04-07T13:58:25.324848Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#31 2025-04-07T13:58:25.325110Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.325169Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.325260Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-04-07T13:58:25.325296Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-04-07T13:58:25.325992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#1 2025-04-07T13:58:25.326080Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-07T13:58:25.326115Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-04-07T13:58:25.302759Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:25.303147Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:25.303771Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:25.306983Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.307439Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:25.324592Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.328925Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.329106Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:25.329220Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.329296Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.329366Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:25.329486Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:25.330856Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-07T13:58:25.331341Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-07T13:58:25.331519Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-07T13:58:25.332108Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-07T13:58:25.332260Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-07T13:58:25.332522Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.332631Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.340422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-07T13:58:25.340764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.340963Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.341079Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-07T13:58:25.341308Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.341496Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.341537Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.341662Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2025-04-07T13:58:25.341790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.341871Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-07T13:58:25.342021Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.342214Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.342307Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-07T13:58:25.342520Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.342627Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-07T13:58:25.342671Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-07T13:58:25.342757Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-07T13:58:25.342788Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-04-07T13:58:25.342918Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.342967Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343063Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343130Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-07T13:58:25.343151Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-04-07T13:58:25.343233Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343308Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-07T13:58:25.343345Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 300000 to# 400000 2025-04-07T13:58:25.343415Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 
2025-04-07T13:58:25.343435Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 400000 to# 500000 2025-04-07T13:58:25.343514Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343564Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343595Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-07T13:58:25.343614Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 500000 to# 600000 2025-04-07T13:58:25.343698Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-07T13:58:25.343722Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 600000 to# 700000 2025-04-07T13:58:25.343845Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343888Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.343944Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-07T13:58:25.343967Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 700000 to# 800000 2025-04-07T13:58:25.344079Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.344127Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-07T13:58:25.344147Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 800000 to# 900000 2025-04-07T13:58:25.344226Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-07T13:58:25.344250Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-07T13:58:25.350020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2025-04-07T13:58:25.350454Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:119:2153] requested range size#100000 2025-04-07T13:58:25.350794Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:121:2155] requested range size#100000 2025-04-07T13:58:25.351018Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.351137Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:123:2157] 
requested range size#100000 2025-04-07T13:58:25.351329Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.351533Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:125:2159] requested range size#100000 2025-04-07T13:58:25.351848Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.351969Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.352085Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:127:2161] requested range size#100000 2025-04-07T13:58:25.352318Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [720575940464476 ... from# 8200000 Reserved to# 8300000 2025-04-07T13:58:25.423349Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:393:2427] TEvAllocateResult from# 8200000 to# 8300000 2025-04-07T13:58:25.423490Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.423578Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-04-07T13:58:25.423619Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:395:2429] TEvAllocateResult from# 8300000 to# 8400000 2025-04-07T13:58:25.423708Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.423785Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-04-07T13:58:25.423808Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8400000 to# 8500000 2025-04-07T13:58:25.423873Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.423935Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-04-07T13:58:25.423955Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8500000 to# 8600000 2025-04-07T13:58:25.424070Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.424150Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-04-07T13:58:25.424172Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8600000 to# 8700000 2025-04-07T13:58:25.424261Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.424309Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-04-07T13:58:25.424341Z node 1 
:TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8700000 to# 8800000 2025-04-07T13:58:25.424455Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-04-07T13:58:25.424480Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8800000 to# 8900000 2025-04-07T13:58:25.424545Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-04-07T13:58:25.424569Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-07T13:58:25.432415Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:429:2463] requested range size#100000 2025-04-07T13:58:25.441120Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2025-04-07T13:58:25.441681Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-04-07T13:58:25.441847Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.442047Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.442206Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-04-07T13:58:25.442444Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.442620Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2025-04-07T13:58:25.442720Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.442884Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.443015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-04-07T13:58:25.443120Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.443356Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-04-07T13:58:25.443467Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.443626Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.443781Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate 
Sender# [1:443:2477] requested range size#100000 2025-04-07T13:58:25.443900Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.444027Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.444142Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-04-07T13:58:25.444171Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:429:2463] TEvAllocateResult from# 9000000 to# 9100000 2025-04-07T13:58:25.444245Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.444331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-04-07T13:58:25.444570Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.444609Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.448927Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-04-07T13:58:25.449161Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.449364Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-07T13:58:25.449400Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9100000 to# 9200000 2025-04-07T13:58:25.449616Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-07T13:58:25.449640Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9200000 to# 9300000 2025-04-07T13:58:25.449687Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.449742Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.449854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-07T13:58:25.449887Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9300000 to# 9400000 2025-04-07T13:58:25.450003Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-07T13:58:25.450024Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9400000 to# 9500000 2025-04-07T13:58:25.450068Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { 
Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.450185Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-07T13:58:25.450209Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9500000 to# 9600000 2025-04-07T13:58:25.450250Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.450284Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.450390Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-07T13:58:25.450421Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9600000 to# 9700000 2025-04-07T13:58:25.450470Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.450538Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-07T13:58:25.450562Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9700000 to# 9800000 2025-04-07T13:58:25.450645Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-07T13:58:25.450667Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9800000 to# 9900000 2025-04-07T13:58:25.450764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-07T13:58:25.450800Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-04-07T13:58:25.533607Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:25.534105Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:25.534824Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:25.536427Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.539130Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:25.547060Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.547214Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.547346Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:25.547466Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.547551Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.547637Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:25.547774Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:25.548280Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710655 2025-04-07T13:58:25.548779Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.548850Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:25.548908Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-04-07T13:58:25.548945Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-04-07T13:58:25.554006Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#1 2025-04-07T13:58:25.554145Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-07T13:58:25.554187Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> LocalPartition::DiscoveryServiceBadNodeId [GOOD] >> LocalPartition::DescribeHang |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::TestImposibleSize [GOOD] >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::Boot >> TTxLocatorTest::TestZeroRange [GOOD] >> TTxLocatorTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-04-07T13:58:27.188504Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:27.197341Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:27.198059Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:27.199573Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.199993Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:27.227330Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.227453Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.227573Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:27.227684Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.227758Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.227833Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:27.227947Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:27.228510Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710656 2025-04-07T13:58:27.228654Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-04-07T13:58:27.239336Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-07T13:58:27.239869Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2106] requested range size#123456 2025-04-07T13:58:27.240279Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.240342Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.240419Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-04-07T13:58:27.240450Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2106] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-04-07T13:58:27.243673Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#281474976587200 2025-04-07T13:58:27.243843Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-04-07T13:58:27.243886Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-07T13:58:27.244352Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#246912 2025-04-07T13:58:27.244813Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.244891Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.245001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-04-07T13:58:27.245038Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-04-07T13:58:27.245470Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#281474976340288 2025-04-07T13:58:27.245576Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-04-07T13:58:27.245610Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> Secret::SimpleQueryService [GOOD] >> TTxLocatorTest::TestWithReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-04-07T13:58:27.612284Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:27.612774Z node 1 :TABLET_MAIN DEBUG: Tablet: 
72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:27.613587Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:27.615276Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.615785Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:27.625803Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.625932Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.626070Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:27.626199Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.626282Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.626367Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:27.626483Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-04-07T13:58:27.569793Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:27.570247Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:27.570949Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:27.572529Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.573049Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:27.582153Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.582285Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.582413Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:27.582540Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.582631Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.582724Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:27.582848Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:27.583466Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#0 2025-04-07T13:58:27.583961Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.584072Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:27.584148Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-04-07T13:58:27.584184Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> TExternalTableTest::ParallelCreateExternalTable >> TExternalTableTest::ParallelCreateSameExternalTable >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> TTxLocatorTest::TestWithReboot [GOOD] >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> Trace::NextToken [GOOD] >> TSettingsValidation::TestDifferentDedupParams >> TExternalTableTest::ParallelCreateExternalTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2025-04-07T13:58:28.617355Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T13:58:28.617927Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T13:58:28.618748Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:28.620546Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.621142Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:28.633238Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.633371Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.633532Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T13:58:28.633656Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.633736Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.633826Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:28.633955Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:28.635492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-07T13:58:28.636263Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-07T13:58:28.636527Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-07T13:58:28.637276Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-07T13:58:28.637461Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-07T13:58:28.637757Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.637890Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.637986Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-07T13:58:28.638130Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.638282Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.638399Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-07T13:58:28.638584Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.638780Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.638821Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.638947Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2025-04-07T13:58:28.639081Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.639163Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-07T13:58:28.639327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.639496Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.639576Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-07T13:58:28.639771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.639888Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-07T13:58:28.639935Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-07T13:58:28.640026Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-07T13:58:28.640065Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-04-07T13:58:28.640230Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.640287Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.640387Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.640459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-07T13:58:28.640482Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-04-07T13:58:28.640585Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.640700Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-07T13:58:28.640732Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 300000 to# 400000 2025-04-07T13:58:28.640841Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 
2025-04-07T13:58:28.640865Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 400000 to# 500000 2025-04-07T13:58:28.640951Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.641051Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.641088Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-07T13:58:28.641113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 500000 to# 600000 2025-04-07T13:58:28.641225Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-07T13:58:28.641252Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 600000 to# 700000 2025-04-07T13:58:28.641395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.641444Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.641504Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-07T13:58:28.641532Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 700000 to# 800000 2025-04-07T13:58:28.641645Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:28.641698Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-07T13:58:28.641727Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 800000 to# 900000 2025-04-07T13:58:28.641793Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-07T13:58:28.641820Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-07T13:58:28.645844Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-04-07T13:58:28.647080Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-04-07T13:58:28.647987Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-04-07T13:58:28.648059Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: 
[[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-04-07T13:58:28.648280Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-04-07T13:58:28.648338Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-04-07T13:58:28.648399Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-04-07T13:58:28.648458Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 000000 Reserved to# 9100000 2025-04-07T13:58:29.060288Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:613:2544] TEvAllocateResult from# 9000000 to# 9100000 2025-04-07T13:58:29.060446Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.060570Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-07T13:58:29.060620Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9100000 to# 9200000 2025-04-07T13:58:29.068906Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.069201Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-07T13:58:29.069268Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9200000 to# 9300000 2025-04-07T13:58:29.069408Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.069499Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-07T13:58:29.069528Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9300000 to# 9400000 2025-04-07T13:58:29.069709Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.069824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-07T13:58:29.069860Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9400000 to# 9500000 2025-04-07T13:58:29.070029Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070090Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 
2025-04-07T13:58:29.070114Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9500000 to# 9600000 2025-04-07T13:58:29.070194Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070249Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070306Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-07T13:58:29.070331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9600000 to# 9700000 2025-04-07T13:58:29.070433Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-07T13:58:29.070474Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9700000 to# 9800000 2025-04-07T13:58:29.070581Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070676Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-07T13:58:29.070701Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9800000 to# 9900000 2025-04-07T13:58:29.070782Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070875Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.070978Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-07T13:58:29.071002Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-07T13:58:29.080355Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-04-07T13:58:29.081830Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-04-07T13:58:29.082592Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-04-07T13:58:29.082670Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-04-07T13:58:29.082906Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 
72057594046447617 2025-04-07T13:58:29.082960Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-07T13:58:29.083001Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083039Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-07T13:58:29.083077Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083140Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083189Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083236Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-04-07T13:58:29.083273Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083330Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083504Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-04-07T13:58:29.083550Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-07T13:58:29.083595Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-07T13:58:29.083625Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083660Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-07T13:58:29.083688Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-07T13:58:29.083736Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 
2025-04-07T13:58:29.083771Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-04-07T13:58:29.083804Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-04-07T13:58:29.083848Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-04-07T13:58:29.083884Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-04-07T13:58:29.083919Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-04-07T13:58:29.084211Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-04-07T13:58:29.094311Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.097925Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T13:58:29.098195Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T13:58:29.099064Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T13:58:29.099205Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.099273Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T13:58:29.099362Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2025-04-07T13:55:47.952622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:47.952995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:47.953214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021d4/r3tmp/tmp7f6YIB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25052, node 1 TClient is connected to server localhost:27223 2025-04-07T13:55:48.580692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:48.634498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:48.638849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:48.638924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:48.638956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:48.639165Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:48.690102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:48.690224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:48.703142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-07T13:56:01.643999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:01.644143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:789:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:01.644226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:01.663736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:56:01.700607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:56:02.849424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-07T13:56:07.319796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:07.886388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-07T13:56:08.900234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T13:56:09.814019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:10.285544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:14.128578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-07T13:56:14.670764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.477820Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88jrx9d6fxgb3skpzkkzke", SessionId: ydb://session/3?node_id=1&id=ZDg1YjZhZmEtYTI5ZTJiYi05ZGU3MWVmZi1lM2RjMTMyZQ==, Slow query, duration: 17.866104s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`", parameters: 0b REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-07T13:56:32.623561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:56:32.623626Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-07T13:56:56.381315Z node 1 
:KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr88me092v63ts1abbf6pygm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNmYTg4MzUtY2M2YTFlYTYtNDdiNGQwZDYtZjE4ZmU4YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T13:57:20.140053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715755:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.239360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715764:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.320168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715777:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.253516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715780:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.895754Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88n5kg9c68cqbt6hzz04nk", SessionId: ydb://session/3?node_id=1&id=MWI2ZDMwNTQtNTVlNzFmY2ItN2QxMmZmZmQtZjBjNzU5YjY=, Slow query, duration: 10.779385s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS)", parameters: 0b REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T13:57:42.864613Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715795. Ctx: { TraceId: 01jr88nvfk5w52qv5tgzjynsan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkyNDk4MDgtZTFjMjU5NTUtOWUyZDJjOTUtYzhlNTc0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-07T13:58:24.304523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715845. Ctx: { TraceId: 01jr88q42dfyp5cmvm0tmgwg9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U2ODkxYjMtNzQzMTBhZmYtZTZlYjlmMjctNmMxY2JjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:28.949236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:28.949356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:28.949396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:28.949429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:28.949475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:28.949518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:28.949573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:28.949893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:29.015301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:29.015361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:29.031337Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:29.031517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:29.031682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:29.047969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:29.048153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:29.048678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.048905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:29.052109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.053429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:29.053488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.053651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:29.053703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:29.053743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:29.053920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.061660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:29.179561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:29.179784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.180015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:29.180220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:29.180272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.194896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.195033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:29.195218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T13:58:29.195275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:29.195330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:29.195376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:29.199613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.199689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:29.199725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:29.201752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.201801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.201841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.201912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.230987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:29.245681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:29.245916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:29.246920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.247793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.247865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.248126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:29.248175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.248382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:29.248452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T13:58:29.254640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:29.254706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:29.254888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.254927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:29.255150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.255190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:29.255281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:29.255311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.255372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:29.255415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.255456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:29.255488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:29.255558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:29.255603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:29.255630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:29.261711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:29.261885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:29.261933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:29.261970Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:29.262018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:29.262158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
126, at schemeshard: 72057594046678944 2025-04-07T13:58:29.388733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.388754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:363:2355] 2025-04-07T13:58:29.388852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.389011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:363:2355] 2025-04-07T13:58:29.389066Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-07T13:58:29.389158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.389171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:363:2355] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-07T13:58:29.389519Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.389686Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 179us result status StatusSuccess 2025-04-07T13:58:29.389871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.390380Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-04-07T13:58:29.390541Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 162us result status StatusSuccess 2025-04-07T13:58:29.390734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.391495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.391628Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 101us result status StatusSuccess 2025-04-07T13:58:29.391860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.392256Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.392368Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 115us result status StatusSuccess 2025-04-07T13:58:29.392555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.392989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.393112Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 135us result status StatusSuccess 2025-04-07T13:58:29.393368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 25649, MsgBus: 21034 2025-04-07T13:57:41.479000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570115498825238:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:41.481740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001046/r3tmp/tmpLlbRPR/pdisk_1.dat 2025-04-07T13:57:42.170129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:42.170225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:42.180302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:42.184195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25649, node 1 2025-04-07T13:57:42.387875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:42.387895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:42.387901Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:42.387982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21034 TClient is connected to server localhost:21034 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:43.012448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:43.049050Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:43.063654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:43.269265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:43.452058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:43.549964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:45.414605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570132678696008:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:45.414694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:45.731379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.774686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.806417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.878869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.936996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.982939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.043390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570136973663801:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.043469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.043856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570136973663806:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:46.048439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:46.061960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570136973663808:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:46.483552Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570115498825238:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:46.483623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:47.122426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:47.203408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25303, MsgBus: 5492 2025-04-07T13:57:48.657180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570147609336126:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:48.657276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001046/r3tmp/tmpj9SKo6/pdisk_1.dat 2025-04-07T13:57:48.773421Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:48.799366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:48.799435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:48.800934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25303, node 2 2025-04-07T13:57:48.851627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:48.851647Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:48.851653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:48.851749Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5492 TClient is connected to server localhost:5492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:57:49.268764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:49.287039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:49.346903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:49.499724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:49.563370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation ... ervice] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:14.646999Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:14.716549Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:14.769103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:14.820813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:14.874674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:14.926829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:15.028800Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570262198165108:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:15.028911Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:15.030657Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570262198165113:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:15.035457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:15.047455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570262198165115:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:58:15.245266Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570240723326550:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:15.245339Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:16.549332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:58:16.690759Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7707, MsgBus: 1135 2025-04-07T13:58:19.191794Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490570279008916350:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:19.191861Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001046/r3tmp/tmpOgnNdb/pdisk_1.dat 2025-04-07T13:58:19.438477Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7707, node 6 2025-04-07T13:58:19.511227Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:19.511350Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:19.512552Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:19.538172Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:19.538198Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:19.538209Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:19.538339Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1135 TClient is connected to server localhost:1135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:58:20.281648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:20.294359Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:58:20.315498Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:20.427082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:20.682092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:20.814142Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:24.194460Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490570279008916350:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:24.194527Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:24.536873Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570300483754534:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:24.537152Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:24.595139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.652346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.712607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.788044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.840498Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.933305Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.070022Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570304778722339:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:25.070114Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:25.070483Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570304778722344:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:25.078734Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:25.107127Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570304778722346:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:58:26.713620Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:58:26.811806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-04-07T13:57:53.726813Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:57:54.103789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:57:54.103858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:54.121509Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:57:54.142719Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T13:57:54.143066Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:57:54.203448Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:57:54.330773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:57:54.331775Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:57:54.370619Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T13:57:54.370746Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T13:57:54.370813Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T13:57:54.396420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:57:54.435923Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T13:57:54.437089Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:57:54.437206Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T13:57:54.437240Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T13:57:54.437272Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T13:57:54.437309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.437497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.437541Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.448979Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T13:57:54.449069Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T13:57:54.449119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.449168Z node 1 :TX_DATASHARD 
DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:57:54.459327Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T13:57:54.459400Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T13:57:54.459437Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T13:57:54.459479Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T13:57:54.459531Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T13:57:54.779913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.779971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.780016Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T13:57:54.789566Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T13:57:54.789653Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:57:54.789766Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:57:54.820557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T13:57:54.820647Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T13:57:54.820713Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T13:57:54.833451Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.833539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T13:57:54.833583Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T13:57:54.833638Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.833950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T13:57:54.834010Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T13:57:54.834053Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T13:57:54.834094Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.844470Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T13:57:54.844559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T13:57:54.844604Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T13:57:54.844680Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.844725Z node 1 
:TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T13:57:54.881307Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:57:54.881379Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T13:57:54.881421Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T13:57:54.881474Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T13:57:54.881549Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T13:57:54.882070Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.882125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:57:54.882168Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2025-04-07T13:57:54.882303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T13:57:54.882338Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T13:57:54.882460Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T13:57:54.882520Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.882558Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T13:57:54.882598Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T13:57:54.886376Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T13:57:54.896763Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:57:54.897052Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.897108Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T13:57:54.897163Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T13:57:54.897225Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T13:57:54.897261Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T13:57:54.897307Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T13:57:54.897356Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T13:57:54.897398Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.897439Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T13:57:54.897484Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit LoadTxDetails 2025-04-07T13:57:54.897517Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T13:57:54.897698Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T13:57:54.897753Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.897791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T13:57:54.897835Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T13:57:54.897857Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T13:57:54.897918Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:57:54.897947Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T13:57:54.897982Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:57:54.898019Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T13:57:54.898067Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T13:57:54.898102Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T13:57:54.898141Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T13:57:54.898197Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T13:57:54.898226Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:57:54.898249Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
llReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-04-07T13:58:28.564502Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:58:28.564573Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:58:28.565275Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-04-07T13:58:28.565339Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-07T13:58:28.565368Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-04-07T13:58:28.565394Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:58:28.565419Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T13:58:28.565457Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:28.565508Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-04-07T13:58:28.565545Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-07T13:58:28.565566Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:58:28.565587Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-04-07T13:58:28.565607Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-04-07T13:58:28.565644Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:28.565953Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-04-07T13:58:28.566384Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T13:58:28.566441Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:58:28.566465Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-04-07T13:58:28.566489Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-04-07T13:58:28.566515Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-04-07T13:58:28.566546Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 
2025-04-07T13:58:28.566568Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-04-07T13:58:28.566606Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-04-07T13:58:28.566631Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-04-07T13:58:28.566671Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-07T13:58:28.566692Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-04-07T13:58:28.566715Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-04-07T13:58:28.590373Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:58:28.590443Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-04-07T13:58:28.590480Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-07T13:58:28.590560Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-04-07T13:58:28.599607Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2025-04-07T13:58:28.599673Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T13:58:28.600621Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4522:6452], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:28.604689Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:28.604780Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4521:6451], serverId# [3:4522:6452], sessionId# [0:0:0] 2025-04-07T13:58:28.605507Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-04-07T13:58:28.605552Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:58:28.605641Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T13:58:28.606185Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-04-07T13:58:28.606245Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-07T13:58:28.606276Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 
2025-04-07T13:58:28.606306Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T13:58:28.606330Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T13:58:28.606369Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:28.606421Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-04-07T13:58:28.606450Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-07T13:58:28.606471Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T13:58:28.606493Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-04-07T13:58:28.606515Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-04-07T13:58:28.606553Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T13:58:28.606891Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-04-07T13:58:28.606936Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T13:58:28.606987Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T13:58:28.607015Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-04-07T13:58:28.607043Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-04-07T13:58:28.607074Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-04-07T13:58:28.607111Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-04-07T13:58:28.607135Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-04-07T13:58:28.607163Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-04-07T13:58:28.607188Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-04-07T13:58:28.607233Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-07T13:58:28.607256Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-04-07T13:58:28.607278Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-04-07T13:58:28.649668Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-07T13:58:28.649727Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-07T13:58:28.651809Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-07T13:58:28.651864Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-04-07T13:58:28.651898Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-07T13:58:28.651972Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T13:58:28.663378Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:226:2223], Recipient [3:229:2224]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-04-07T13:58:28.669677Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4536:6465], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:28.669758Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:28.669809Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4535:6464], serverId# [3:4536:6465], sessionId# [0:0:0] 2025-04-07T13:58:28.670304Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4534:6463], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1716 LastUpdateTime: 1716 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:29.306879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:29.307027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:29.307075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:29.307109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:29.307153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:29.307181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:29.307239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:29.307554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:29.410935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:29.410997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:29.426919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:29.427058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:29.427206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:29.439653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:29.439853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:29.440490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.440715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:29.447834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.449219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:29.449283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.449421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:29.449478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:29.449516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:29.449710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.463296Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:29.661017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:29.661263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.661490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:29.661717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:29.661767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.664172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.664282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:29.664438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.664499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:29.664549Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:29.664596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:29.666668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.666723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:29.666755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:29.668483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.668521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.668556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.668613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.672246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:29.674219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:29.674413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:29.675395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:29.675544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.675595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.675857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:29.675906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:29.676081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:29.676165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:29.678245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:29.678305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:29.678483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:29.678553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:29.678773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:29.678814Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:29.678906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:29.678943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.679003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:29.679045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:29.679077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:29.679121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:29.679188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:29.679234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:29.679266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:29.681226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:29.681346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:29.681392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:29.681432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:29.681476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:29.681584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
7304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T13:58:29.787359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-04-07T13:58:29.788800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-07T13:58:29.791403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-07T13:58:29.797539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-04-07T13:58:29.798180Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.798429Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 264us result status StatusSuccess 2025-04-07T13:58:29.798723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:29.799293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.799451Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 145us result status 
StatusSuccess 2025-04-07T13:58:29.799692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-04-07T13:58:29.799950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-04-07T13:58:29.799991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-04-07T13:58:29.800060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-04-07T13:58:29.800078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-04-07T13:58:29.800125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-04-07T13:58:29.800142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-04-07T13:58:29.804783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-07T13:58:29.805050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.805130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:336:2328] 2025-04-07T13:58:29.805393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-07T13:58:29.805488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.805511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:336:2328] 2025-04-07T13:58:29.805610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 
2025-04-07T13:58:29.805672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-07T13:58:29.805692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-07T13:58:29.806279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:29.806442Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 181us result status StatusSuccess 2025-04-07T13:58:29.806680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-04-07T13:58:29.809579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:29.809897Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-07T13:58:29.809971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] 
TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-04-07T13:58:29.810110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T13:58:29.813957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-04-07T13:58:29.814095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |88.5%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister >> TxUsage::WriteToTopic_Demo_17 [GOOD] >> KqpScan::JoinSimple >> KqpScan::UnionThree >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> TPQCachingProxyTest::TestDeregister [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-04-07T13:58:31.444514Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:31.444592Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:58:31.459580Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:31.459671Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-07T13:58:31.459719Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-04-07T13:58:31.459857Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 >> TPQCachingProxyTest::OutdatedSession >> TPQCachingProxyTest::MultipleSessions |88.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TxUsage::WriteToTopic_Demo_36 [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TPQCachingProxyTest::OutdatedSession [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-04-07T13:58:32.464983Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:32.465086Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:58:32.480786Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:32.480878Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-04-07T13:58:32.480955Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-07T13:58:32.480995Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2025-04-07T13:58:32.481057Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-04-07T13:58:32.481102Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2025-04-07T13:58:32.481138Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2025-04-07T13:58:32.481204Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-04-07T13:58:32.763265Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:32.763338Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:58:32.779396Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:32.779495Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-07T13:58:32.779571Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-07T13:58:32.779610Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2025-04-07T13:58:32.779680Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> TxUsage::WriteToTopic_Demo_25 [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-04-07T13:58:33.003209Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:33.003278Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:58:33.032360Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:33.032447Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-07T13:58:33.032514Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-07T13:58:33.032558Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-04-07T13:58:33.032594Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2025-04-07T13:58:33.032649Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1 2025-04-07T13:58:33.032725Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-04-07T13:58:33.032803Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-04-07T13:58:33.032841Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2 >> TxUsage::WriteToTopic_Demo_21_RestartNo [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TxUsage::WriteToTopic_Demo_18_RestartNo >> TSchemeShardTTLTests::TtlTiersValidation >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:33.706285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:33.706413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:33.706450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:33.706485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:33.706542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:33.706586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:33.706648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:33.706946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:33.774208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:33.774290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:33.785299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:33.785430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:33.785582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:33.799909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:33.800115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:33.800785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:33.800987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:33.805472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:33.806873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:33.806940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:33.807063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:33.807110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:33.807158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-07T13:58:33.807345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.816611Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:33.949391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:33.949658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.949875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:33.950112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:33.950181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.954245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:33.954391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:33.954610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.954679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:33.954736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:33.954773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:33.960612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.960713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:33.960749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:33.962824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.962875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.962913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:33.962974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:33.966316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:33.974038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:33.974290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:33.975251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:33.975407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:33.975455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:33.975740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:33.975797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:33.975956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:33.976043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:33.984861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:33.985135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:33.985394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:33.985438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:33.985729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:33.985775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:33.985871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:33.985905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:33.985951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:33.986211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:33.986260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:33.986298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2025-04-07T13:58:33.986373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:33.986410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:33.986440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:33.988573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:33.988704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:33.988742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:33.988861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:33.988922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:33.989074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:58:33.994351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:58:33.994963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:58:33.996277Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:58:34.015848Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:58:34.018077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:34.018427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:34.018530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-04-07T13:58:34.019163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-04-07T13:58:34.020726Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:34.022694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:34.022889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T13:58:34.024036Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_36 [GOOD] Test command err: 2025-04-07T13:56:22.740987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569775877372781:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:22.760887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:22.960318Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001678/r3tmp/tmpHwd0oD/pdisk_1.dat 2025-04-07T13:56:23.250123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:23.250222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:23.271774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:23.277818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21343, node 1 2025-04-07T13:56:23.419173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001678/r3tmp/yandexoa1gYq.tmp 2025-04-07T13:56:23.419230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001678/r3tmp/yandexoa1gYq.tmp 2025-04-07T13:56:23.419422Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001678/r3tmp/yandexoa1gYq.tmp 2025-04-07T13:56:23.419520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:23.488151Z INFO: TTestServer started on Port 62887 GrpcPort 21343 TClient is connected to server localhost:62887 PQClient connected to localhost:21343 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:23.793396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:23.824041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:23.835312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:56:23.840088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:23.971519Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.792582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569788762275366:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.792898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.800790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569788762275394:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:25.810613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:25.831562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569788762275397:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:26.124185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:26.167117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:26.167620Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569793057242784:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:26.203064Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzllNDA2OGYtODQ2MTNlMDgtNmRhYmRjOTAtMjg3YjUxZDc=, ActorId: [1:7490569788762275358:2307], ActorState: ExecuteState, TraceId: 01jr88kgh355qn3zkszm057m1v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:26.218055Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:26.299203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569793057243039:2617] 2025-04-07T13:56:27.724330Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569775877372781:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:27.724487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:32.644346Z :WriteToTopic_Demo_3 INFO: TTopicSdkTestSetup started 2025-04-07T13:56:32.667940Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:32.705088Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:32.705327Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:32.705824Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:32.706080Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:32.706115Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:32.706131Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:32.706155Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:32.706182Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:32.706228Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:32.706245Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:32.729296Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569818827047133:2802] connected; active server actors: 1 2025-04-07T13:56:32.729699Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-07T13:56:32.730147Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:32.730174Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569818827047132:2801], now have 1 active actors on pipe 2025-04-07T13:56:32.732526Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:32.732680Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:32.737467Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:32.737490Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:32.770895Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569780172340418 RawX2: 4294969489 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" ... 
sing sourceId '\0test-message_group_id' seqNo 3 partNo 0 2025-04-07T13:58:32.637942Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {10, 281474976710677}, 100001} part blob complete sourceId '\0test-message_group_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 16546 count 1 nextOffset 1 batches 1 2025-04-07T13:58:32.638797Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {10, 281474976710677}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000| size 16536 WTime 1744034312638 2025-04-07T13:58:32.639075Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:58:32.640378Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16479 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:32.640441Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {10, 281474976710677}, 100001} 2025-04-07T13:58:32.640500Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {10, 281474976710677}, 100001}, SeqNo: 3, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:58:32.640726Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {10, 281474976710677}, 100001} user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:58:32.640757Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {10, 281474976710677}, 100001} user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T13:58:32.640831Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 1 requestId: cookie: 3 2025-04-07T13:58:32.640935Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:58:32.641357Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] read cookie 0 Topic 'topic_A' partition {0, {10, 281474976710677}, 100001} user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:58:32.641383Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:58:32.641462Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T13:58:32.641482Z node 10 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:58:32.641551Z node 10 :PERSQUEUE DEBUG: Topic 'topic_A' partition {0, {10, 281474976710677}, 100001} user test-consumer readTimeStamp done, result 1744034312631 queuesize 0 startOffset 0 2025-04-07T13:58:32.642241Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:58:32.642421Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 3 written_in_tx { } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 6000000 } max_queue_wait_time { nanos: 6000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-07T13:58:32.642460Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] OnAck: seqNo=3, txId=01jr88qcd4dkvraqtecm1px5vf, WriteCount=1, AckCount=1 2025-04-07T13:58:32.643741Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: acknoledged message 3 2025-04-07T13:58:32.648486Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490570333821854859 RawX2: 4503642577045943 } TxId: 281474976710678 Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100001 } Op: Commit SendingShards: 72075186224037894 ReceivingShards: 72075186224037894 Immediate: true WriteId { NodeId: 10 KeyId: 281474976710677 } } 2025-04-07T13:58:32.648525Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {10, 281474976710677}, 100001} for WriteId {10, 281474976710677} 2025-04-07T13:58:32.648550Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710678 has WriteId {10, 281474976710677} 2025-04-07T13:58:32.648566Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2025-04-07T13:58:32.648719Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-07T13:58:32.648777Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-07T13:58:32.648812Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2025-04-07T13:58:32.648842Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 16536 count 1 nextOffset 2 batches 1, NewHead=Offset 2 PartNo 0 PackedSize 0 count 0 nextOffset 2 batches 0 2025-04-07T13:58:32.648942Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 3 partNo 0 2025-04-07T13:58:32.649837Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 16546 count 1 nextOffset 3 batches 1 2025-04-07T13:58:32.649897Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] schedule 
TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-07T13:58:32.650638Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Add new write blob: topic 'topic_A' partition 0 compactOffset 2,1 HeadOffset 1 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 16536 WTime 1744034312650 2025-04-07T13:58:32.650910Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:58:32.653425Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 16479 2025-04-07T13:58:32.654302Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTransactionCompleted WriteId {10, 281474976710677} 2025-04-07T13:58:32.654325Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {10, 281474976710677}, 100001} 2025-04-07T13:58:32.654374Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710677}, 100001}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2025-04-07T13:58:32.654427Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976710677 LockNode: 10 Status: STATUS_NOT_FOUND 2025-04-07T13:58:32.654448Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 281474976710678 for WriteId {10, 281474976710677} 2025-04-07T13:58:32.655757Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {10, 281474976710677}, 100001} 2025-04-07T13:58:32.655833Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {10, 281474976710677} 2025-04-07T13:58:32.655857Z node 10 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 281474976710678 2025-04-07T13:58:32.655899Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T13:58:32.657763Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:32.665335Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-07T13:58:32.665370Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:58:32.665419Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:58:32.665935Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:58:32.665975Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:58:32.672359Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0 grpc read done: success: 0 data: 2025-04-07T13:58:32.672394Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0 grpc read failed 2025-04-07T13:58:32.672421Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0 grpc closed 2025-04-07T13:58:32.672439Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|61694e7f-f3ba1959-5278d234-71ad321b_0 is DEAD 2025-04-07T13:58:32.673303Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:58:32.673339Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:58:32.673377Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:58:32.674198Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:58:32.674229Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570333821854752:2469] destroyed 2025-04-07T13:58:32.674248Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:58:32.674268Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570333821854756:2469] destroyed 2025-04-07T13:58:32.674282Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:58:32.674301Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570333821854837:2469] destroyed 2025-04-07T13:58:32.674337Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> TxUsage::WriteToTopic_Demo_26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:34.879545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:34.879674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:34.879715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:34.879770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:34.879831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:34.879871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:34.879937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:34.880289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:34.966386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:34.966456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:34.998591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:34.998752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:34.998920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:35.019581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:35.019804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:35.020550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.020818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:35.023073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.024504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.024566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.024716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:35.024764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.024805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:35.024989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.032015Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:35.169821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.170077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.170281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:35.170507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:35.170567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.173185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.173338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:35.173543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.173606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:35.173682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:35.173718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:35.176762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.176854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:35.176896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:35.179234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.179292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.179334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.179406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose 
, TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.183184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:35.187119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:35.187366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:35.188378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.188519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.188673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.188973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:35.189034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.189280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.189380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:35.191313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.191353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.191553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.191597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:35.191868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.191919Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:35.192013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:35.192049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.192097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:35.192135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.192189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:35.192221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:35.192285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:35.192321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:35.192351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:35.194150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.194256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.194291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:35.194346Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:35.194397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.194525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... e, message: Transactions { TxId: 103 AckTo { RawX1: 375 RawX2: 4294969642 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.744472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#103:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-04-07T13:58:35.744580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-04-07T13:58:35.744754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:58:35.744800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:58:35.752604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.752685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:35.752895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T13:58:35.753076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.753121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T13:58:35.753179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, 
path id: 4 2025-04-07T13:58:35.757896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.757994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:58:35.759040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:35.759138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:35.759191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:35.759229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-07T13:58:35.759267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T13:58:35.760282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:35.760349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:35.760373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:35.760400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T13:58:35.760434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:58:35.760505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T13:58:35.761452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 968 } } 2025-04-07T13:58:35.761491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-07T13:58:35.761615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 968 } } 2025-04-07T13:58:35.761708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 
ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 968 } } 2025-04-07T13:58:35.762379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 539 RawX2: 4294969783 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:58:35.762425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-07T13:58:35.762549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 539 RawX2: 4294969783 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:58:35.762596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:58:35.762667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 539 RawX2: 4294969783 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-07T13:58:35.762721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.762758Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.762788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T13:58:35.762822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T13:58:35.766191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:35.769836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:35.769965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.770105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.770395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.770446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T13:58:35.770548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T13:58:35.770590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:35.770644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T13:58:35.770712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2372] message: TxId: 103 2025-04-07T13:58:35.770765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T13:58:35.770823Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:58:35.770876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:58:35.770987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:58:35.774481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:35.774544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:567:2508] TestWaitNotification: OK eventTxId 103 W0000 00:00:1744034315.774973 250558 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-04-07T13:58:35.776845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.777262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.777396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-07T13:58:35.777808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-04-07T13:58:35.780864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.781065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:35.333947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:35.334068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:35.334108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:35.334146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:35.334208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:35.334250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:35.334313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:35.334656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:35.421842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:35.421909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:35.441432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:35.441602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:35.441780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:35.455158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:35.455386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:35.456061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.456296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:35.458429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.459767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.459825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.459957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:35.460014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.460054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:35.460227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.469895Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:35.617428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.617710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.617929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:35.618157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:35.618224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.621129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.621278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:35.621474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.621554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:35.621613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:35.621652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:35.623837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.623897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:35.623933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:35.625725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.625772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.625811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.625873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.629494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:35.631471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:35.631658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:35.632689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.632810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.632857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.633099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:35.633155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.633350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.633442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:35.635500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.635545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.635730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.635767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:35.636009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.636052Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:35.636146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:35.636176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.636227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:35.636265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.636323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:35.636357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:35.636414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:35.636451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:35.636480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:35.638426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.638539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.638573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:35.638630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:35.638693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.642696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... meBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.877990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.878179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:35.878397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.878443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:58:35.878480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:58:35.878712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.878761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:58:35.880466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:35.880589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:35.880626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:35.880698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:58:35.880740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:35.881620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:35.881711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:35.881736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:35.881776Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:58:35.881810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:58:35.881888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:58:35.882918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1345 } } 2025-04-07T13:58:35.882963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:35.883080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1345 } } 2025-04-07T13:58:35.883180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1345 } } 2025-04-07T13:58:35.884412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:35.884467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:35.884587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:35.884627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:58:35.884753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:35.884836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.884875Z node 1 
:FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.884916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:58:35.884953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T13:58:35.888231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:35.889633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:35.889795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.890008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.890340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.890388Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:58:35.890494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:58:35.890526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:35.890573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:58:35.890653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 101 2025-04-07T13:58:35.890708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:35.890744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:58:35.890773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:58:35.890914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:35.892625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:58:35.892704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2312] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T13:58:35.895654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.895899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.896189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-04-07T13:58:35.898417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.898575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-07T13:58:35.901297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { StorageName: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.901541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.902076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-04-07T13:58:35.904307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.904447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:36.376292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:36.376381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:36.376413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:36.376446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:36.376510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-07T13:58:36.376572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:36.376631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:36.376955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:36.443899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:36.443945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:36.455562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:36.455684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:36.455821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:36.460700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:36.460883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:36.461430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.461610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:36.463294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.464413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:36.464463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.464566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:36.464601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:36.464649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:36.464834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.470683Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:36.584549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:36.584802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.585009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:36.585260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:36.585326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.595495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.595621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:36.595791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.595847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:36.595901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:36.595935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:36.605726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.605802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:36.605834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:36.613641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.613702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.613738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.613807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.617375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:36.621579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:36.621786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:36.622741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.622872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:36.622923Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.623205Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:36.623256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.623398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:36.623478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:36.631356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:36.631409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:36.631644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.631687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:36.631926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.631970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:36.632057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:36.632090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.632140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:36.632175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.632221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:36.632254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:36.632321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:36.632361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:36.632405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:36.634234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:36.634351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:36.634386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:36.634437Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:36.634490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:36.634589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:58:36.644983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:58:36.645512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744034316.646519 250874 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-07T13:58:36.646979Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:58:36.665825Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:58:36.668121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:36.668474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.668587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-07T13:58:36.670860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1744034316 seconds (20185 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-04-07T13:58:36.672332Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:36.676727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1744034316 seconds (20185 days, 55 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:36.676874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1744034316 seconds (20185 days, 55 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T13:58:36.681561Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers >> TxUsage::WriteToTopic_Demo_43 [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |88.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TxUsage::WriteToTopic_Demo_44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7397, MsgBus: 11956 2025-04-07T13:56:13.369166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569740199562304:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:13.369217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112d/r3tmp/tmplcqShu/pdisk_1.dat 2025-04-07T13:56:13.858101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:13.873796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:13.873909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:13.878108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7397, node 1 2025-04-07T13:56:13.979310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:56:13.979336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:56:13.979342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:56:13.979448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11956 TClient is connected to server localhost:11956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:14.789258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:14.826683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:14.980039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:15.188737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:15.301045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.065773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569757379433186:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.084000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.339067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.373251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.421252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.459785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.486753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.571923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.677063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569757379433685:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.677169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.677711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569757379433690:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:17.681795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:17.694408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569757379433692:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:18.373999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569740199562304:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:18.390850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:56:18.943726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.985746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.028177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.077958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.125634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.330961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.431108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.476492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.513567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.553158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.587251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.616383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.643485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.277595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:56:20.318855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.383657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.430388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.467057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.492529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:56:20.536321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... 72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:26.171935Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:26.171992Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:26.172196Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:26.172226Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:26.172392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:26.172423Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:26.172592Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:26.172619Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:26.172744Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:26.172781Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:26.181556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:26.185700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:26.185862Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:26.185892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:26.186096Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:26.186130Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:26.186254Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:26.186296Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:26.186371Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:26.186398Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:26.186435Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:26.186465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:26.186829Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:26.186866Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:26.187107Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:26.187141Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:26.187302Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:26.187335Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:26.187521Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:26.187548Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:26.187664Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:26.187698Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:26.189588Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:26.189642Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:26.189741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:26.189770Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:26.189945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:26.189972Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:26.190070Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:26.190102Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:26.190175Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:26.190202Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:26.190239Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:26.190267Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:26.190615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:26.190652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:26.190846Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:26.190876Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:26.191032Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:26.191062Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:26.191258Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:26.191288Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:26.191402Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:26.191430Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:38.873823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:38.873939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:38.873978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:38.874011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:38.874071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:38.874096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:38.874165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:38.874459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:38.975959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:38.976022Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:39.006462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:39.006557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:39.006710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:39.037587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:39.037772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:39.038323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.038512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:39.045425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.046997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.047055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.047167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:39.047213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:39.047258Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:39.047424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.068548Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:39.197573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:39.197812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.198013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:39.198219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:39.198282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.200608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.200744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:39.200923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.200978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:39.201022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:39.201067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:39.202987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.203055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:39.203089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:39.204796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.204837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.204870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.204927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.213154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:39.215304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:39.215497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:39.216429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.216562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:39.216602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.216837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:39.216890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.217037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:39.217119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:39.219283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.219324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:39.219532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.219593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:39.219829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.219873Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:39.219962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:39.219998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.220042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:39.220077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.220127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:39.220156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-07T13:58:39.220213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:39.220246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:39.220272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:39.222085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:39.222184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:39.222222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:39.222275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:39.222324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:39.222425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... HARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:39.441809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-07T13:58:39.446836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.446900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:39.447040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:39.447179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.447229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:58:39.447286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:58:39.447492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.447536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:58:39.449261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:39.449370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:39.449398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:39.449425Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:58:39.449449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:39.449988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:39.450041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:58:39.450055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:58:39.450079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:58:39.450099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:58:39.450140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:58:39.450899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1163 } } 2025-04-07T13:58:39.450934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:39.451051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1163 } } 2025-04-07T13:58:39.451176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1163 } } 2025-04-07T13:58:39.451902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:39.451945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:39.452055Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:39.452101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:58:39.452156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:58:39.452203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.452223Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.452248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:58:39.452273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T13:58:39.458168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:39.465492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:58:39.465657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.465742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.465974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.466011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:58:39.466083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:58:39.466110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:39.466140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:58:39.466195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 101 2025-04-07T13:58:39.466229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:39.466257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:58:39.466288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:58:39.466410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:39.470060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:58:39.470141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2312] TestWaitNotification: OK eventTxId 101 2025-04-07T13:58:39.470674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:39.470865Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 205us result status StatusSuccess 2025-04-07T13:58:39.471257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> KqpScan::UnionThree [GOOD] >> KqpScan::UnionSameTable >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> KqpScan::JoinSimple [GOOD] >> KqpScan::Join4 >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTestsWithReboots::MoveTable |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor |88.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:39.684510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:39.684636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:39.684699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:39.684733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:39.684778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:39.684803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:39.684856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:39.685131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:39.767345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:39.767407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:39.778255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:39.778374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:39.778531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:39.783748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:39.783932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:39.784479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.784655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:39.786642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.787814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.787866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.788014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:39.788062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
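The DescribeScheme result above for TSchemeShardTTLTests::LegacyTtlSettingsNoTiers reports /MyRoot/TTLEnabledTable with TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } }. For readers following the ut_ttl output, a YQL table declaration with an equivalent expiration policy would look roughly like the sketch below; it is reconstructed from the schema reported in the log (table and column names taken from the DescribeScheme output), not the DDL the test harness itself issues:

    CREATE TABLE TTLEnabledTable (
        key Uint64,
        modified_at Timestamp,
        PRIMARY KEY (key)
    )
    WITH (
        TTL = Interval("PT1H") ON modified_at  -- PT1H = 3600 seconds, matching ExpireAfterSeconds: 3600
    );

The AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL output further down reports the same column and interval, with an additional Tiers { ApplyAfterSeconds: 3600 Delete { } } entry alongside ExpireAfterSeconds: 3600.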
2025-04-07T13:58:39.788133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:39.788328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.801638Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:39.911306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:39.911545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.911742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:39.911949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:39.912009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.914365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.914486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:39.914647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.914701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:39.914761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:39.914813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:39.917090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.917150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:39.917180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:39.919092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.919145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.919179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.919233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.927507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:39.930799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:39.931006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:39.931969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.932102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:39.932144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.932396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:39.932451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:39.932602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:39.932715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:39.934856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.934902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:39.935124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.935174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:39.935394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.935439Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:39.935531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:39.935562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.935603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:39.935643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:39.935696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2025-04-07T13:58:39.935738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:39.935806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:39.935845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:39.935883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:39.937729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:39.937851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:39.937890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:39.937955Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:39.938006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:39.938142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... cLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:40.184747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:58:40.184870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T13:58:40.185456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:40.185575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:40.185621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T13:58:40.185858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T13:58:40.185971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T13:58:40.191113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T13:58:40.191176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:40.191389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:40.191425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:58:40.191724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.191786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:58:40.192449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:40.192566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:40.192600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:40.192630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T13:58:40.192682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:58:40.192754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:58:40.194016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1077 } } 2025-04-07T13:58:40.194066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:40.194231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1077 } } 2025-04-07T13:58:40.194306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1077 } } 2025-04-07T13:58:40.195392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
2025-04-07T13:58:40.195428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T13:58:40.195528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T13:58:40.195568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:58:40.195656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T13:58:40.195713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:40.195747Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.195781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:58:40.195817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:58:40.198479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:40.198579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.198695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.198874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.198908Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:58:40.199007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:58:40.199048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:40.199089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:58:40.199162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T13:58:40.199216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:40.199247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:58:40.199275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:58:40.199369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:40.200684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:58:40.200777Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:382:2356] TestWaitNotification: OK eventTxId 102 2025-04-07T13:58:40.201255Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:40.201482Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 259us result status StatusSuccess 2025-04-07T13:58:40.201867Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:39.785048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:39.785135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:39.785175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:39.785209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:39.785277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:39.785307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:39.785364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:39.785670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:39.844693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:39.844772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:39.859773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:39.859929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:39.860113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:39.865742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:39.865937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:39.866543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:39.866726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:39.868947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.870208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:39.870266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:39.870380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:39.870426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:39.870468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:39.870629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:39.878502Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:40.008232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-04-07T13:58:40.008592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.008843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:40.009120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:40.009188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.011771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:40.011920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:40.012123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.012182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:40.012244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:40.012282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:40.014306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.014373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:40.014419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:40.016485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.016534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.016576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:40.016657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:40.020380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:40.028969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:40.029249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-07T13:58:40.030024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:40.030137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:40.030187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:40.030374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:40.030415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:40.030545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:40.030609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:40.032681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:40.032742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:40.032966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:40.033014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:40.033311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.033371Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:40.033479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:40.033515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:40.033568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:40.033617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:40.033681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:40.033720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:40.033797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:40.033834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:40.033869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:40.035904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-07T13:58:40.036023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:40.036058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:40.036110Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:40.036161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:40.036308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:58:40.041076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:58:40.041617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744034320.042633 251407 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-07T13:58:40.043146Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:58:40.059997Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:58:40.061662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:40.061957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.062256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-07T13:58:40.063421Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:40.064958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:40.065112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-07T13:58:40.066291Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1744034320.066623 251407 text_format.cc:398] Warning parsing 
text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-07T13:58:40.068178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:40.068392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:40.068568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-04-07T13:58:40.071670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:40.071780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:40.920687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:40.920813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:40.920851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:40.920885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:40.920930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:40.920970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2025-04-07T13:58:40.921047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:40.921359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:41.012492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:41.012551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:41.023961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:41.024075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:41.024228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:41.028436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:41.028576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:41.029066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:41.029256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:41.030944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:41.032200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:41.032254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:41.032373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:41.032416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:41.032450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:41.032639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.038690Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:41.156224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:41.156501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.157097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:41.157356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:41.157422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.160334Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:41.160479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:41.160686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.160747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:41.160798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:41.160833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:41.163136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.163192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:41.163239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:41.167405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.167460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.167494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:41.167570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:41.171171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:41.173637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:41.173831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:41.174762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:41.174895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:41.174943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:41.175178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:41.175223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:41.175406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:41.175501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:41.177787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:41.177830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:41.178054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:41.178104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:41.178361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.178407Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:41.178499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:41.178532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:41.178586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:41.178626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:41.178675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:41.178711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:41.178780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:41.178813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:41.178858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:41.180583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:41.180777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:41.180815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:41.180863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:41.180917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:41.181018Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:58:41.184129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:58:41.184581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:58:41.185890Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:58:41.201068Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:58:41.203317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:41.203676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.203788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-04-07T13:58:41.204202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-07T13:58:41.206189Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:41.208371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:41.208546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T13:58:41.209697Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TxUsage::WriteToTopic_Demo_47 [GOOD] >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> TSchemeShardTTLTestsWithReboots::CopyTable |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:35.278982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:35.279079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:35.279113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:35.279186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:35.279245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:35.279272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:35.279322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:35.279645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:35.349591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:35.349644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:35.366417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:35.366547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:35.366710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:35.372339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:35.372538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:35.373189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.373406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:35.377879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.379160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.379220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.379334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:35.379376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.379433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:35.379601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.386325Z 
node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:35.503650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:35.503866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.504050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:35.504245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:35.504303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.506783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.506911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:35.507065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.507118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:35.507163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:35.507193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:35.508970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.509018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:35.509053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:35.510562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.510599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.510633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.510691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.522353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:35.527020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:35.527206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:35.528060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:35.528183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:35.528228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.528469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:35.528507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:35.528676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.528771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:35.534946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:35.534997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:35.535175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:35.535223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:35.535417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:35.535455Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:35.535533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:35.535564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.535607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:35.535640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:35.535678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:35.535706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:35.535770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-04-07T13:58:35.535801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:35.535827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:35.537519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.537610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:35.537642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:35.537688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:35.537736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:35.537890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... te, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.945531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.945618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.945730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.946986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.950284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.950432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.950515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.950642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 
72057594046678944 2025-04-07T13:58:41.950776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.950824Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:58:41.950934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:58:41.950972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:41.951011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:58:41.951096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2613:3880] message: TxId: 101 2025-04-07T13:58:41.951154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:58:41.951216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:58:41.951286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:58:41.952275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-07T13:58:41.953317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.957539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:58:41.957596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2614:3881] TestWaitNotification: OK eventTxId 101 2025-04-07T13:58:41.958180Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:41.958500Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 351us result status StatusSuccess 2025-04-07T13:58:41.959134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744034321.959755 250602 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-07T13:58:41.962343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:41.962549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:41.963062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-07T13:58:41.969716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:41.969907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TxUsage::WriteToTopic_Demo_48 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 61329, MsgBus: 29244 2025-04-07T13:55:51.143123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569642785088693:2254];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00115f/r3tmp/tmp3Vjbfz/pdisk_1.dat 2025-04-07T13:55:51.466928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:55:51.657634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:51.678484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:51.678573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:51.687665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61329, node 1 2025-04-07T13:55:51.905366Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:51.905396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-07T13:55:51.905404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:51.905507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29244 TClient is connected to server localhost:29244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:52.723781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:52.764828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:53.031024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:53.275598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:53.428728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:55.646296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569659964959368:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.646380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.986425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.029528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.076759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.131603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569642785088693:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:56.131658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:56.174078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.262381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.324134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:56.445756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569664259927172:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:56.445823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:56.446018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569664259927177:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:56.450071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:56.463903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569664259927179:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:55:59.949368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.041514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.106103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.194487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.260625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.800476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.024166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.090279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.203543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.243478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.312945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.424402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:56:01.602190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.328945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:56:02.358580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.392346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.447590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.489233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.563808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:56:02.613406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... 13:58:21.539280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T13:58:21.631973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T13:58:21.692495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T13:58:21.745097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T13:58:21.795033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T13:58:21.854859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.110825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T13:58:23.201385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.255024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.329326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.386155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.457328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 
72057594046644480 2025-04-07T13:58:23.528316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.625992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.710999Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.805439Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T13:58:23.902612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.033954Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.115521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.207705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.291825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.359337Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.423390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.513188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.567209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.657811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.736195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.801225Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.868966Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T13:58:24.930676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.254748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-07T13:58:25.311222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.364162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.425665Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.486264Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.544759Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.612007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.690032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.748047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-07T13:58:25.931519Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-07T13:58:26.002953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T13:58:26.067841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T13:58:26.166582Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-07T13:58:27.087594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 
2025-04-07T13:58:27.087622Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:40.935782Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88q6a9fzx553r9wgww8v2c", SessionId: ydb://session/3?node_id=5&id=Mjk0MGQzOTUtNjg4NDZmOWQtY2Q3YzkyNjAtNjVjNGQ4OTg=, Slow query, duration: 14.557587s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD] >> BasicUsage::ReadSessionCorrectClose >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:44.444463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:44.444552Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.444593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:44.444627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:44.444708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:44.444737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:44.444792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.445120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:44.527673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:44.527726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:44.546233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:44.546357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:44.546525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:44.571801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:44.572011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:44.572632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.572876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:44.578531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.579892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.579954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.580089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:44.580134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.580195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:44.580360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.594719Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.727529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:44.727783Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.728011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:44.728235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:44.728299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.730601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.730734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:44.730944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.731003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:44.731063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:44.731093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:44.732757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.732810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:44.732839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:44.734376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.734420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.734452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.734511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.739191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:44.741044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:44.741241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:44.742256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.742359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:44.742402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.742630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:44.742682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.742839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.742918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:44.744583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.744620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.744824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.744865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:44.745108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.745146Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:44.745243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:44.745272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.745322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:44.745370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.745420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:44.745450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:44.745501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:44.745540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:44.745567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:44.753156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:44.753321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:44.753366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:44.753413Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:44.753475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.753603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T13:58:45.249746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-07T13:58:45.250926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:45.251053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:45.251143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T13:58:45.251394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T13:58:45.251523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T13:58:45.261959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:45.262037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:58:45.262353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:45.262402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T13:58:45.262761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:45.262813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:58:45.263647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:45.263743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T13:58:45.263785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T13:58:45.263818Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T13:58:45.263876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:58:45.263963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T13:58:45.267998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T13:58:45.290018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1213 } } 2025-04-07T13:58:45.290081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T13:58:45.290222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1213 } } 2025-04-07T13:58:45.290340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1213 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-07T13:58:45.291409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T13:58:45.291454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T13:58:45.291572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T13:58:45.291631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:58:45.291722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T13:58:45.291780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, 
shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:45.291811Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:45.291856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T13:58:45.291896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T13:58:45.301580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:45.301826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:45.302151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T13:58:45.302214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T13:58:45.302316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T13:58:45.302365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:45.302408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T13:58:45.302504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 102 2025-04-07T13:58:45.302562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T13:58:45.302604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T13:58:45.302635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T13:58:45.302755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:58:45.309539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T13:58:45.309603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:494:2422] TestWaitNotification: OK eventTxId 102 2025-04-07T13:58:45.310190Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:58:45.310507Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 310us result status StatusSuccess 2025-04-07T13:58:45.310965Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPQTabletTests::DropTablet_And_Tx >> KqpScan::UnionSameTable [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::DropTablet |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |88.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> TPQTabletTests::UpdateConfig_2 >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPartitionTests::CorrectRange_Multiple_Transactions >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore >> TPQTabletTests::DropTablet_Before_Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionSameTable [GOOD] Test command err: Trying to start YDB, gRPC: 32405, MsgBus: 27590 2025-04-07T13:58:31.455621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570330769153395:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:31.455667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001675/r3tmp/tmpLwOfhP/pdisk_1.dat 2025-04-07T13:58:32.190837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:32.198333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:32.198429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:32.205004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32405, node 1 2025-04-07T13:58:32.557572Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:32.557592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:32.557599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:32.557702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27590 TClient is connected to server localhost:27590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:33.291965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:58:33.332651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:58:33.541420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:33.723908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:33.794114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:35.822385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570347949024294:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:35.822514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.261061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.326600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.405267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.460347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570330769153395:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:36.460539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:36.469180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.504555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.549454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.624891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570352243992093:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.624993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.625344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570352243992098:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.631223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:36.660884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570352243992100:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:58:39.651618Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034319601, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 24656, MsgBus: 11629 2025-04-07T13:58:40.563065Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570368589566145:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:40.563152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001675/r3tmp/tmp1hHiXF/pdisk_1.dat 2025-04-07T13:58:40.679372Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:40.697698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:40.697793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:40.702343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24656, node 2 2025-04-07T13:58:40.809156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:40.809177Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:40.809185Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:40.809288Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11629 TClient is connected to server localhost:11629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:41.370859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:41.377718Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:58:41.383287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:58:41.483550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:41.679781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:41.813339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:44.422357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570385769437015:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:44.422440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:44.466734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.550630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.614286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.675967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.732288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.827719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:44.957016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570385769437527:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:44.957109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:44.957320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570385769437532:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:44.961943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:44.979402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570385769437534:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:58:45.576913Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570368589566145:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:45.576992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:46.796235Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034326818, txId: 281474976715671] shutting down >> TPQTabletTests::UpdateConfig_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:47.101882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:47.102003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:47.102043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:47.102080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:47.102130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:47.102203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:47.102289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:47.102631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:47.233833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:47.233884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:47.254590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:47.254727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:47.254910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:47.265076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:47.265288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:47.265926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.266147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:47.268181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-07T13:58:47.269499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:47.269555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:47.269682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:47.269726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:47.269765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:47.269967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.279170Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:47.558394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:47.558714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.565869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:47.566173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:47.566271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.568641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.568806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:47.569036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.569105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:47.569167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:47.569206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:47.576555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.576628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:47.576687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:47.580333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.580392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.580430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.580504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.584282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:47.590786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:47.591071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:47.592222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.592377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:47.592452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.592796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:47.592872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.593096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:47.593239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:47.604886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:47.604955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:47.605235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:47.605289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:47.605611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.605668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-07T13:58:47.605773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:47.605809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.605863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:47.605911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.605973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:47.606012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:47.606119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:47.606164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:47.606196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:47.608263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:47.608373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:47.608412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:47.608469Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:47.608539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:47.612860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:58:47.616546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:58:47.617124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:58:47.618405Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T13:58:47.647729Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T13:58:47.649983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:47.650420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.650523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, 
schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-04-07T13:58:47.650948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-04-07T13:58:47.652851Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:47.658103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:47.658305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T13:58:47.662165Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |88.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> TPQTest::DirectReadBadSessionOrPipe >> TPQTabletTests::Cancel_Tx >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> KqpScan::Join4 [GOOD] >> KqpScan::JoinLeftOnly >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPartitionTests::CorrectRange_Rollback >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPartitionTests::DataTxCalcPredicateOk >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-04-07T13:58:47.547307Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:58:47.563104Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:58:47.563440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T13:58:47.563501Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:58:47.563540Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T13:58:47.563580Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:58:47.563624Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:47.563702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T13:58:47.563737Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:58:47.584693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:47.584800Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:174:2189], now have 1 active actors on pipe 2025-04-07T13:58:47.584885Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:58:47.599417Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:47.603893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T13:58:47.604055Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:47.605752Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:47.605894Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:47.605978Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:58:47.606460Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:58:47.606983Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:183:2196] 2025-04-07T13:58:47.607827Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T13:58:47.607904Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:183:2196] 2025-04-07T13:58:47.607953Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:47.608554Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T13:58:47.608615Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T13:58:47.612675Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:47.612923Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:58:47.613307Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:58:47.613633Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T13:58:47.614352Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-07T13:58:47.614398Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:185:2198] 2025-04-07T13:58:47.614438Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:47.614831Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-07T13:58:47.614869Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-07T13:58:47.614988Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:47.615104Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:58:47.615728Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:58:47.615894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:47.619980Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:47.620170Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:47.620504Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:47.620556Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-07T13:58:47.622308Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:47.622370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2025-04-07T13:58:47.623191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-04-07T13:58:47.623248Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T13:58:47.623321Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-07T13:58:47.623367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:58:47.623416Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T13:58:47.623591Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 
Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:58:47.623682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T13:58:47.623748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:47.629525Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:47.629590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T13:58:47.629625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T13:58:47.630358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67891 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: true } 2025-04-07T13:58:47.630408Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 invalid PQ tablet state (EDropped) 2025-04-07T13:58:47.630473Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-07T13:58:47.636569Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 4294969484 } } Step: 100 2025-04-07T13:58:47.636739 ... alizing step TInitMetaStep 2025-04-07T13:58:50.340714Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T13:58:50.341376Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T13:58:50.341616Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-07T13:58:50.341655Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T13:58:50.341693Z node 5 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T13:58:50.341729Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-04-07T13:58:50.341774Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2025-04-07T13:58:50.341852Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:50.341947Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 2025-04-07T13:58:50.342077Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-04-07T13:58:50.342109Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-07T13:58:50.342141Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-07T13:58:50.342300Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:58:50.342611Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2025-04-07T13:58:50.342731Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:58:50.342884Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 0 2025-04-07T13:58:50.342920Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2025-04-07T13:58:50.342962Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-07T13:58:50.342995Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-07T13:58:50.343030Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T13:58:50.343067Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-07T13:58:50.343368Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 21474838670 } Partitions { Partition { PartitionId: 0 } } 2025-04-07T13:58:50.343505Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:50.353479Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:50.353562Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 
2025-04-07T13:58:50.353600Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-07T13:58:50.353685Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-07T13:58:50.353731Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:58:50.358513Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:50.358579Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:334:2313], now have 1 active actors on pipe 2025-04-07T13:58:50.358619Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-07T13:58:50.358763Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-07T13:58:50.358805Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-07T13:58:50.358846Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-07T13:58:50.358897Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-04-07T13:58:50.358940Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-07T13:58:50.359047Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-07T13:58:50.359081Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-07T13:58:50.359147Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-07T13:58:50.359314Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-04-07T13:58:50.359350Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-04-07T13:58:50.359378Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-04-07T13:58:50.359652Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:50.363096Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:50.363316Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-07T13:58:50.363363Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-07T13:58:50.363412Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T13:58:50.363451Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-07T13:58:50.363678Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-04-07T13:58:50.363740Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:50.363808Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-07T13:58:50.363843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-07T13:58:50.364058Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 21474838670 } Partitions { Partition { PartitionId: 0 } } 2025-04-07T13:58:50.364285Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:50.371315Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:50.371401Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-07T13:58:50.371442Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-07T13:58:50.371513Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send 
TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-07T13:58:50.371577Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-07T13:58:50.371625Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2025-04-07T13:58:50.371653Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T13:58:50.371685Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2025-04-07T13:58:50.371716Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-04-07T13:58:50.371756Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-04-07T13:58:50.371803Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2025-04-07T13:58:50.371874Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:50.380736Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:50.380796Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-04-07T13:58:50.380824Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T13:58:50.380862Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit [GOOD] |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |88.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore [GOOD] >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> Secret::Validation [GOOD] >> TxUsage::WriteToTopic_Demo_26 [GOOD] >> TPQTabletTests::Multiple_PQTablets >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TPQTabletTests::Multiple_PQTablets [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::TestAccountReadQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: 2025-04-07T13:55:49.968156Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:55:49.968489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:55:49.968771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021c6/r3tmp/tmp6akqVi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17875, node 1 TClient is connected to server localhost:21485 2025-04-07T13:55:50.573145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:55:50.618755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:50.627906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:50.627978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:50.628016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:50.628211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:55:50.677182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:50.677324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:50.693993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T13:56:03.815412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:720:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:03.815566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-07T13:56:14.221805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:14.222091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:14.228016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-07T13:56:14.402975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:858:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:14.403106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:14.403357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:863:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:14.407471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T13:56:14.590356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:865:2701], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T13:56:15.603262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:16.139145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-07T13:56:16.922424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T13:56:17.728041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:18.271529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:19.650525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T13:56:20.037329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-07T13:56:23.489747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:56:23.489799Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T13:56:59.256961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T13:57:00.197926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715723:0, at schemeshard: 72057594046644480 2025-04-07T13:57:02.140752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-04-07T13:57:02.622262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715735:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T13:57:30.668211Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3687:4845], TxId: 281474976715766, task: 1. Ctx: { TraceId : 01jr88nf9ra5jb9ayqcqsfbgz4. SessionId : ydb://session/3?node_id=1&id=MTNiYWZjNmUtNmY3OTczYWYtMTJkY2FhYTYtOWExM2UwYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T13:57:30.718884Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3688:4846], TxId: 281474976715766, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MTNiYWZjNmUtNmY3OTczYWYtMTJkY2FhYTYtOWExM2UwYmU=. CustomerSuppliedId : . TraceId : 01jr88nf9ra5jb9ayqcqsfbgz4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:3684:4773], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T13:57:30.720918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTNiYWZjNmUtNmY3OTczYWYtMTJkY2FhYTYtOWExM2UwYmU=, ActorId: [1:3587:4773], ActorState: ExecuteState, TraceId: 01jr88nf9ra5jb9ayqcqsfbgz4, Create QueryResponse for error on request, msg: 2025-04-07T13:57:30.777212Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr88new570mj3prxpzaq3r4q" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MTNiYWZjNmUtNmY3OTczYWYtMTJkY2FhYTYtOWExM2UwYmU=" tx_control { tx_id: "01jr88new570mj3prxpzaq3r4q" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-07T13:57:56.162110Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4214:5237], for# root@builtin, access# DescribeSchema 2025-04-07T13:57:56.162213Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4214:5237], for# root@builtin, access# DescribeSchema 2025-04-07T13:57:56.164185Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4211:5234], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:57:56.166184Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQwNWJlMTQtOTE2ZDY3ODAtZTRhYjc0MzQtZGIzM2QwNjA=, ActorId: [1:4207:5231], ActorState: ExecuteState, TraceId: 01jr88p8sn208m22ww3ndh2k6q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T13:58:07.914296Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-07T13:58:50.319510Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715852. Ctx: { TraceId: 01jr88qx680zqw7pwpjsjwd31y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI2YmRhZWYtZDI3ZjE2ZTItYTBiYWExNmItYmFjOTBkODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TPQTabletTests::One_Tablet_For_All_Partitions >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TPartitionTests::DataTxCalcPredicateError >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TxUsage::WriteToTopic_Demo_27 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] Test command err: 2025-04-07T13:58:50.473347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:58:50.473691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:50.473960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00214c/r3tmp/tmpUma2MY/pdisk_1.dat 2025-04-07T13:58:51.069908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T13:58:51.070176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.070431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T13:58:51.070677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:58:51.070780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.077384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:51.077568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:58:51.077770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.077825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:58:51.077853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:51.077881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:51.078389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.078437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:58:51.078470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:51.078886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.078924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.078965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:58:51.079021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T13:58:51.087285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:51.089925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:51.090141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T13:58:51.091232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:51.091287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T13:58:51.091322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:51.150019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T13:58:51.150099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:51.212961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:51.213114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:51.226134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:51.355675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:51.355881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:58:51.355947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:58:51.356209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:51.356278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:58:51.356460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:58:51.356537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:58:51.357810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:58:51.357861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:58:51.358039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:58:51.358100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T13:58:51.358708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.358765Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T13:58:51.358868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:51.358905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:51.358947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:51.359004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:51.359054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:51.359098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:51.359169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T13:58:51.359206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T13:58:51.359248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T13:58:51.361701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T13:58:51.361793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T13:58:51.361826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T13:58:51.361867Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:58:51.361903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:58:51.361985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T13:58:51.362039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T13:58:51.362449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T13:58:51.374016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T13:58:51.374374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 
281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:58:51.374475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:51.374877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T13:58:51.374950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T13:58:51.375012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T13:58:51.375129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... ncy: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1020 } } 2025-04-07T13:58:54.533764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715667 Step: 0 Generation: 1 2025-04-07T13:58:54.533798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715667, tablet: 72075186224037888, partId: 2 2025-04-07T13:58:54.533886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715667:2, at schemeshard: 72057594046644480, message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715667 Step: 0 Generation: 1 2025-04-07T13:58:54.533932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715667:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T13:58:54.534007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715667:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715667 Step: 0 Generation: 1 2025-04-07T13:58:54.534057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715667:2, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534087Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715667:2, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715667:2, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715667:2 129 -> 240 2025-04-07T13:58:54.534522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715667:2, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534776Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationReply complete, operationId: 281474976715667:2, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715667:2, at schemeshard: 72057594046644480 2025-04-07T13:58:54.534930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715667:2 ProgressState 2025-04-07T13:58:54.534999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:2 progress is 5/5 2025-04-07T13:58:54.535023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2025-04-07T13:58:54.535058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715667, ready parts: 5/5, is published: true 2025-04-07T13:58:54.535111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1190:2895] message: TxId: 281474976715667 2025-04-07T13:58:54.535158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2025-04-07T13:58:54.535220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-04-07T13:58:54.535261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2025-04-07T13:58:54.535327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 14] was 2 2025-04-07T13:58:54.535368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:1 2025-04-07T13:58:54.535387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:1 2025-04-07T13:58:54.535414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-07T13:58:54.535430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:2 2025-04-07T13:58:54.535445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:2 2025-04-07T13:58:54.535505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T13:58:54.535562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:3 2025-04-07T13:58:54.535584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:3 2025-04-07T13:58:54.535637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-04-07T13:58:54.535660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:4 2025-04-07T13:58:54.535675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:4 2025-04-07T13:58:54.535716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-07T13:58:54.536099Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037888 state Ready 2025-04-07T13:58:54.536144Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T13:58:54.861426Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 15][1:1219:2919] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table TableId: [72057594046644480:15:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:58:54.861904Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 15] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:58:54.861991Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Send handshake: worker# [1:1217:2919] 2025-04-07T13:58:54.862116Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1217:2919] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-07T13:58:54.862153Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:1217:2919] Handshake with writer: sender# [1:1219:2919] 2025-04-07T13:58:54.862281Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 1 Data: 57b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T13:58:54.862512Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 71 },{ Order: 1 BodySize: 57 }] } 2025-04-07T13:58:54.862751Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:15:1][72075186224037892][1:1329:2919] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T13:58:54.862860Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037892 } 2025-04-07T13:58:54.862995Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:15:1][72075186224037892][1:1329:2919] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 1744034333897789 Step: 2500 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 1 Group: 1744034334024494 Step: 2500 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-04-07T13:58:54.863400Z node 1 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 72075186224037892, clientId# [1:1330:3006], serverId# [1:1331:3007], sessionId# [0:0:0] 2025-04-07T13:58:54.877498Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:15:1][72075186224037892][1:1329:2919] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T13:58:54.877613Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037892 } 2025-04-07T13:58:54.877768Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 15][1:1219:2919] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0,1] } 2025-04-07T13:58:54.877867Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1217:2919] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:58:54.877968Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][1:979:2758][0][1:1218:2919] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-07T13:58:54.878124Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T13:58:54.878184Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T13:58:54.878330Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T13:58:54.878403Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 2 count 4294967295 size 1048576 endOffset 2 max time lag 0ms effective offset 2 2025-04-07T13:58:54.878442Z node 1 :PERSQUEUE DEBUG: waiting read cookie 1 partition 0 user __OFFLOAD_ACTOR__ offset 2 count 4294967295 size 1048576 timeout 1000 2025-04-07T13:58:55.263147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr88r2bn8yw9g4ekkx03zpe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIxOGE0NjctZjk0MDZjNzEtNWEyN2YyYzktNzVjYTY0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { null_flag_value: NULL_VALUE } }, { items { uint32_value: 2 } items { uint32_value: 200 } } >> TSettingsValidation::TestDifferentDedupParams [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |88.7%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPartitionTests::IncorrectRange >> TPQTabletTests::Huge_ProposeTransacton >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> TPQTest::TestAccountReadQuota [GOOD] >> TPartitionTests::IncorrectRange [GOOD] >> TPQTest::TestAlreadyWritten >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> KqpScan::JoinLeftOnly [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |88.7%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPartitionTests::DataTxCalcPredicateError [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:56:52.146938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:56:52.147026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:52.147063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:56:52.147095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:56:52.147138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:56:52.147176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:56:52.147243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:56:52.147611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:56:52.217758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T13:56:52.217828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:52.228631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:56:52.228803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:56:52.228983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:56:52.234704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:56:52.234897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:56:52.235523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.235704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:56:52.238136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.239337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:52.239405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.239513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:56:52.239557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:52.239592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:56:52.239723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.245840Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:56:52.371971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:56:52.372187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.372408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:56:52.372616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:56:52.372683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.374930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.375075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T13:56:52.375236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.375284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:56:52.375319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:56:52.375364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:56:52.377265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.377330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:56:52.377365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:56:52.378953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.378999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.379042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.379080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.382418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:56:52.384194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:56:52.384355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:56:52.385354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:56:52.385492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:56:52.385551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.385801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:56:52.385853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:56:52.386008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:52.386079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:56:52.387959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:56:52.388002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:56:52.388161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:56:52.388201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:56:52.388449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:56:52.388497Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:56:52.388579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:56:52.388627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.388688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:56:52.388730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:56:52.388772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:56:52.388815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:56:52.388876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:56:52.388909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:56:52.388939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:56:52.390767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:52.390886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:56:52.390925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:56:52.390969Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:56:52.391011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:56:52.391102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
8.985028Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:58.985057Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T13:58:58.985087Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:58.995723Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:58.995846Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:58.995875Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:58.996404Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-07T13:58:58.996472Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:59.001039Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-04-07T13:58:59.001468Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-04-07T13:58:59.001544Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-04-07T13:58:59.001623Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2025-04-07T13:58:59.002330Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:59.002415Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:59.002442Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:59.003203Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:59.003277Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T13:58:59.003302Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T13:58:59.003336Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T13:58:59.003382Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:58:59.003471Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-04-07T13:58:59.012916Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T13:58:59.012986Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:59.013365Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:58:59.013505Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-04-07T13:58:59.013537Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-07T13:58:59.013588Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-04-07T13:58:59.013693Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:381:2346] message: TxId: 103 2025-04-07T13:58:59.013797Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-07T13:58:59.013869Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:58:59.013927Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:58:59.014088Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:58:59.014154Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-07T13:58:59.014179Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-07T13:58:59.014210Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:58:59.014234Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-07T13:58:59.014255Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-07T13:58:59.014290Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T13:58:59.014315Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-07T13:58:59.014334Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-07T13:58:59.014362Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T13:58:59.014384Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2025-04-07T13:58:59.014404Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-04-07T13:58:59.014450Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-04-07T13:58:59.015079Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:58:59.015163Z node 18 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-04-07T13:58:59.015276Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T13:58:59.015343Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T13:58:59.015380Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T13:58:59.016091Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.016303Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.029270Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.029344Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.029477Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.029551Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T13:58:59.034339Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:58:59.034445Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [18:761:2655] 2025-04-07T13:58:59.034632Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-07T13:58:59.035508Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:58:59.035919Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 456us result status StatusPathDoesNotExist 2025-04-07T13:58:59.036183Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:58:59.044124Z node 18 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:58:59.044560Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 503us result status StatusPathDoesNotExist 2025-04-07T13:58:59.044850Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |88.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::JoinLeftOnly [GOOD] Test command err: Trying to start YDB, gRPC: 9083, MsgBus: 6510 2025-04-07T13:58:31.416628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570331817201795:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:31.416705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001689/r3tmp/tmpdE13vq/pdisk_1.dat 2025-04-07T13:58:32.179777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:32.179874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:32.184573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:32.185492Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9083, node 1 2025-04-07T13:58:32.547530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:32.547567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:32.547576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:32.548263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6510 TClient is connected to server localhost:6510 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:33.514025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:33.529384Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:58:33.545945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:33.805796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:58:34.088751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:58:34.200359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:36.009895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570353292039986:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.010020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.261302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.326047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.377304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.437530Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570331817201795:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:36.437596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:36.455501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.484535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.567529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:36.668258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570353292040492:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.668343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.668412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570353292040497:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:36.672253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:36.689030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570353292040499:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:58:38.523106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:39.687493Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034319636, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 1314, MsgBus: 10882 2025-04-07T13:58:40.698681Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570368057109488:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:40.698737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001689/r3tmp/tmpxr8xW1/pdisk_1.dat 2025-04-07T13:58:41.014560Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1314, node 2 2025-04-07T13:58:41.079192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:41.079292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:41.187103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:41.247544Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:41.247566Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:41.247574Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:41.247686Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10882 TClient is connected to server localhost:10882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:42.122896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:58:42.129688Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:58:42.143569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:42.218517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:42.420686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, ... ct path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:45.749696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:45.831104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:45.929807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:45.985440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:46.041718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:46.124816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:46.304644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570393826915264:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:46.304961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:46.305551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570393826915269:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:46.310053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:46.324009Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:58:46.326918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570393826915271:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:58:47.728055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:48.103010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:58:48.884388Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034328897, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 8610, MsgBus: 19765 2025-04-07T13:58:49.989630Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490570407986190050:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:49.989674Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001689/r3tmp/tmpBtWaMp/pdisk_1.dat 2025-04-07T13:58:50.111424Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:50.136147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:50.136249Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:50.138381Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8610, node 3 2025-04-07T13:58:50.321371Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:50.321401Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:50.321410Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:50.321533Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19765 TClient is connected to server localhost:19765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:50.970738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:58:50.977489Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:58:50.993912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:51.099944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:51.320881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:51.442492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:54.623074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570429461028223:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:54.623257Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:54.666189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:54.715136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:54.785156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:58:54.853345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:58:54.930413Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:58:55.037067Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490570407986190050:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:55.037234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:55.181971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:58:55.278435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570433755996027:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:55.278534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:55.278754Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570433755996032:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:55.282713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:55.296612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490570433755996034:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:58:56.659377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:57.904876Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034337892, txId: 281474976715673] shutting down 2025-04-07T13:58:58.574317Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034338571, txId: 281474976715675] shutting down |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPartitionTests::GetPartitionWriteInfoError >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPQTest::TestMessageNo >> TPartitionTests::NonConflictingCommitsBatch >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] |88.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestReadRuleVersions >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:44.163552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:44.163655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.163702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:44.163740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:44.163788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:44.163832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:44.163889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.164178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:44.229686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T13:58:44.229743Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:44.247915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:44.248048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:44.248193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:44.270194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:44.270393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:44.270958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.271147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:44.273053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.274262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.274315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.274417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:44.274460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.274490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:44.274643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.281275Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.405274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:44.405529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.405718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:44.405922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:44.405984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.411939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.412074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T13:58:44.412266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.412319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:44.412370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:44.412402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:44.414368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.414426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:44.414461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:44.416237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.416277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.416312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.416369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.424773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:44.426747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:44.426976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:44.427858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.427971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:44.428012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.428271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:44.428313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.428460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.428540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:44.430464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.430503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.430668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.430712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:44.430953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.430993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:44.431072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:44.431101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.431148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:44.431184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.431227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:44.431255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:44.431313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:44.431345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:44.431375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:44.433196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:44.433321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:44.433352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:44.433394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:44.433443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.433573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
PathId: 2] was 3 2025-04-07T13:59:02.345934Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:02.345993Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:02.346132Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:59:02.346303Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:02.346340Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:59:02.346378Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-07T13:59:02.346623Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:02.346666Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:59:02.348281Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:02.348361Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:02.348388Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:59:02.348420Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:59:02.348451Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:02.348955Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:02.349026Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:02.349058Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:59:02.349083Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:59:02.349109Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:59:02.349165Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 
2025-04-07T13:59:02.349937Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 959 } } 2025-04-07T13:59:02.349976Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:02.350107Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 959 } } 2025-04-07T13:59:02.350187Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 959 } } 2025-04-07T13:59:02.351018Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:02.351056Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:02.351148Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:02.351184Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:59:02.351255Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:02.351302Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:02.351336Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:02.351373Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:59:02.351409Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T13:59:02.366900Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:59:02.367539Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:59:02.367826Z node 18 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:02.369021Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:02.369300Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:02.369343Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:59:02.369430Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:59:02.369458Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:59:02.369500Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:59:02.369570Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:339:2314] message: TxId: 101 2025-04-07T13:59:02.369613Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:59:02.369675Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:59:02.369703Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:59:02.369804Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:59:02.371425Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:59:02.371468Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:340:2315] TestWaitNotification: OK eventTxId 101 2025-04-07T13:59:02.371890Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:59:02.372084Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 225us result status StatusSuccess 2025-04-07T13:59:02.372508Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 
Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestLowWatermark >> TPQTabletTests::Partition_Send_Predicate_With_False >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore >> TPQTabletTests::ProposeTx_Missing_Operations >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::GetUsedStorage >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTest::TestReadRuleVersions [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> TPartitionTests::GetUsedStorage [GOOD] >> KqpErrors::ResolveTableError >> TPQTest::TestPartitionedBlobFails >> TPQTabletTests::ProposeTx_Command_After_Propose >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.8%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] Test command err: 2025-04-07T13:58:49.237110Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:49.237285Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:49.256498Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:49.258185Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T13:58:49.000000Z 2025-04-07T13:58:49.258254Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\343\257\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\343\257\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T13:58:50.265495Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:50.265581Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:50.280642Z node 2 :PERSQUEUE 
INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:50.282337Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T13:58:50.000000Z 2025-04-07T13:58:50.282396Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\220\353\257\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T13:58:51.207406Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:51.207473Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:58:51.222452Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:51.224111Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T13:58:51.000000Z 2025-04-07T13:58:51.224174Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\370\362\257\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_AC ... 
OUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2025-04-07T13:59:00.997083Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:00.997158Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:59:01.016542Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:01.018346Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T13:59:01.000000Z 2025-04-07T13:59:01.018416Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Send disk status response with cookie: 0 Wait tx committed for tx 0 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait tx committed for tx 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-04-07T13:58:00.786530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570196068734204:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:00.786796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7b/r3tmp/tmpkNA5Cd/pdisk_1.dat 2025-04-07T13:58:01.380469Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:01.384971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:01.385057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:01.387993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17880, node 1 2025-04-07T13:58:01.547998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:01.548021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:01.548031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:01.548134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21010 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:02.029979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:02.053115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:58:02.119530Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:58:04.516018Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI= 2025-04-07T13:58:04.516434Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:04.532250Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:04.532295Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-04-07T13:58:04.586637Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI= 2025-04-07T13:58:04.586928Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:04.587180Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ReadyState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490570213248603858:2292] database: Root databaseId: /Root pool id: 2025-04-07T13:58:04.587357Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Sending CompileQuery request 2025-04-07T13:58:04.844422Z node 1 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, ExecutePhyTx, tx: 0x000050C0002299D8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:58:04.844490Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Sending to Executer TraceId: 0 8 2025-04-07T13:58:04.844589Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Created new KQP executer: [1:7490570213248603865:2301] isRollback: 0 2025-04-07T13:58:04.894782Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Forwarded TEvStreamData to [1:7490570213248603858:2292] 2025-04-07T13:58:04.899296Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:58:04.899477Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, txInfo Status: Committed Kind: Pure TotalDuration: 55.159 ServerDuration: 55.084 QueriesCount: 2 2025-04-07T13:58:04.899527Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:58:04.899700Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:58:04.899726Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, EndCleanup, isFinal: 1 2025-04-07T13:58:04.899776Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: ExecuteState, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7490570196068734271:2256] 2025-04-07T13:58:04.899800Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: [1:7490570213248603859:2301], ActorState: unknown state, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Cleanup temp tables: 0 2025-04-07T13:58:04.900065Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMTEtZDJhNWIxNTMtZTIyNjNjMWQtOTlkNzJlNDI=, ActorId: 
[1:7490570213248603859:2301], ActorState: unknown state, TraceId: 01jr88ph1be7zevtx3b7yrbh62, Session actor destroyed 2025-04-07T13:58:04.918483Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:58:04.918525Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:58:04.918558Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:58:04.918579Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:58:04.918644Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTBhNjA2YWItYzUzNmQ0YWYtNDY0Mzg4ZDQtOTI3OWYxZjI=, ActorId: [1:7490570213248603857:2300], ActorState: unknown state, Session actor destroyed 2025-04-07T13:58:06.714945Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570223766312641:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:06.736863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7b/r3tmp/tmpZGvKDP/pdisk_1.dat 2025-04-07T13:58:07.047927Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:07.081885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:07.081958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:07.089720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19878, node 2 2025-04-07T13:58:07.257506Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:07.257529Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:07.257536Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:07.257649Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPath ... 
uThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-04-07T13:58:58.157196Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T13:58:58.157368Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ReadyState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7490570445572353512:2349] database: /Root databaseId: /Root pool id: default 2025-04-07T13:58:58.157423Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7490570445572353511:2348], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg= 2025-04-07T13:58:58.157474Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490570445572353514:2350], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T13:58:58.157567Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7490570445572353515:2351], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, Start pool fetching 2025-04-07T13:58:58.157588Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490570445572353516:2352], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T13:58:58.159318Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490570445572353514:2350], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-07T13:58:58.159392Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490570445572353516:2352], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-07T13:58:58.159418Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-04-07T13:58:58.159480Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7490570445572353515:2351], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, Pool info successfully resolved 2025-04-07T13:58:58.159542Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg= 2025-04-07T13:58:58.159637Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7490570436982418857:2332], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7490570445572353511:2348], session id: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg= 2025-04-07T13:58:58.159679Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7490570436982418857:2332], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7490570445572353511:2348], session id: 
ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, local in flight: 1 2025-04-07T13:58:58.159703Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg= 2025-04-07T13:58:58.159757Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, continue request, pool id: default 2025-04-07T13:58:58.160016Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-04-07T13:58:58.641501Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:58:58.641539Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:59.226219Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, ExecutePhyTx, tx: 0x000050C00043A518 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:58:59.226285Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, Sending to Executer TraceId: 0 8 2025-04-07T13:58:59.226389Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, Created new KQP executer: [8:7490570449867320843:2348] isRollback: 0 2025-04-07T13:58:59.299144Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T13:58:59.299231Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, ExecutePhyTx, tx: 0x000050C00043A458 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:58:59.323374Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:58:59.323652Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, txInfo Status: Committed Kind: ReadOnly TotalDuration: 97.511 ServerDuration: 97.435 QueriesCount: 2 2025-04-07T13:58:59.323757Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:58:59.323832Z node 8 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ExecuteState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-07T13:58:59.324168Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7490570436982418857:2332], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7490570445572353511:2348], session id: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, duration: 1.166156s, cpu consumed: 0.025836s 2025-04-07T13:58:59.324270Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7490570436982418857:2332], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7490570445572353511:2348], session id: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, local in flight: 0 2025-04-07T13:58:59.324554Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: CleanupState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, EndCleanup, isFinal: 0 2025-04-07T13:58:59.324705Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: CleanupState, TraceId: 01jr88r5bd24sntrq4gaqp7gcq, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7490570376852875703:2204] 2025-04-07T13:58:59.325200Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 1.166156s, CpuConsumed: 0.025836s, AdjustCpuQuota: 0 2025-04-07T13:58:59.328803Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, TxId: 2025-04-07T13:58:59.332731Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, TxId: 2025-04-07T13:58:59.333395Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:58:59.333445Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:58:59.333477Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:58:59.333511Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:58:59.333589Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZjhiODAwNjktZDA2Y2Q4MDktNWU0NjhjOWMtMjBkYWY1MDg=, ActorId: [8:7490570445572353511:2348], ActorState: unknown 
state, Session actor destroyed 2025-04-07T13:58:59.458888Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2U3N2UxNGEtM2U4Y2IxOTUtOGFjODNiMGMtZTZlNmRiMGE=, ActorId: [8:7490570432687451396:2320], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:58:59.458959Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2U3N2UxNGEtM2U4Y2IxOTUtOGFjODNiMGMtZTZlNmRiMGE=, ActorId: [8:7490570432687451396:2320], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:58:59.459004Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2U3N2UxNGEtM2U4Y2IxOTUtOGFjODNiMGMtZTZlNmRiMGE=, ActorId: [8:7490570432687451396:2320], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:58:59.459044Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2U3N2UxNGEtM2U4Y2IxOTUtOGFjODNiMGMtZTZlNmRiMGE=, ActorId: [8:7490570432687451396:2320], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:58:59.459145Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2U3N2UxNGEtM2U4Y2IxOTUtOGFjODNiMGMtZTZlNmRiMGE=, ActorId: [8:7490570432687451396:2320], ActorState: unknown state, Session actor destroyed >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12377, MsgBus: 31746 2025-04-07T13:55:54.041379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569652032950443:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:54.041430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00113e/r3tmp/tmp5EzNiw/pdisk_1.dat 2025-04-07T13:55:54.745396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:55:54.764077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:54.764176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:54.770014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12377, node 1 2025-04-07T13:55:54.941177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:54.941197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:54.941205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:54.941296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31746 TClient is connected to server localhost:31746 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:55.983596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:56.021418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:56.033716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:56.243224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:56.480141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:56.611615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:59.045493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569652032950443:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:59.071539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:59.897568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569677802755934:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:59.926233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:59.968695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.023147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.079567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.163835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.242840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.339572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:56:00.468236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569682097723738:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:00.468321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:00.468893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569682097723743:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:00.474057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:56:00.518167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569682097723745:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:56:03.045298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.078298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.130604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.215665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.258868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.469273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.518360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.602788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.648039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.718400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.783157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.829225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:56:03.906384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.559198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:56:04.605939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.641591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.688419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.719058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:56:04.754505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... ; 2025-04-07T13:58:34.633984Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:34.634018Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;self_id=[5:7490570344346691059:5924];tablet_id=72075186224038694;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:58:34.634023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:34.634155Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;self_id=[5:7490570344346691059:5924];tablet_id=72075186224038694;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:58:34.634179Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:34.634213Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:34.634292Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;self_id=[5:7490570344346691059:5924];tablet_id=72075186224038694;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:58:34.634317Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:34.634348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:34.634416Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;self_id=[5:7490570344346691059:5924];tablet_id=72075186224038694;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:58:34.640582Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:34.642201Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:34.642369Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:34.642423Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:34.642582Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:34.642614Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:34.642697Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:34.642729Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:34.642794Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:34.642825Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:34.642865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:34.642895Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:34.643234Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:34.643276Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:34.643446Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:34.643480Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:34.643607Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:34.643639Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:34.643847Z 
node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:34.643889Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:34.643996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:34.644025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:34.691850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:34.691929Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:34.692038Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:34.692076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:34.692241Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:34.692274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:34.692372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:34.692406Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:34.692478Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:34.692511Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:34.692555Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:34.692589Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:34.692960Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:34.693015Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:34.693238Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:34.693273Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:34.693425Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:34.693465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:34.693671Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:34.693709Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:34.693827Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:34.693857Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-04-07T13:58:58.721470Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:58.721538Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:58.748724Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:58.750184Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T13:58:58.000000Z 2025-04-07T13:58:58.750244Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\251\260\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\251\260\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\251\260\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-04-07T13:59:00.122815Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:00.122921Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:59:00.147121Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:00.147418Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:00.147701Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:176:2191] 2025-04-07T13:59:00.148569Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 
2025-04-07T13:59:00.148640Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:176:2191] 2025-04-07T13:59:00.148717Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:00.148940Z node 2 :PERSQUEUE INFO: new Cookie owner1|2b3668c2-ec36979b-9812518e-44d0f609_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-07T13:59:00.149099Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {2, {0, 10}, 100001} Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:00.149486Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-04-07T13:59:00.150583Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-07T13:59:00.151232Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:59:00.189051Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:00.189238Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-07T13:59:00.189355Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 2025-04-07T13:59:00.453742Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-07T13:59:00.454758Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-04-07T13:59:00.455315Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:00.488968Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:00.489098Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-07T13:59:00.489181Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2025-04-07T13:59:00.716828Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-04-07T13:59:00.724507Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-04-07T13:59:00.725173Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:00.760397Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:00.760521Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2025-04-07T13:59:00.760600Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 6, partNo: 0, Offset: 102 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to B ... 
ured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:02.249243Z node 3 :PERSQUEUE INFO: new Cookie owner1|d628cb06-73235b58-3e02944d-8906ce95_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait write info error(2) 2025-04-07T13:59:03.006758Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.006832Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T13:59:03.025338Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:03.028236Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T13:59:03.000000Z 2025-04-07T13:59:03.028308Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\330\320\260\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 6 Wait batch completion Wait kv request Wait tx committed for tx 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-04-07T13:59:06.523818Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:06.523897Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:06.546729Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:06.548526Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T13:59:06.000000Z 2025-04-07T13:59:06.548605Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] Test command err: 2025-04-07T13:59:05.073975Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:59:05.078442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:59:05.078757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T13:59:05.078817Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:59:05.078881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T13:59:05.078940Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:59:05.078996Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:05.079056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T13:59:05.079095Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:59:05.122378Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:05.122478Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2208], now have 1 active actors on pipe 2025-04-07T13:59:05.122568Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:59:05.139490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:05.143703Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T13:59:05.143880Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:05.144929Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:05.145078Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:05.145546Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:05.146000Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:211:2215] 2025-04-07T13:59:05.146910Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T13:59:05.146983Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:211:2215] 2025-04-07T13:59:05.147039Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:59:05.147629Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T13:59:05.147698Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T13:59:05.147875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:05.148109Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:05.148405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:05.151019Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:05.151427Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:05.151473Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:218:2220], now have 1 active actors on pipe 2025-04-07T13:59:05.153212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:05.153284Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:223:2224], now have 1 active actors on pipe 2025-04-07T13:59:05.154145Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-07T13:59:05.154204Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T13:59:05.154326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-07T13:59:05.154385Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:59:05.154427Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T13:59:05.154582Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:59:05.154693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:05.159886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:59:05.159950Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T13:59:05.159983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T13:59:05.167200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 4294969484 } } Step: 100 2025-04-07T13:59:05.167314Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-07T13:59:05.167358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-07T13:59:05.167403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-07T13:59:05.167603Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions 
{ } 2025-04-07T13:59:05.167735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:05.174278Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:59:05.174353Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-04-07T13:59:05.174387Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-04-07T13:59:05.174429Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-07T13:59:05.174471Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-07T13:59:05.174541Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-07T13:59:05.174625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-07T13:59:05.174716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 2 2025-04-07T13:59:05.174977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2025-04-07T13:59:05.175016Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-07T13:59:05.175053Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-07T13:59:05.175095Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-07T13:59:05.175137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T13:59:05.175177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-07T13:59:05.175369Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIN ... 
Partitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } 2025-04-07T13:59:07.195022Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:07.195577Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [5:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:07.195660Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:07.195956Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:07.196175Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:183:2196] 2025-04-07T13:59:07.196942Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T13:59:07.196991Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:183:2196] 2025-04-07T13:59:07.197059Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:59:07.197448Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2025-04-07T13:59:07.197493Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2025-04-07T13:59:07.197632Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:07.197808Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:07.198150Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:07.200861Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:07.201228Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:07.201291Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:190:2201], now have 1 active actors on pipe 2025-04-07T13:59:07.203062Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:07.203122Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:195:2205], now have 1 active actors on pipe 2025-04-07T13:59:07.203253Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T13:59:07.203307Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T13:59:07.203360Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] partition {0, {0, 3}, 100000} for WriteId {0, 3} 2025-04-07T13:59:07.203563Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvSubscribeLock for WriteId {0, 3} 2025-04-07T13:59:07.203670Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:07.207333Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:59:07.207926Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:07.208233Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:07.208447Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] bootstrapping {0, {0, 3}, 100000} [5:202:2211] 2025-04-07T13:59:07.209235Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:07.210491Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-04-07T13:59:07.210762Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T13:59:07.211092Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T13:59:07.211334Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataStep 2025-04-07T13:59:07.211397Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T13:59:07.211440Z node 5 :PERSQUEUE INFO: [topic:{0, {0, 3}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T13:59:07.211482Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Initializing completed. 
2025-04-07T13:59:07.211538Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {0, 3}, 100000} generation 2 [5:202:2211] 2025-04-07T13:59:07.211603Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] SYNC INIT topic topic partitition {0, {0, 3}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:59:07.212092Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit request with generation 5 2025-04-07T13:59:07.212158Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit with generation 5 done 2025-04-07T13:59:07.212391Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:07.212527Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:07.215123Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId {0, {0, 3}, 100000} 2025-04-07T13:59:07.215364Z node 5 :PERSQUEUE INFO: new Cookie -=[ 0wn3r ]=-|8f3b5e11-4efa3101-55a86e08-908c4e00_0 generated for partition {0, {0, 3}, 100000} topic 'topic' owner -=[ 0wn3r ]=- Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:07.219562Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:07.219796Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {0, 3}, 100000} 2025-04-07T13:59:07.219921Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-07T13:59:07.220325Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:07.220380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server disconnected, pipe [5:195:2205] destroyed 2025-04-07T13:59:07.220481Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::DropOwner. 
2025-04-07T13:59:07.220678Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:07.220727Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:218:2221], now have 1 active actors on pipe 2025-04-07T13:59:07.220953Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 2 Data { Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Immediate: false WriteId { NodeId: 0 KeyId: 3 } } 2025-04-07T13:59:07.221010Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PartitionId {0, {0, 3}, 100000} for WriteId {0, 3} 2025-04-07T13:59:07.221059Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 has WriteId {0, 3} 2025-04-07T13:59:07.221099Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T13:59:07.221170Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State UNKNOWN 2025-04-07T13:59:07.221247Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:59:07.221303Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARING 2025-04-07T13:59:07.221468Z node 5 :PERSQUEUE DEBUG: [TxId: 2] save tx TxId: 2 State: PREPARED MinStep: 233 MaxStep: 30233 Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } WriteId { NodeId: 0 KeyId: 3 } Partitions { } 2025-04-07T13:59:07.221579Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:07.228344Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:59:07.228430Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State PREPARING 2025-04-07T13:59:07.228473Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:07.233097Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:07.233173Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:233:2235], now have 1 active actors on pipe 2025-04-07T13:59:07.233326Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T13:59:07.233382Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T13:59:07.233430Z node 5 :PERSQUEUE WARN: tablet 72057594037927937 topic 'topic error: it is forbidden to write after a commit 2025-04-07T13:59:07.233521Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error it is forbidden to write after a commit 2025-04-07T13:59:07.233561Z node 5 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: it is forbidden to write after a commit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-04-07T13:58:00.443895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570197933310009:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:00.444115Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7d/r3tmp/tmpBanEtV/pdisk_1.dat 2025-04-07T13:58:00.814772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:00.821090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:00.821212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5456, node 1 2025-04-07T13:58:00.828347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:00.881196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:00.881227Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:00.881234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:00.881317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:01.283454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:58:01.298309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:58:01.332058Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:58:03.669907Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:03.670008Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210818212315:2302], Start check tables existence, number paths: 2 2025-04-07T13:58:03.697244Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTA4ZjQzNjctNDAwZTA5ZGYtNDNhNTI4OWItNWE1MTAxZjg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTA4ZjQzNjctNDAwZTA5ZGYtNDNhNTI4OWItNWE1MTAxZjg= 2025-04-07T13:58:03.697556Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTA4ZjQzNjctNDAwZTA5ZGYtNDNhNTI4OWItNWE1MTAxZjg=, ActorId: [1:7490570210818212335:2303], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:03.698044Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:03.698063Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:03.698091Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:58:03.698187Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210818212315:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:58:03.698231Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210818212315:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:58:03.698261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210818212315:2302], Successfully finished 2025-04-07T13:58:03.702911Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:58:03.740121Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:03.743737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:58:03.744956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:58:03.745109Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:58:03.757543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:58:03.840864Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:03.845105Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210818212337:2296], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:58:03.854786Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM= 2025-04-07T13:58:03.855108Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM=, ActorId: [1:7490570210818212396:2304], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:03.855311Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM=, ActorId: [1:7490570210818212396:2304], ActorState: ReadyState, TraceId: 01jr88pgaf8x30b1690mc89ny7, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490570210818212395:2334] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:58:03.855356Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:58:03.855369Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:58:03.855433Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490570210818212396:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM= 2025-04-07T13:58:03.855480Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210818212398:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:58:03.855572Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570210818212399:2306], Database: /Root, Start database fetching 2025-04-07T13:58:03.859019Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570210818212399:2306], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-07T13:58:03.859157Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210818212398:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:58:03.859196Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-07T13:58:03.859236Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-07T13:58:03.859249Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T13:58:03.859475Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570210818212409:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: 
ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM=, Start pool fetching 2025-04-07T13:58:03.859493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210818212411:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:58:03.859569Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210818212410:2308], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T13:58:03.860986Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210818212410:2308], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T13:58:03.860985Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210818212411:2309], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:58:03.861068Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570210818212409:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM=, Pool info successfully resolved 2025-04-07T13:58:03.861112Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM= 2025-04-07T13:58:03.861175Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM= 2025-04-07T13:58:03.861220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210818212410:2308], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7490570210818212396:2304], session id: ydb://session/3?node_id=1&id=OWZkOGEzMGMtZjQwZGVhYjctZmUxNDU4NDQtMzM3ZDM4MGM= 2025-04-07T13:58:03.861285Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request ... 
3:59:03.797115Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:05.629259Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:59:05.629423Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:59:05.629442Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:59:05.629505Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490570476544791468:2305], Start check tables existence, number paths: 2 2025-04-07T13:59:05.632524Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ= 2025-04-07T13:59:05.635615Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490570476544791468:2305], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:59:05.635689Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490570476544791468:2305], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:59:05.635737Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490570476544791468:2305], Successfully finished 2025-04-07T13:59:05.636374Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:59:05.636638Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:59:05.636680Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:59:05.639421Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:59:05.644509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:59:05.655532Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:59:05.659536Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:59:05.670607Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:59:05.742222Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:59:05.745377Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570476544791495:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:59:05.751183Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc= 2025-04-07T13:59:05.751774Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:59:05.752023Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: ReadyState, TraceId: 01jr88rcrq28agxhrzjdw63hp1, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7490570476544791552:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:59:05.752070Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:59:05.752100Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:59:05.752169Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7490570476544791553:2308], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc= 2025-04-07T13:59:05.752222Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570476544791555:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:59:05.752333Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490570476544791556:2310], Database: /Root, Start database fetching 2025-04-07T13:59:05.760022Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570476544791555:2309], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:59:05.760092Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490570476544791556:2310], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-07T13:59:05.760125Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-07T13:59:05.760144Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T13:59:05.760358Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-07T13:59:05.760413Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7490570476544791567:2312], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: 
ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, Start pool fetching 2025-04-07T13:59:05.760459Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570476544791568:2313], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:59:05.760561Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570476544791566:2311], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T13:59:05.761907Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570476544791568:2313], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:59:05.761960Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570476544791566:2311], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T13:59:05.762079Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7490570476544791567:2312], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, Pool info successfully resolved 2025-04-07T13:59:05.762157Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc= 2025-04-07T13:59:05.762239Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc= 2025-04-07T13:59:05.762334Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: ExecuteState, TraceId: 01jr88rcrq28agxhrzjdw63hp1, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-04-07T13:59:05.762474Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: ExecuteState, TraceId: 01jr88rcrq28agxhrzjdw63hp1, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-07T13:59:05.762690Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7490570476544791553:2308], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc= 2025-04-07T13:59:05.762739Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: CleanupState, TraceId: 01jr88rcrq28agxhrzjdw63hp1, EndCleanup, isFinal: 1 2025-04-07T13:59:05.762849Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: CleanupState, TraceId: 01jr88rcrq28agxhrzjdw63hp1, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7490570446480019787:2060] 2025-04-07T13:59:05.762883Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: unknown state, TraceId: 01jr88rcrq28agxhrzjdw63hp1, Cleanup temp tables: 0 2025-04-07T13:59:05.763017Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU1MjA5MjctNmRiMjEzNjktODQ4YjlmMmMtMzUxMzJmOTc=, ActorId: [6:7490570476544791553:2308], ActorState: unknown state, TraceId: 01jr88rcrq28agxhrzjdw63hp1, Session actor destroyed 2025-04-07T13:59:05.791912Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:05.791969Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:05.792007Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:05.792040Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:05.792122Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTBhMGMzZGItNGZmZjczMmYtZWM1NjgwOTItM2QxMjcxYmQ=, ActorId: [6:7490570476544791469:2306], ActorState: unknown state, Session actor destroyed >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] Test command err: 2025-04-07T13:57:53.474128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570166533320928:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:53.478585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e9a/r3tmp/tmpHD2q0C/pdisk_1.dat 2025-04-07T13:57:53.815841Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61841, node 1 2025-04-07T13:57:53.837781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:53.837888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:53.858986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:53.938362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:53.938382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:53.938392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-07T13:57:53.938494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:54.219217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:54.231329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:54.271290Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:57:56.428440Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:57:56.428629Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:57:56.428649Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:57:56.433061Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA== 2025-04-07T13:57:56.433504Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:57:56.447459Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570179418223276:2301], Start check tables existence, number paths: 2 2025-04-07T13:57:56.447565Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [1:7490570179418223277:2302], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:56.452724Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570179418223276:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:57:56.452788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570179418223276:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:57:56.452816Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570179418223276:2301], Successfully finished 2025-04-07T13:57:56.452871Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 
2025-04-07T13:57:56.466512Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:56.471903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:57:56.473318Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:57:56.473468Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:57:56.488870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:57:56.536181Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:56.539866Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570179418223294:2295], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:57:56.549044Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T13:57:56.549080Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:57:56.549145Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570179418223356:2305], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T13:57:56.549426Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [1:7490570179418223277:2302], ActorState: ReadyState, TraceId: 01jr88p9648px3hyjgbgydte12, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-07T13:57:56.558755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570179418223356:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:56.558905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:56.846231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:56.851203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:57:56.853111Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [1:7490570179418223277:2302], ActorState: ExecuteState, TraceId: 01jr88p9648px3hyjgbgydte12, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7490570179418223357:2302] WorkloadServiceCleanup: 0 2025-04-07T13:57:56.854606Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [1:7490570179418223277:2302], ActorState: CleanupState, TraceId: 01jr88p9648px3hyjgbgydte12, EndCleanup, isFinal: 0 2025-04-07T13:57:56.854668Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGZlOTc4MjgtODI0NzI4ODItM2E0N2I2NzYtMzI2OTIzZA==, ActorId: [1:7490570179418223277:2302], ActorState: CleanupState, TraceId: 01jr88p9648px3hyjgbgydte12, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7490570166533321004:2256] 2025-04-07T13:57:56.862174Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmFkNzMxMzUtNWYxZDdhMmItZWNhNWUzZjQtZmFjMWZjOGE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmFkNzMxMzUtNWYxZDdhMmItZWNhNWUzZjQtZmFjMWZjOGE= 2025-04-07T13:57:56.862427Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmFkNzMxMzUtNWYxZDdhMmItZWNhNWUzZjQtZmFjMWZjOGE=, ActorId: [1:7490570179418223395:2308], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:56.862561Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmFkNzMxMzUtNWYxZDdhMmItZWNhNWUzZjQtZmFjMWZjOGE=, ActorId: [1:7490570179418223395:2308], ActorState: ReadyState, TraceId: 01jr88p9fy7tk7jb7t49bawxs4, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7490570179418223394:2354] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:57:56.862586Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:57:56.862620Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490570179418223395:2308], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NmFkNzMxMzUtNWYxZDdhMmItZWNhNWUzZjQtZmFjMWZjOGE= 2025-04-07T13:57:56.862677Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570179418223397:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:57:56.862734Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570179418223398:2310], Database: /Root, Start database fetching 2025-04-07T13:57:56.863234Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570179418223398:2310], Database: /Root, 
Database info successfully fetched, serverless: 0 2025-04-07T13:57:56.863296Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: ... Id: 01jr88re1vdm6f3k953pb7kyqz, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T13:59:07.083988Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2025-04-07T13:59:07.084031Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2025-04-07T13:59:07.084063Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, TExecPhysicalRequest, tx has commit locks 2025-04-07T13:59:07.084130Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, Sending to Executer TraceId: 0 8 2025-04-07T13:59:07.084205Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, Created new KQP executer: [7:7490570485318987522:2493] isRollback: 0 2025-04-07T13:59:07.100087Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:59:07.100279Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, txInfo Status: Committed Kind: ReadWrite TotalDuration: 30.816 ServerDuration: 30.671 QueriesCount: 2 2025-04-07T13:59:07.100390Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:07.100455Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:07.100490Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, EndCleanup, isFinal: 0 2025-04-07T13:59:07.100551Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re1vdm6f3k953pb7kyqz, Sent query response back to proxy, proxyRequestId: 31, proxyId: [7:7490570433779378492:2060] 2025-04-07T13:59:07.101175Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, TxId: 2025-04-07T13:59:07.101300Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-07T13:59:07.106984Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ReadyState, TraceId: 01jr88re321f017b5hbce3thct, received request, proxyRequestId: 32 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7490570485318987530:2501] database: /Root databaseId: /Root pool id: default 2025-04-07T13:59:07.107035Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ReadyState, TraceId: 01jr88re321f017b5hbce3thct, request placed into pool from cache: default 2025-04-07T13:59:07.110013Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, ExecutePhyTx, tx: 0x000050C000318D18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:07.110091Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, Sending to Executer TraceId: 0 8 2025-04-07T13:59:07.110171Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, Created new KQP executer: [7:7490570485318987534:2493] 
isRollback: 0 2025-04-07T13:59:07.152969Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T13:59:07.153055Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, ExecutePhyTx, tx: 0x000050C00031CD98 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:07.153985Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:59:07.154126Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, txInfo Status: Committed Kind: ReadOnly TotalDuration: 44.291 ServerDuration: 44.153 QueriesCount: 2 2025-04-07T13:59:07.154242Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:07.154309Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:07.154339Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, EndCleanup, isFinal: 0 2025-04-07T13:59:07.154389Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ExecuteState, TraceId: 01jr88re321f017b5hbce3thct, Sent query response back to proxy, proxyRequestId: 32, proxyId: [7:7490570433779378492:2060] 2025-04-07T13:59:07.154964Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, TxId: 2025-04-07T13:59:07.155027Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, TxId: 2025-04-07T13:59:07.155109Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7490570459549182973:2310], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-07T13:59:07.155204Z node 7 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:07.155229Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:07.155250Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:07.155268Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:07.155315Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Y2QyMzExNzMtMTMzZjVlMi01ZDgzMzY2OS1kN2YxNmQyMw==, ActorId: [7:7490570485318987496:2493], ActorState: unknown state, Session actor destroyed |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20070, MsgBus: 11736 2025-04-07T13:57:19.155805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570023531678260:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:19.156128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001055/r3tmp/tmpkVabcn/pdisk_1.dat 2025-04-07T13:57:19.618106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:19.618219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:19.629187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:19.629487Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20070, node 1 2025-04-07T13:57:19.803583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:19.803609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:19.803616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:19.803716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11736 TClient is connected to server localhost:11736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:20.588455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:20.636208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:20.899436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:21.229755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:21.371574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:24.156933Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570023531678260:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:24.157020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:25.758449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570049301483606:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:25.758601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:26.135091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.241378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.358390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.409259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.467458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.534316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:26.624913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570053596451430:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:26.625045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:26.625726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570053596451435:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:26.629677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:26.658484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570053596451439:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:28.173099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.219008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.241521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.265654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.344014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.523887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.576771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.668869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.746809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.793452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.836268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.907869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:28.986590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.064636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:30.148815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.256100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.336145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.382244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.442399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:30.482855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... access permissions } 2025-04-07T13:58:58.089500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:58:58.111464Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570447949528165:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:59:00.714261Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.768509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.847846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.926165Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.988875Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.228860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.288744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.374348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.438126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.515712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.570115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.636316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T13:59:01.712403Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T13:59:02.856124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T13:59:02.947441Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.046843Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.125207Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.169079Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.234955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.292255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.371022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.429977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.509161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.629995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.707872Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.773894Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.852411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-07T13:59:03.924556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.045654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.155562Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.234973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.356633Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.458577Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.567508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.644115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.702111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T13:59:04.840511Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.120824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-07T13:59:05.208701Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.303390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.374184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.451658Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.519514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.576763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:05.576802Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:05.627639Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.722613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.807851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-07T13:59:05.993019Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-07T13:59:06.115887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T13:59:06.173020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T13:59:06.227645Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_OlapInsert |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3544, MsgBus: 23038 2025-04-07T13:57:12.327707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569993364116825:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:12.327738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106b/r3tmp/tmplWFkPG/pdisk_1.dat 2025-04-07T13:57:12.940442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:12.947743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:12.947858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:12.949841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3544, node 1 2025-04-07T13:57:13.235463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:13.235483Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:13.235489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:13.235562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23038 TClient is connected to server localhost:23038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:14.085169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:14.125858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:14.335588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:14.569893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:14.707840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.105447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570010543987566:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:17.115885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:17.173879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.274267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.331874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569993364116825:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:17.332053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:17.362488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.392889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.481017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.553801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:17.631220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570014838955364:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:17.631282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:17.631454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570014838955369:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:17.640436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:17.654763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570014838955371:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:19.116436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.146871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.196093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.273963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.314137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.485341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.554250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.611271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.660176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.698973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.783394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.814108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.843438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.519093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:20.583226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.677305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.717139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.793672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.852491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.905108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... =RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:57.553904Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:57.554003Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:57.554028Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:57.554433Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:57.554469Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:57.554548Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:57.554574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:57.554706Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:57.554741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:57.554823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:57.554864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:57.554924Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:57.554950Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:57.554986Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:57.555020Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:57.555169Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:57.555208Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:57.555277Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:57.555308Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:57.555344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:57.555381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:57.555490Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:57.555521Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:57.555592Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:57.555625Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:57.555637Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:57.555666Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:57.555719Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:57.555742Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:57.555812Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:57.555840Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:57.555938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:57.555962Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:58:57.571088Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:58:57.571144Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:58:57.571256Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:58:57.571288Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:58:57.571415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:58:57.571439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:58:57.571507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:58:57.571534Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:58:57.571590Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:58:57.571612Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:58:57.571644Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:58:57.571671Z node 
5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:58:57.571922Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:58:57.571977Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:58:57.572159Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:58:57.572190Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:58:57.572321Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:58:57.572347Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:58:57.572508Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:58:57.572534Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:58:57.572628Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:58:57.572652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> TGRpcCmsTest::DescribeOptionsTest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:36.170245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 
1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:36.170334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:36.170384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:36.170424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:36.170468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:36.170500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:36.170558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:36.170848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:36.262748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:36.262815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:36.277638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:36.277816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:36.278006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:36.284334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:36.284516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:36.285008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.285203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:36.286743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.287779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:36.287837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.287944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:36.287974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:36.287998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:36.288134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.294565Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:36.398715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:36.398967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.399188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:36.399391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:36.399457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.402056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.402172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:36.402329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.402380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:36.402429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:36.402460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:36.404204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.404264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:36.404305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:36.405872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.405918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.405950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.405997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.409197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:36.411132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:36.411337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-07T13:58:36.412198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:36.412342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:36.412398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.412618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:36.412689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:36.412843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:36.412923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:36.414905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:36.414947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:36.415118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:36.415170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:36.415399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:36.415443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:36.415523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:36.415554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.415598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:36.415639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:36.415687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:36.415716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:36.415770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:36.415804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:36.415829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:36.417667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-07T13:58:36.417768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:36.417823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:36.417879Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:36.417942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:36.418061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... eshard: 72057594046678944 2025-04-07T13:59:11.127109Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:11.127205Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:11.127284Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:11.127404Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.137599Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:121:2147] sender: [27:235:2058] recipient: [27:15:2062] 2025-04-07T13:59:11.151694Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:11.152035Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.152412Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:11.152744Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:11.152842Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.156256Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:11.156435Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:11.156750Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.156866Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:11.156939Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-07T13:59:11.157010Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:11.159518Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.159601Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:11.159670Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:11.161917Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.161986Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.162077Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:11.162174Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:11.162440Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:11.164502Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:11.164846Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:11.166140Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:11.166358Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 115964119147 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:11.166456Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:11.166849Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:11.166963Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:11.167324Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:11.167461Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:11.171715Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:11.171823Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, 
path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:11.172119Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:11.172198Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:202:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:11.172782Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.172873Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:11.173109Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:11.173177Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:11.173286Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:11.173376Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:11.173447Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:11.173508Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:11.173627Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:11.173702Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:11.173777Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:11.174629Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:11.174818Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:11.174904Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:11.174988Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:11.175075Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:11.175233Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T13:59:11.179338Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T13:59:11.180137Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T13:59:11.181650Z node 27 :TX_PROXY DEBUG: actor# [27:265:2257] Bootstrap 2025-04-07T13:59:11.214815Z node 27 :TX_PROXY DEBUG: actor# [27:265:2257] Become StateWork (SchemeCache [27:270:2262]) 2025-04-07T13:59:11.218349Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:11.219015Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:11.219230Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-07T13:59:11.223468Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-07T13:59:11.225928Z node 27 :TX_PROXY DEBUG: actor# [27:265:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:59:11.241658Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:11.242021Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T13:59:11.242980Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> YdbSdkSessionsPool::StressTestSync10 |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> YdbSdkSessionsPool::StressTestAsync1 >> YdbSdkSessionsPool::RunSmallPlan >> KqpCost::IndexLookup+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] Test command err: 2025-04-07T13:56:22.855111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569776172448020:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:22.855541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-04-07T13:56:23.111146Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001697/r3tmp/tmpjchPOu/pdisk_1.dat 2025-04-07T13:56:23.392133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:23.392236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:23.393643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:23.422246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16962, node 1 2025-04-07T13:56:23.621340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001697/r3tmp/yandexAfd1LN.tmp 2025-04-07T13:56:23.621370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001697/r3tmp/yandexAfd1LN.tmp 2025-04-07T13:56:23.627434Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001697/r3tmp/yandexAfd1LN.tmp 2025-04-07T13:56:23.627577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:23.685430Z INFO: TTestServer started on Port 30523 GrpcPort 16962 TClient is connected to server localhost:30523 PQClient connected to localhost:16962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:24.055442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:24.066686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:24.083475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:56:26.209765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569793352317773:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.209927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.210203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569793352317809:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.214899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:26.250852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569793352317811:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:26.464987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:26.474657Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569793352317883:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:26.476731Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTE3ZGNmOWQtNjkwMTIxYWMtY2FmMzUzMTgtN2ZkOTg4NQ==, ActorId: [1:7490569793352317770:2307], ActorState: ExecuteState, TraceId: 01jr88kgyfcrjeez96xybe01bg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:26.479004Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:26.530969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:26.659242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569793352318158:2620] 2025-04-07T13:56:27.852700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569776172448020:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:27.853386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:33.065783Z :Restarts INFO: TTopicSdkTestSetup started 2025-04-07T13:56:33.081773Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:33.131640Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:33.132827Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:33.133083Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:33.133274Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:33.133296Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:33.133313Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:33.133340Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:33.133367Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:33.133406Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:33.133425Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:33.146517Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:33.146591Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569823417089553:2806], now have 1 active actors on pipe 2025-04-07T13:56:33.146624Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:56:33.147225Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569823417089554:2807] connected; active server actors: 1 2025-04-07T13:56:33.147509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-07T13:56:33.148232Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:33.148365Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:33.148594Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:33.148630Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:33.194969Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7490569780467415520:2192] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 Tab ... 
son# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:11.206777Z node 11 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [11:7490570446877493729:2138] txId 281474976710672 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T13:59:11.206867Z node 11 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:11.207389Z node 11 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:11.207760Z node 11 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [11:7490570502712069756:2433] 2025-04-07T13:59:11.208873Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 11, Generation 1 2025-04-07T13:59:11.208903Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 11, Generation 2 2025-04-07T13:59:11.208939Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:11.208982Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [11:7490570502712069752:2429], now have 1 active actors on pipe 2025-04-07T13:59:11.209300Z node 11 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Initializing completed. 
2025-04-07T13:59:11.209368Z node 11 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'test-topic' partition 0 generation 1 [11:7490570502712069756:2433] 2025-04-07T13:59:11.209431Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic test-topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:59:11.210162Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:11.210448Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:59:11.215341Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:59:11.216090Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:59:11.225382Z :DEBUG: [/Root] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-07T13:59:11.225859Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-07T13:59:11.225917Z :INFO: [/Root] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:27454 2025-04-07T13:59:11.297862Z :DEBUG: [/Root] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:59:11.304868Z node 11 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T13:59:11.304946Z node 11 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-07T13:59:11.307999Z node 11 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-07T13:59:11.308129Z node 11 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:58986 2025-04-07T13:59:11.308150Z node 11 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:58986 proto=v1 topic=test-topic durationSec=0 2025-04-07T13:59:11.308163Z node 11 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T13:59:11.311689Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T13:59:11.311957Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T13:59:11.311978Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:59:11.311995Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; 
DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T13:59:11.312050Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [11:7490570502712069783:2437] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T13:59:11.312080Z node 11 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T13:59:11.313246Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 11, Generation: 1 2025-04-07T13:59:11.313296Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:11.313346Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [11:7490570502712069786:2437], now have 1 active actors on pipe 2025-04-07T13:59:11.313574Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:11.313618Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:11.313724Z node 11 :PERSQUEUE INFO: new Cookie src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 generated for partition 0 topic 'test-topic' owner src 2025-04-07T13:59:11.313853Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T13:59:11.313925Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:11.315501Z :INFO: [/Root] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034351315 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:59:11.315627Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0" topic: "test-topic" 2025-04-07T13:59:11.315845Z :INFO: [/Root] MessageGroupId [src] SessionId [src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0] Write session: close. 
Timeout = 0 ms 2025-04-07T13:59:11.315894Z :INFO: [/Root] MessageGroupId [src] SessionId [src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0] Write session will now close 2025-04-07T13:59:11.315952Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0] Write session: aborting 2025-04-07T13:59:11.314282Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:11.314311Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:11.314400Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:11.314518Z node 11 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 2025-04-07T13:59:11.316444Z :INFO: [/Root] MessageGroupId [src] SessionId [src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:11.316498Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0] Write session: destroy 2025-04-07T13:59:11.317856Z node 11 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 grpc read done: success: 0 data: 2025-04-07T13:59:11.317905Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 grpc read failed 2025-04-07T13:59:11.318000Z node 11 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 2025-04-07T13:59:11.318038Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|aef678c9-c1efed3a-bb6b5112-3923e6a4_0 is DEAD 2025-04-07T13:59:11.318710Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:11.319359Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:11.319447Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [11:7490570502712069786:2437] destroyed 2025-04-07T13:59:11.319562Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:59:11.333985Z :ValidateSettingsFailOnStart INFO: Topic created 2025-04-07T13:59:11.345523Z :NOTICE: [/Root] [/Root] [775e52e1-20a1ceea-d5d35963-7212115d] Aborting read session. Description: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: Invalid read session settings
: Error: Too small max memory usage. Valid values start from 1 megabyte. " } 2025-04-07T13:59:11.345670Z :NOTICE: [/Root] [/Root] [775e52e1-20a1ceea-d5d35963-7212115d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:59:11.345795Z :INFO: [/Root] [/Root] [775e52e1-20a1ceea-d5d35963-7212115d] Closing read session. Close timeout: 0.000000s 2025-04-07T13:59:11.345908Z :NOTICE: [/Root] [/Root] [775e52e1-20a1ceea-d5d35963-7212115d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2025-04-07T13:56:27.858997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569796336742413:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:27.859049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001655/r3tmp/tmpyuCRRo/pdisk_1.dat 2025-04-07T13:56:28.073916Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:56:28.247583Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:28.262973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:28.263057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:28.265728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24290, node 1 2025-04-07T13:56:28.332269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001655/r3tmp/yandex8lrd3o.tmp 2025-04-07T13:56:28.332295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001655/r3tmp/yandex8lrd3o.tmp 2025-04-07T13:56:28.332518Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001655/r3tmp/yandex8lrd3o.tmp 2025-04-07T13:56:28.332613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:28.382811Z INFO: TTestServer started on Port 2936 GrpcPort 24290 TClient is connected to server localhost:2936 PQClient connected to localhost:24290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:28.725642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T13:56:28.759427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:28.917373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:30.800463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569809221644868:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.800572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569809221644895:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.800640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.818163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:30.841352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569809221644897:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:31.065746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:31.075886Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569813516612267:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:31.077675Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzg1Mzg4MTgtYzU5ODM3YWYtMTBiMTRiMjQtZmNlNjk2ZDA=, ActorId: [1:7490569809221644863:2306], ActorState: ExecuteState, TraceId: 01jr88kndv802mcry9sy5z3ywj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:31.080244Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:31.100898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:31.169532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569813516612535:2614] 2025-04-07T13:56:32.859419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569796336742413:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:32.859517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:37.392892Z :Basic INFO: TTopicSdkTestSetup started 2025-04-07T13:56:37.405160Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:37.420450Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569839286416632:2796] connected; active server actors: 1 2025-04-07T13:56:37.421016Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:37.421464Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:37.421577Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:37.422029Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:37.431696Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:37.431910Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:37.432359Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:37.432520Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:37.432546Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:37.432562Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:37.432587Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:37.432609Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:37.432697Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:37.432718Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:37.433102Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:37.433125Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:37.433143Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569839286416649:2436], now have 1 active actors on pipe 2025-04-07T13:56:37.464512Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:37.464549Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569839286416631:2795], now have 1 active actors on pipe 2025-04-07T13:56:37.464575Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:56:37.474984Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7490569800631709901:2186] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 
Topic ... t64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T13:59:12.382344Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T13:59:12.382374Z node 11 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [11:7490570506590779886:2460] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-07T13:59:12.382404Z node 11 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-07T13:59:12.383318Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 11, Generation: 1 2025-04-07T13:59:12.383358Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:12.383404Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [11:7490570506590779889:2460], now have 1 active actors on pipe 2025-04-07T13:59:12.383595Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:12.383634Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:12.383721Z node 11 :PERSQUEUE INFO: new Cookie test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2025-04-07T13:59:12.383830Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T13:59:12.383892Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:12.388761Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:12.388822Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:12.388955Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:12.389081Z node 11 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 2025-04-07T13:59:12.393208Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|41c5a47f-a5f1bf66-7fe9ea78-cfe8d072_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:59:12.393391Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|41c5a47f-a5f1bf66-7fe9ea78-cfe8d072_0] PartitionId [0] Generation [1] Write session established. 
Init response: session_id: "test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0" 2025-04-07T13:59:12.393450Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 2025-04-07T13:59:12.393524Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2025-04-07T13:59:12.393893Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2025-04-07T13:59:12.393993Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: close. Timeout 18446744073709.551615s 2025-04-07T13:59:12.397302Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: try to update token 2025-04-07T13:59:12.397356Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2025-04-07T13:59:12.401763Z node 11 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T13:59:12.402107Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T13:59:12.405037Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:12.405104Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:12.405287Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T13:59:12.405389Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:59:12.405749Z node 11 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T13:59:12.405774Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T13:59:12.405836Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-07T13:59:12.406067Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2025-04-07T13:59:12.425069Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2025-04-07T13:59:12.426025Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1744034352425 2025-04-07T13:59:12.426310Z node 11 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T13:59:12.430039Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:12.430121Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:12.432332Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T13:59:12.432580Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 4000000 } min_queue_wait_time { nanos: 20000000 } max_queue_wait_time { nanos: 20000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-07T13:59:12.432629Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 2025-04-07T13:59:12.432690Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2025-04-07T13:59:12.430649Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:59:12.430949Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:12.430996Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T13:59:12.431110Z node 11 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:59:12.431269Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:59:12.431727Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T13:59:12.431768Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:59:12.431817Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T13:59:12.431843Z node 11 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:59:12.431929Z node 11 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1744034352404 queuesize 0 startOffset 0 2025-04-07T13:59:12.496774Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:12.496858Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:12.497463Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:12.497787Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:59:12.502242Z node 11 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 grpc read done: success: 0 data: 2025-04-07T13:59:12.502307Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 grpc read failed 2025-04-07T13:59:12.502362Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 grpc closed 2025-04-07T13:59:12.502409Z node 11 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|dbb5e2d5-32719e2e-65787fba-a0c80446_0 is DEAD 2025-04-07T13:59:12.503462Z node 11 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:12.505312Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:12.505411Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [11:7490570506590779889:2460] destroyed 2025-04-07T13:59:12.505653Z node 11 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> YdbSdkSessionsPool::PeriodicTask10 >> YdbSdkSessionsPool::WaitQueue1 >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> YdbSdkSessionsPool::StressTestAsync10 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::Batching >> YdbSdkSessionsPool::WaitQueue10 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] Test command err: 2025-04-07T13:56:23.291371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569781844687179:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:23.291395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016cc/r3tmp/tmpcAaTnF/pdisk_1.dat 2025-04-07T13:56:23.638014Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:56:23.905107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:23.910354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:23.910411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:23.932085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5973, node 1 2025-04-07T13:56:24.121388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0016cc/r3tmp/yandexEwN6lk.tmp 2025-04-07T13:56:24.121418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0016cc/r3tmp/yandexEwN6lk.tmp 2025-04-07T13:56:24.121572Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0016cc/r3tmp/yandexEwN6lk.tmp 2025-04-07T13:56:24.121682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:24.261509Z INFO: TTestServer started on Port 28433 GrpcPort 5973 TClient is connected to server localhost:28433 PQClient connected to localhost:5973 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:24.738591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:24.769532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:56:26.810763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569794729589855:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.810850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.811505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569794729589874:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:26.815196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:26.834059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569794729589876:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:27.034333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.044507Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569799024557245:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:27.047690Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2M5ZjQ3ZC05ZTRiMWQwZC0zMmFjZTRkMy1hMmEzMGQzZg==, ActorId: [1:7490569794729589844:2308], ActorState: ExecuteState, TraceId: 01jr88khhf87nhcvdh1msqrcxj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:27.049481Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:27.076058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.200321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569799024557530:2621] 2025-04-07T13:56:28.291803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569781844687179:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:28.291860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:33.471178Z :WriteToTopic_Demo_2 INFO: TTopicSdkTestSetup started 2025-04-07T13:56:33.491163Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:33.505945Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569824794361624:2797] connected; active server actors: 1 2025-04-07T13:56:33.506509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:33.507012Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:33.507198Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:33.507657Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:33.511255Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:33.512384Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:33.512688Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:33.512886Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:33.512908Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:33.512923Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:33.512950Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:33.512974Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:33.513024Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:33.513045Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:33.518580Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:33.518654Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:33.518725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569824794361640:2438], now have 1 active actors on pipe 2025-04-07T13:56:33.551546Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:33.551582Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569824794361623:2796], now have 1 active actors on pipe 2025-04-07T13:56:33.567197Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569781844687599 RawX2: 4294969488 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true 
FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: ... 82c484] [] In ContinueReadingDataImpl, ReadSizeBudget = 15001450, ReadSizeServerDelta = 37427350 2025-04-07T13:59:10.429693Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T13:59:10.429764Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T13:59:10.429796Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:59:10.429829Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (2-7) 2025-04-07T13:59:10.429850Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:59:10.429900Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (8-11) 2025-04-07T13:59:10.429920Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Returning serverBytesSize = 0 to budget 2025-04-07T13:59:10.430262Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T13:59:10.430317Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T13:59:10.430348Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-07T13:59:10.430374Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (3-3) 2025-04-07T13:59:10.430408Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 1} (4-4) 2025-04-07T13:59:10.430435Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 2} (5-5) 2025-04-07T13:59:10.430463Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 3} (6-6) 2025-04-07T13:59:10.430497Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 0} (7-7) 2025-04-07T13:59:10.430533Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 0} (8-8) 2025-04-07T13:59:10.430561Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 1} (9-9) 2025-04-07T13:59:10.430588Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 2} (10-10) 2025-04-07T13:59:10.430617Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 3} (11-11) 2025-04-07T13:59:10.430679Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] The application data is transferred to the client. Number of messages 12, size 15000000 bytes 2025-04-07T13:59:10.430735Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Returning serverBytesSize = 0 to budget 0 12 2025-04-07T13:59:10.430921Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Commit offsets [0, 12). 
Partition stream id: 1 2025-04-07T13:59:10.432834Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 grpc read done: success# 1, data# { read_request { bytes_size: 15001450 } } 2025-04-07T13:59:10.433019Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 got read request: guid# 760e7db9-bbc8ac43-8a699327-f7e42b7 2025-04-07T13:59:10.437590Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 12 } } } } 2025-04-07T13:59:10.437801Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 12 prev 0 end 12 by cookie 2 2025-04-07T13:59:10.440874Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-07T13:59:10.440927Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-07T13:59:10.441071Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 12 (startOffset 0) session test-consumer_10_1_12436726460890855681_v1 2025-04-07T13:59:10.441246Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:59:10.444374Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 12 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:10.444453Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:10.444518Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-07T13:59:10.444642Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-07T13:59:10.444706Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 12 endOffset 12 with cookie 2 2025-04-07T13:59:10.444758Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 12 2025-04-07T13:59:10.445784Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 12 } } 2025-04-07T13:59:11.088878Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2025-04-07T13:59:11.088951Z :INFO: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1004 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 
0 } 2025-04-07T13:59:11.104039Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 checking auth because of timeout 2025-04-07T13:59:11.104108Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 auth for : test-consumer 2025-04-07T13:59:11.104957Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 Handle describe topics response 2025-04-07T13:59:11.105061Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 auth is DEAD 2025-04-07T13:59:11.105142Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 auth ok: topics# 1, initDone# 1 2025-04-07T13:59:11.673236Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:59:12.088853Z :INFO: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] Closing read session. Close timeout: 0.000000s 2025-04-07T13:59:12.088918Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2025-04-07T13:59:12.088954Z :INFO: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2004 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:59:12.089038Z :NOTICE: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:59:12.089076Z :DEBUG: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] [] Abort session to cluster 2025-04-07T13:59:12.089566Z :NOTICE: [/Root] [/Root] [852bf349-69328f00-14a488c2-f782c484] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:59:12.097455Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-04-07T13:59:12.104737Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0] PartitionId [0] Generation [2] Write session will now close 2025-04-07T13:59:12.104836Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0] PartitionId [0] Generation [2] Write session: aborting 2025-04-07T13:59:12.105049Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:12.105094Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0] PartitionId [0] Generation [2] Write session: destroy 2025-04-07T13:59:12.116407Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 grpc read done: success# 0, data# { } 2025-04-07T13:59:12.116437Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 grpc read failed 2025-04-07T13:59:12.116466Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 grpc closed 2025-04-07T13:59:12.116499Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_12436726460890855681_v1 is DEAD 2025-04-07T13:59:12.124000Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7490570498604425289:2519] disconnected; active server actors: 1 2025-04-07T13:59:12.124037Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7490570498604425289:2519] client test-consumer disconnected session test-consumer_10_1_12436726460890855681_v1 2025-04-07T13:59:12.124142Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:12.124166Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_12436726460890855681_v1 2025-04-07T13:59:12.124196Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570498604425292:2522] destroyed 2025-04-07T13:59:12.124240Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_12436726460890855681_v1 2025-04-07T13:59:12.137200Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0 grpc read done: success: 0 data: 2025-04-07T13:59:12.137246Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0 grpc read failed 2025-04-07T13:59:12.142637Z node 10 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0 2025-04-07T13:59:12.142672Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|e18d5635-d6239c2c-de7e9e9-448d83ae_0 is DEAD 2025-04-07T13:59:12.143058Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:12.144907Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 
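The trace above covers the whole client-side lifecycle of a topic read session: partition data is decompressed and handed to the application, offsets [0, 12) are committed back to partition 0 of topic_A, and the session is then closed with a zero timeout, which is what surfaces as the ABORTED "Close with zero timeout" issue. As a rough illustration of the calls that produce this sequence, here is a minimal sketch against the C++ Topic SDK (NYdb::NTopic) that this run builds; the endpoint, database and topic/consumer names are placeholders and error handling is omitted.

#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>   // header path may differ between SDK layouts

using namespace NYdb;
using namespace NYdb::NTopic;

int main() {
    // Placeholder connection settings; the test itself runs an in-process server.
    TDriver driver(TDriverConfig().SetEndpoint("localhost:2135").SetDatabase("/Root"));
    TTopicClient client(driver);

    // Mirrors the session in the log: consumer "test-consumer" reading "topic_A".
    auto session = client.CreateReadSession(TReadSessionSettings()
        .ConsumerName("test-consumer")
        .AppendTopics(TTopicReadSettings("topic_A")));

    // Consume one data event and acknowledge it; this is what produces the
    // "Commit offsets [0, 12)" / "Committed response" pair in the trace.
    while (auto event = session->GetEvent(/*block=*/true)) {
        if (auto* start = std::get_if<TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
            start->Confirm();                       // accept the partition assignment
        } else if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
            for (const auto& msg : data->GetMessages()) {
                auto payload = msg.GetData();       // application consumes the payload
                Y_UNUSED(payload);
            }
            data->Commit();                         // ack the read offsets to the server
            break;
        }
    }

    // A zero timeout takes the "Close with zero timeout" / ABORTED path seen above.
    session->Close(TDuration::Zero());
    driver.Stop(true);
    return 0;
}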
2025-04-07T13:59:12.144994Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570490014490627:2500] destroyed 2025-04-07T13:59:12.145061Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner.
>> TPartitionTests::Batching [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD]
Test command err:
2025-04-07T13:58:00.114539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570197691325892:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:00.122030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e8c/r3tmp/tmpAiLYB6/pdisk_1.dat 2025-04-07T13:58:00.491061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:00.579877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:00.580025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 63828, node 1 2025-04-07T13:58:00.582015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:00.772939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:00.772963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:00.772969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:00.773057Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:01.148200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
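The workload-service trace that continues below (pool creation, classifier resolution, requests being placed into sample_pool_id) is driven by SQL DDL plus ordinary queries. A hedged sketch of that shape using the generic query client follows; the RESOURCE POOL / RESOURCE POOL CLASSIFIER statements and their option names are reconstructed from the test name and common workload-manager DDL, not taken from this log, so treat them as assumptions.

#include <ydb/public/sdk/cpp/client/ydb_query/client.h>

using namespace NYdb;
using namespace NYdb::NQuery;

// Small helper for the sketch: run one statement and abort on failure.
static void Run(TQueryClient& client, const TString& query) {
    auto result = client.ExecuteQuery(query, TTxControl::NoTx()).GetValueSync();
    Y_ABORT_UNLESS(result.IsSuccess(), "%s", result.GetIssues().ToString().c_str());
}

int main() {
    TDriver driver(TDriverConfig().SetEndpoint("localhost:2135").SetDatabase("/Root"));
    TQueryClient client(driver);

    // Pool name matches the log; the limits are illustrative only.
    Run(client, "CREATE RESOURCE POOL sample_pool_id WITH (CONCURRENT_QUERY_LIMIT = 5, QUEUE_SIZE = 10);");

    // Route requests from a given member into the pool (option names are assumptions).
    Run(client, "CREATE RESOURCE POOL CLASSIFIER sample_classifier "
                "WITH (RESOURCE_POOL = 'sample_pool_id', MEMBER_NAME = 'test@builtin');");

    // The "TestAlterResourcePoolClassifier" part of the suite then alters the classifier.
    Run(client, "ALTER RESOURCE POOL CLASSIFIER sample_classifier SET (RANK = 1);");

    // Ordinary queries get placed into the pool by the workload service, which is
    // what the "Request placed into pool" lines in the trace below record.
    Run(client, "SELECT 42;");

    driver.Stop(true);
    return 0;
}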
2025-04-07T13:58:01.161124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:58:01.242995Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:58:03.175690Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:03.176179Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:58:03.176203Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:03.176224Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:03.194540Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWVkZjE3ZTctMzZkNmVjMi01YzBjNjI3My05NTY5OWZiYg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWVkZjE3ZTctMzZkNmVjMi01YzBjNjI3My05NTY5OWZiYg== 2025-04-07T13:58:03.195046Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210576228392:2302], Start check tables existence, number paths: 2 2025-04-07T13:58:03.195158Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWVkZjE3ZTctMzZkNmVjMi01YzBjNjI3My05NTY5OWZiYg==, ActorId: [1:7490570210576228393:2303], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:03.200541Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210576228392:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:58:03.200600Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210576228392:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:58:03.200645Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570210576228392:2302], Successfully finished 2025-04-07T13:58:03.200743Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:58:03.209392Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:03.220388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:58:03.224918Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:58:03.226940Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:58:03.261559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:58:03.328055Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:03.332984Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570210576228410:2296], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:58:03.341160Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ== 2025-04-07T13:58:03.341489Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:58:03.341526Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:58:03.341589Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, ActorId: [1:7490570210576228468:2304], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:03.341817Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, ActorId: [1:7490570210576228468:2304], ActorState: ReadyState, TraceId: 01jr88pftd3eeaw18xx6kjqdzg, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490570210576228467:2333] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:58:03.341854Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490570210576228468:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ== 2025-04-07T13:58:03.341902Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210576228470:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:58:03.341985Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570210576228471:2306], Database: /Root, Start database fetching 2025-04-07T13:58:03.342614Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570210576228471:2306], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-07T13:58:03.342665Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-07T13:58:03.342708Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570210576228480:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, Start pool fetching 2025-04-07T13:58:03.342729Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210576228481:2308], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:58:03.343181Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210576228481:2308], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:58:03.343233Z node 
1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570210576228470:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:58:03.343277Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570210576228480:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, Pool info successfully resolved 2025-04-07T13:58:03.343307Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-07T13:58:03.343321Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T13:58:03.343493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ== 2025-04-07T13:58:03.343550Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210576228484:2309], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T13:58:03.343616Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210576228484:2309], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7490570210576228468:2304], session id: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ== 2025-04-07T13:58:03.343649Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570210576228484:2309], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7490570210576228468:2304], session id: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1iN2FmMzg5NC1lYjg4YjQ1NQ==, local in flight: 1 2025-04-07T13:58:03.343677Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTdmNWU3ZS0yMWUzOWU3Mi1 ... 
ode_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Sending CompileQuery request 2025-04-07T13:59:14.463336Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc= 2025-04-07T13:59:14.463680Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:59:14.463821Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ReadyState, TraceId: 01jr88rn8z00r0knnfptkme7af, received request, proxyRequestId: 90 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [5:7490570516842221552:3270] database: Root databaseId: /Root pool id: default 2025-04-07T13:59:14.463846Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ReadyState, TraceId: 01jr88rn8z00r0knnfptkme7af, request placed into pool from cache: default 2025-04-07T13:59:14.463932Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Sending CompileQuery request 2025-04-07T13:59:14.572906Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, ExecutePhyTx, tx: 0x000050C00017D058 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:14.572964Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Sending to Executer TraceId: 0 8 2025-04-07T13:59:14.573043Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Created new KQP executer: [5:7490570516842221561:2838] isRollback: 0 2025-04-07T13:59:14.574712Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Forwarded TEvStreamData to [5:7490570516842221552:3270] 2025-04-07T13:59:14.575310Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:59:14.575442Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, txInfo Status: Committed 
Kind: Pure TotalDuration: 2.63 ServerDuration: 2.563 QueriesCount: 2 2025-04-07T13:59:14.575506Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:14.575690Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:14.575719Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, EndCleanup, isFinal: 1 2025-04-07T13:59:14.575765Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: ExecuteState, TraceId: 01jr88rn8z00r0knnfptkme7af, Sent query response back to proxy, proxyRequestId: 90, proxyId: [5:7490570422352938377:2060] 2025-04-07T13:59:14.575786Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: unknown state, TraceId: 01jr88rn8z00r0knnfptkme7af, Cleanup temp tables: 0 2025-04-07T13:59:14.576059Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NzM1NWE2ZDQtZTNlMTY4ZTMtMWYzMThmNTEtNDljZmNmYjc=, ActorId: [5:7490570516842221553:2838], ActorState: unknown state, TraceId: 01jr88rn8z00r0knnfptkme7af, Session actor destroyed 2025-04-07T13:59:14.587927Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZTgzOGM2ZjItYjk3Y2RhN2MtZjVkZGQ3ZWItMjc4NGI4ZDA=, ActorId: [5:7490570452417710065:2307], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:14.587976Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZTgzOGM2ZjItYjk3Y2RhN2MtZjVkZGQ3ZWItMjc4NGI4ZDA=, ActorId: [5:7490570452417710065:2307], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:14.588007Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZTgzOGM2ZjItYjk3Y2RhN2MtZjVkZGQ3ZWItMjc4NGI4ZDA=, ActorId: [5:7490570452417710065:2307], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:14.588034Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZTgzOGM2ZjItYjk3Y2RhN2MtZjVkZGQ3ZWItMjc4NGI4ZDA=, ActorId: [5:7490570452417710065:2307], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:14.588104Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZTgzOGM2ZjItYjk3Y2RhN2MtZjVkZGQ3ZWItMjc4NGI4ZDA=, ActorId: [5:7490570452417710065:2307], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:14.648399Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, acquire mvcc snapshot 2025-04-07T13:59:14.658011Z node 5 :KQP_SESSION TRACE: SessionId: 
ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, read snapshot result: UNAVAILABLE, step: 1744034354216, tx id: 18446744073709551615 2025-04-07T13:59:14.658076Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, ExecutePhyTx, tx: 0x000050C0000B0F98 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:14.658109Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Sending to Executer TraceId: 0 8 2025-04-07T13:59:14.658187Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Created new KQP executer: [5:7490570516842221575:2833] isRollback: 0 2025-04-07T13:59:14.666578Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:59:14.666719Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 18.321 QueriesCount: 2 2025-04-07T13:59:14.666852Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:14.667133Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:14.667160Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, EndCleanup, isFinal: 0 2025-04-07T13:59:14.667209Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ExecuteState, TraceId: 01jr88rn7w3hym0d37bbxsx0t0, Sent query response back to proxy, proxyRequestId: 89, proxyId: [5:7490570422352938377:2060] 2025-04-07T13:59:14.668407Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:14.668472Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: 
[5:7490570516842221542:2833], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-04-07T13:59:14.668527Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ReadyState, Created new KQP executer: [5:7490570516842221585:2833] isRollback: 1 2025-04-07T13:59:14.668570Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:14.669081Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-07T13:59:14.669104Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:14.669186Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTM0MGIyZjMtYWY3NGNjMGUtYTU1YTcyOTktNDM2M2QyMzQ=, ActorId: [5:7490570516842221542:2833], ActorState: unknown state, Session actor destroyed >> TPartitionTests::AfterRestart_1 >> KqpErrors::ResolveTableError [GOOD] >> TPartitionTests::AfterRestart_1 [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TPartitionTests::AfterRestart_2 >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore >> TGRpcCmsTest::DescribeOptionsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-04-07T13:59:14.143483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:14.145277Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f98/r3tmp/tmpPeyBfU/pdisk_1.dat 2025-04-07T13:59:14.838782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:15.157078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:15.255113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:15.255262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:15.268942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:15.269040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:15.287637Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:59:15.288173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:15.288595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:15.722060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.308571Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-07T13:59:17.308634Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-07T13:59:17.308721Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T13:59:17.308770Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T13:59:17.308821Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T13:59:17.311556Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-07T13:59:17.322363Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2025-04-07T13:59:17.322441Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-07T13:59:17.322499Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T13:59:17.322547Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T13:59:17.322603Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T13:59:17.323258Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-07T13:59:17.323742Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1481:2908] TxId: 0. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2025-04-07T13:59:17.323811Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-04-07T13:59:17.323881Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-04-07T13:59:17.323962Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T13:59:17.324194Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-04-07T13:59:17.324425Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T13:59:17.324574Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-07T13:59:17.327883Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2025-04-07T13:59:17.328024Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. 
Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-07T13:59:17.328540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-07T13:59:17.328607Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2025-04-07T13:59:17.353434Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1481 RawX2: 4294970204 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Rows: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=" } RequestContext { key: "TraceId" value: "01jr88rqrq8y7md47faa6kpzmk" } 
EnableSpilling: false 2025-04-07T13:59:17.353711Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-04-07T13:59:17.353820Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-07T13:59:17.353954Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T13:59:17.354021Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-04-07T13:59:17.354077Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-07T13:59:17.354150Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-07T13:59:17.354212Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T13:59:17.404358Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-07T13:59:17.404562Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. 
Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-04-07T13:59:17.404607Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-04-07T13:59:17.404657Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1481:2908] TxId: 281474976715658. Ctx: { TraceId: 01jr88rqrq8y7md47faa6kpzmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1YjM5NzEtNjYyMDNlNzctNjkyZDI3Y2ItY2E2MmIxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T13:59:17.441364Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1496:2927], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-04-07T13:59:17.442894Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmQ2MTY0MC1iYzQ3MTBlYi1mNDRhYjBmOC0yZWJkMTA4Yw==, ActorId: [1:1494:2925], ActorState: ExecuteState, TraceId: 01jr88rr50c5v4x9hncnnw9pg1, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> TPartitionTests::AfterRestart_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:58:47.195342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:47.195439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:47.195472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:47.195533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:47.195595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:47.195631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:47.195687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:47.196047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:47.260219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:47.260256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:47.280870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:47.280996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:47.281171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:47.301388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:47.301568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:47.302171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.302391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:47.304875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:47.306090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:47.306143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
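The schemeshard trace around this point belongs to TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true; the excerpt only shows the generic subdomain bootstrap, but the feature under test is a table whose rows expire through a TTL column. At the user level such a table is created roughly as in the sketch below, written against the table client; the table and column names are illustrative, and the unit test itself drives the schemeshard test harness rather than the SDK.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig().SetEndpoint("localhost:2135").SetDatabase("/Root"));
    TTableClient client(driver);

    auto sessionResult = client.GetSession().GetValueSync();
    Y_ABORT_UNLESS(sessionResult.IsSuccess());
    auto session = sessionResult.GetSession();

    // TTL ties row expiration to a timestamp column: rows whose modified_at is
    // older than the interval become eligible for background deletion.
    auto status = session.ExecuteSchemeQuery(R"(
        CREATE TABLE `/Root/TTLEnabledTable` (
            Key Uint64,
            modified_at Timestamp,
            PRIMARY KEY (Key)
        ) WITH (
            TTL = Interval("PT1H") ON modified_at
        );
    )").GetValueSync();
    Y_ABORT_UNLESS(status.IsSuccess(), "%s", status.GetIssues().ToString().c_str());

    driver.Stop(true);
    return 0;
}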
2025-04-07T13:58:47.306253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:47.306299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:47.306332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:47.306489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.312301Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:58:47.491922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:47.492192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.492418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:47.492633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:47.492737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.495017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.495147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:47.495345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.495424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:47.495480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:47.495512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:47.497547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.497605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:47.497637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:47.499830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.499875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.499913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.499979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.503429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:47.505102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:47.505308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:47.506263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:47.506392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:47.506449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.506703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:47.506753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:47.506915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:47.507001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:47.508949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:47.508991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:47.509192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:47.509270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:47.509525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:47.509567Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:47.509647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:47.509681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.509730Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:47.509767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:47.509819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:47.509848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:47.509902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:47.509934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:47.509962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:58:47.511634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:47.511724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:58:47.511754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:58:47.511801Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:58:47.511852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:47.511970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-07T13:59:19.307658Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:19.307713Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:19.307866Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T13:59:19.308071Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:19.308111Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T13:59:19.308155Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T13:59:19.308784Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:19.308842Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:59:19.310950Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:19.311167Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:19.311208Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:59:19.311247Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T13:59:19.311288Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:19.312024Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:19.312112Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T13:59:19.312145Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T13:59:19.312182Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T13:59:19.312219Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:59:19.312294Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-07T13:59:19.314041Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1337 } } 2025-04-07T13:59:19.314090Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:19.314229Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1337 } } 2025-04-07T13:59:19.314334Z node 28 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1337 } } FAKE_COORDINATOR: Erasing txId 101 2025-04-07T13:59:19.316220Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:19.316271Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:19.316394Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:19.316443Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:59:19.316528Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 303 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T13:59:19.316590Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:19.316630Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:19.316715Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:59:19.316759Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T13:59:19.317839Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:59:19.320076Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T13:59:19.320207Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:19.320338Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:19.320443Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T13:59:19.320486Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T13:59:19.320596Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T13:59:19.320632Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:59:19.320698Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T13:59:19.320767Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:339:2314] message: TxId: 101 2025-04-07T13:59:19.320814Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T13:59:19.320861Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T13:59:19.320895Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T13:59:19.321033Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:59:19.323129Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T13:59:19.323178Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:340:2315] TestWaitNotification: OK eventTxId 101 2025-04-07T13:59:19.323684Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:59:19.323893Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 241us result status StatusSuccess 2025-04-07T13:59:19.324409Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } 
} } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessionsPool::Get1Session ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-04-07T13:59:17.543748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:59:17.544069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:17.544291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a7/r3tmp/tmpqg4C8a/pdisk_1.dat 2025-04-07T13:59:18.036371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T13:59:18.036677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.036897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T13:59:18.037136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:18.037239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.038006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:18.038215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:18.038402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.038460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:18.038496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:18.038527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:18.039080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.039141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:18.039179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:18.039604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.039634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.039684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:59:18.039733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T13:59:18.043490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:18.043953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:18.044144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T13:59:18.045141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:18.045197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T13:59:18.045272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:18.073502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T13:59:18.073574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:18.126588Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:59:18.127509Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:59:18.127685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:18.127788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:18.141880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:18.270694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:18.270856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T13:59:18.270914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:59:18.271146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:18.271201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T13:59:18.271402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:59:18.271510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:59:18.272635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:18.272811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:59:18.273037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:18.273087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T13:59:18.273177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.273235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T13:59:18.273337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:18.273376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:18.273419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:18.273477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:18.273517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:18.273552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:18.273644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T13:59:18.273687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T13:59:18.273720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T13:59:18.276232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T13:59:18.276341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T13:59:18.276386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T13:59:18.276424Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:18.276474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T13:59:18.276576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T13:59:18.276616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T13:59:18.281653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T13:59:18.282344Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:59:18.282398Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:59:18.282569Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] 
Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T13:59:18.301641Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:59:18.302313Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:59:18.302436Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:59:18.302789Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:59:18.302984Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057 ... .TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-07T13:59:19.115204Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-07T13:59:19.115301Z node 1 :TX_PROXY DEBUG: Actor# [1:792:2647] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-07T13:59:19.115720Z node 1 :TX_PROXY DEBUG: Actor# [1:792:2647] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T13:59:19.115792Z node 1 :TX_PROXY DEBUG: Actor# [1:792:2647] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-04-07T13:59:19.116792Z node 1 :TX_PROXY DEBUG: Actor# [1:792:2647] Handle TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false 
} KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T13:59:19.117489Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:794:2649] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:59:19.117799Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:794:2649] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T13:59:19.118113Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:794:2649] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T13:59:19.118249Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:794:2649] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> YdbSdkSessionsPool::CustomPlan >> YdbSdkSessionsPool::RunSmallPlan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-04-07T13:59:14.174676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570517283831559:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:14.174826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011fb/r3tmp/tmpT8Zgh2/pdisk_1.dat 2025-04-07T13:59:15.127139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:15.184063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:15.184173Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:15.184466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:15.198429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9075, node 1 2025-04-07T13:59:15.542629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:15.542657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:15.542669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:15.542779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:16.354023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.364476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:16.374489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.377769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:16.378005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:16.378021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:16.380507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:16.380539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T13:59:16.385396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.390236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034356435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:16.390291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:16.393192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:16.393846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:16.399219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:16.399436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:16.399494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:16.399585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:16.399624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:16.399682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:16.402801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:16.402852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:16.402870Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:16.402963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:28970 2025-04-07T13:59:16.869691Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2025-04-07T13:59:16.869723Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2025-04-07T13:59:16.877431Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2025-04-07T13:59:16.981660Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7490570525873766823:2287], Recipient [1:7490570517283831894:2192]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2025-04-07T13:59:16.981714Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-04-07T13:59:16.989973Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units 
{ kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } >> DataShardVolatile::TwoAppendsMustBeVolatile [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore >> KqpCost::IndexLookupAtLeast8BytesInStorage-StreamLookup |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_2 [GOOD] Test command err: 2025-04-07T13:58:52.784414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570423178630491:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:52.784452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:58:52.989839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570422705149279:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:52.989877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b3f/r3tmp/tmp5I0dVE/pdisk_1.dat 2025-04-07T13:58:53.489264Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:53.495196Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:53.884831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:54.045286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:54.042576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:54.099668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:54.099759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:54.105597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-07T13:58:54.105669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:54.115368Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:58:54.117161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:54.121159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27048, node 1 2025-04-07T13:58:54.595046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b3f/r3tmp/yandexbAhxRM.tmp 2025-04-07T13:58:54.595070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b3f/r3tmp/yandexbAhxRM.tmp 2025-04-07T13:58:54.595191Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b3f/r3tmp/yandexbAhxRM.tmp 2025-04-07T13:58:54.595276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:58:54.699534Z INFO: TTestServer started on Port 3098 GrpcPort 27048 TClient is connected to server localhost:3098 PQClient connected to localhost:27048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:55.443639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:58:55.556410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-07T13:58:55.830612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720661, at schemeshard: 72057594046644480 2025-04-07T13:58:57.788811Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570423178630491:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:57.788873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:57.992850Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570422705149279:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:57.992911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:59.653541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570452769920727:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:59.653765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:59.657094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570452769920754:2292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:59.671720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-07T13:58:59.736926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570452769920756:2293], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-07T13:59:00.272115Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570457538370056:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:59:00.270580Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570452769920790:2297], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:59:00.272502Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2I0NjllZjUtZDI3OWY0ZGItMWIzYzFhZTYtMTM0YmUzNzg=, ActorId: [2:7490570452769920725:2288], ActorState: ExecuteState, TraceId: 01jr88r6sr0t28er6g9157wdcq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:59:00.273484Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njg3ODE2OGQtOGNlOTI2MTMtOTUxODUzNzQtYjdjNDE3YmQ=, ActorId: [1:7490570453243402714:2309], ActorState: ExecuteState, TraceId: 01jr88r74j55gf7qjkcjf5w0hr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:59:00.274849Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:59:00.275111Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:59:00.327446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.456579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:00.727098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:59:01.235716Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jr88r80gf10xbab6fzv07p62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q3MDlmMWQtNmMwMDJkYmUtYWE1OTFmMmQtNjZhZDc1NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570461833337798:3093] === CheckClustersList. 
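The SCHEME_ERROR above appears to be benign at this point: the test reads `/Root/PQ/Config/V2/Cluster` before the "=== Init DC" step has created and populated that table. A minimal YQL sketch of that bootstrap follows; the CREATE TABLE part (column types and primary key) is an assumption inferred from the columns used in the UPSERT, and only the UPSERT values themselves come from the log above:

--!syntax_v1
-- Hypothetical schema for the cluster registry; types and key are assumed, not taken from the test sources.
CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
    name Utf8,        -- cluster name, e.g. "dc1"
    balancer Utf8,    -- balancer endpoint, e.g. "localhost"
    local Bool,       -- true for the local data center
    enabled Bool,
    weight Uint64,
    PRIMARY KEY (name)
);
-- The values below are exactly the ones the test inserts in the "=== Init DC" step.
UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
VALUES ("dc1", "localhost", true, true, 1000),
       ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);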
Ok 2025-04-07T13:59:06.808534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720674:1, at schemeshard: 72057594046644480 2025-04-07T13:59:09.048769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:09.050795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:09.192906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... "01jr88rnsd430szasfpp9vsd5r" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744034353956 } items { uint64_value: 1744034353956 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 204 2025-04-07T13:59:15.010263Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-07T13:59:15.010294Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-07T13:59:15.010431Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [1:7490570521962881742:4132], Recipient [1:7490570483308174737:3356]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T13:59:15.010492Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [1:7490570517667914389:4132], Recipient [1:7490570483308174737:3356]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-07T13:59:15.010569Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [1:7490570483308174737:3356], Recipient [1:7490570517667914389:4132]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-07T13:59:15.010603Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-04-07T13:59:15.010847Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [1:7490570517667914389:4132], Recipient [1:7490570483308174737:3356]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2025-04-07T13:59:15.211193Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [1:7490570423178630730:2256], Recipient [1:7490570517667914389:4132]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=1&id=MjM1YTg1ZTItY2NlNDM2NjYtZjVmYTk1NTAtN2IzMDNiMjA=" PreparedQuery: "d7f0b593-ada0f9a8-bcd27b4-e682828b" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 98 
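For readability, a rough YQL reconstruction of the partition chooser's "Update the table" step logged above. It is inferred only from the declared query parameters in the response ($Hash, $Topic, $SourceId, $Partition, $SeqNo, $CreateTime, $AccessTime) and from the SELECT over `//Root/.metadata/TopicPartitionsMapping` that follows, so the exact statement and the mapping of $SourceId onto the ProducerId column are assumptions:

--!syntax_v1
-- Parameter types follow the Scheme ids printed in the log (2 = Uint32, 4 = Uint64, 4608 = Utf8).
DECLARE $Hash AS Uint64;
DECLARE $Topic AS Utf8;
DECLARE $SourceId AS Utf8;
DECLARE $Partition AS Uint32;
DECLARE $SeqNo AS Uint64;
DECLARE $CreateTime AS Uint64;
DECLARE $AccessTime AS Uint64;

-- Hypothetical upsert into the mapping table the chooser later reads back with
-- SELECT Partition, SeqNo ... WHERE Hash = ... AND Topic = ... AND ProducerId = ...
UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
    (Hash, Topic, ProducerId, Partition, SeqNo, CreateTime, AccessTime)
VALUES ($Hash, $Topic, $SourceId, $Partition, $SeqNo, $CreateTime, $AccessTime);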
2025-04-07T13:59:15.211242Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T13:59:15.211279Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-04-07T13:59:15.211303Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7490570517667914389:4132] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-04-07T13:59:15.470668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720713. Ctx: { TraceId: 01jr88rp1v9gtfnstpbyhp91q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjMxYWRhYzktNTEwMGYyMDMtYzNjMDdlZTAtMjY3YmY0N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T13:59:16.238038Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490570526257849177:2716] TxId: 281474976720714. Ctx: { TraceId: 01jr88rpk65x3fa4pcspjfayfs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI3NDgxZDctY2E4YWZiZWUtMTg0MzI5NjMtNWU2N2NmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-07T13:59:16.256155Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490570526257849181:2716], TxId: 281474976720714, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjI3NDgxZDctY2E4YWZiZWUtMTg0MzI5NjMtNWU2N2NmNjY=. CustomerSuppliedId : . TraceId : 01jr88rpk65x3fa4pcspjfayfs. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490570526257849177:2716], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T13:59:18.388753Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:18.388840Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:18.408010Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:175:2190] 2025-04-07T13:59:18.409152Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:59:19.219352Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:19.219420Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:19.239474Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:19.241708Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T13:59:19.000000Z 2025-04-07T13:59:19.241783Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\330\315\261\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-04-07T13:59:20.089389Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:20.089456Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:20.106118Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:20.108365Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T13:59:20.000000Z 2025-04-07T13:59:20.108442Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> YdbSdkSessionsPool::StressTestSync1 >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsBasic ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2025-04-07T13:59:15.251497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570521668241371:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:15.257007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a93/r3tmp/tmpgC2I21/pdisk_1.dat 2025-04-07T13:59:16.180334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:16.184929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:16.194293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:16.263227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:16.293002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 23549, node 1 2025-04-07T13:59:16.357794Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:16.390279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:16.390843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:16.390986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:59:16.400365Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:16.561204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-07T13:59:16.561243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:16.561248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:16.561321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:59:16.610377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:16.611004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:16.611027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:16.611070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:59:16.611144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:17.158172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.166239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:17.167864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.173498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:17.173675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:17.173712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:59:17.176002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:17.176030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:17.177712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.181101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034357226, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:17.181129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:17.181410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:17.182742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:17.183169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:17.183295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:17.183350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:17.183432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:17.183463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:17.183498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:17.194634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:17.194732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:17.194751Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:17.194864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:20.156872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570521668241371:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:20.156919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapUpdate |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TxUsage::WriteToTopic_Demo_27 [GOOD] >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> KqpCost::IndexLookup+StreamLookup [GOOD] >> YdbSdkSessionsPool::WaitQueue1 [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] |88.9%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21570, MsgBus: 22323 2025-04-07T13:59:14.717290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570516045693143:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:14.717715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00173d/r3tmp/tmpDL5e4a/pdisk_1.dat 2025-04-07T13:59:15.523795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:15.544941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:15.545074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:15.548410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21570, node 1 2025-04-07T13:59:15.873210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:15.873251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:15.873259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:15.873333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22323 TClient is connected to server localhost:22323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:16.802414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:16.827932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:16.842149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:17.028786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:17.288462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:17.373641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:19.754980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570537520531172:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:19.755356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:19.770047Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570516045693143:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:19.770101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:19.802506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.835549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.881338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.978571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.062885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.144715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.254412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570541815498983:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:20.254493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:20.254827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570541815498988:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:20.259073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:20.269018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:59:20.269193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570541815498990:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:21.581148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-04-07T13:59:17.282745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570526918855943:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:17.282809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a6b/r3tmp/tmpckNVrg/pdisk_1.dat 2025-04-07T13:59:17.945663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:17.961856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:17.961932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:17.991170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22737, node 1 2025-04-07T13:59:18.385124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:18.385144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:18.385150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:18.385266Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:19.146184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.152004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:19.153499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.158665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:19.158843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:19.158857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:59:19.165589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:19.165622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:19.167499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.179754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:19.182474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034359228, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:19.182503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:19.182775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:19.189561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:19.189791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:19.189841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:19.189919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:19.189956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:19.190008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:19.193351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:19.193390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:19.193402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:19.193510Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:22.283661Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570526918855943:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:22.283735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> KqpCost::Range |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-04-07T13:58:54.533308Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:58:54.549248Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:58:54.549567Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T13:58:54.549627Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:58:54.549667Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T13:58:54.549718Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:58:54.549767Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:54.549833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T13:58:54.549868Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:58:54.645849Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:54.645928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2208], now have 1 active actors on pipe 2025-04-07T13:58:54.646029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:58:54.673409Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.679345Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T13:58:54.679455Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:54.680752Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:54.680880Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:58:54.681232Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:58:54.681595Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:211:2215] 2025-04-07T13:58:54.682433Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-04-07T13:58:54.682495Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:211:2215] 2025-04-07T13:58:54.682544Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:54.683052Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T13:58:54.683107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T13:58:54.683258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:54.683450Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:58:54.683767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.688595Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:54.689043Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:54.689093Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:218:2220], now have 1 active actors on pipe 2025-04-07T13:58:54.690640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:54.690692Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:223:2224], now have 1 active actors on pipe 2025-04-07T13:58:54.691459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-07T13:58:54.691509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T13:58:54.691607Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-07T13:58:54.691650Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:58:54.691685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T13:58:54.691834Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:58:54.691927Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.695227Z node 1 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:54.695357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T13:58:54.695389Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T13:58:54.695646Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-07T13:58:54.695688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T13:58:54.695757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-07T13:58:54.695813Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:58:54.695871Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-07T13:58:54.696020Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:58:54.696116Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.699052Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T13:58:54.699101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARING 2025-04-07T13:58:54.699154Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.705855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67891 AckTo { RawX1: 173 RawX2: 4294969484 } } Step: 100 2025-04-07T13:58:54.705936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARED 2025-04-07T13:58:54.705973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PLANNING 2025-04-07T13:58:54.706013Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67891 2025-04-07T13:58:54.706159Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PLANNED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:58:54.706235Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T13:58:54.706534Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 4294969484 } } Step: 200 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:54.719190Z node 1 :PERSQUEUE DEBUG: ... 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2205 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:22.833904Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1200 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:22.834010Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-66 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:22.834082Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1377 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:22.864321Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:22.960223Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:23.258443Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_27 [GOOD] Test command err: 2025-04-07T13:56:29.145016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569806060053064:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:29.145058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:29.281906Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001649/r3tmp/tmpolPuV7/pdisk_1.dat 2025-04-07T13:56:29.460249Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9364, node 1 2025-04-07T13:56:29.529126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:29.529227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:29.538832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:29.561052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001649/r3tmp/yandex3PialP.tmp 2025-04-07T13:56:29.561076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001649/r3tmp/yandex3PialP.tmp 2025-04-07T13:56:29.561254Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001649/r3tmp/yandex3PialP.tmp 2025-04-07T13:56:29.561380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:29.611681Z INFO: TTestServer started on Port 28678 GrpcPort 9364 TClient is connected to server localhost:28678 PQClient connected to localhost:9364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:29.848786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:29.858283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:29.880863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:56:31.860002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569814649988429:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.860003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569814649988453:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.860157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.863196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569814649988487:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.863277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.863849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:31.872515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569814649988458:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:32.114623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:32.126567Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569818944955826:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:32.126719Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdlM2UzMTEtMTA4YmEyNzItZmE2MWM3OTYtMmRjZTlmZTU=, ActorId: [1:7490569814649988426:2307], ActorState: ExecuteState, TraceId: 01jr88kpf9204xy6bkq2k6ny65, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:32.128387Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:32.147540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:32.255532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569818944956101:2616] 2025-04-07T13:56:34.146422Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569806060053064:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:34.148798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:38.351931Z :WriteToTopic_Demo_22_RestartNo INFO: TTopicSdkTestSetup started 2025-04-07T13:56:38.370017Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:38.384063Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569844714760189:2790] connected; active server actors: 1 2025-04-07T13:56:38.384553Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:38.386002Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:38.386128Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:38.386525Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:38.389018Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:38.389228Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:38.389715Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:38.389879Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:38.389904Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:38.389920Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:38.389956Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:38.389983Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:38.390025Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:38.390039Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:38.397628Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.397642Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:38.397675Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569844714760209:2437], now have 1 active actors on pipe 2025-04-07T13:56:38.429175Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.429216Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569844714760187:2789], now have 1 active actors on pipe 2025-04-07T13:56:38.443255Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569806060053473 RawX2: 4294969485 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } Explicit ... 
ad failed 2025-04-07T13:59:22.994503Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_6359067112032550654_v1 grpc closed 2025-04-07T13:59:22.994544Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_6359067112032550654_v1 is DEAD 2025-04-07T13:59:22.994888Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:22.994915Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_6359067112032550654_v1 2025-04-07T13:59:22.994950Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570497131959478:2530] destroyed 2025-04-07T13:59:22.994995Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_6359067112032550654_v1 2025-04-07T13:59:22.995206Z :INFO: [/Root] [/Root] [c8d7a600-44e89695-6a9ec1c2-75225213] Closing read session. Close timeout: 0.000000s 2025-04-07T13:59:22.995240Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_C:0:1:3:4 2025-04-07T13:59:22.995262Z :INFO: [/Root] [/Root] [c8d7a600-44e89695-6a9ec1c2-75225213] Counters: { Errors: 0 CurrentSessionLifetimeMs: 8192 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 40 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T13:59:22.995317Z :NOTICE: [/Root] [/Root] [c8d7a600-44e89695-6a9ec1c2-75225213] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T13:59:22.995263Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7490570497131959475:2527] disconnected; active server actors: 1 2025-04-07T13:59:22.995346Z :DEBUG: [/Root] [/Root] [c8d7a600-44e89695-6a9ec1c2-75225213] [] Abort session to cluster 2025-04-07T13:59:22.995280Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7490570497131959475:2527] client test-consumer disconnected session test-consumer_10_1_6359067112032550654_v1 2025-04-07T13:59:22.995576Z :NOTICE: [/Root] [/Root] [c8d7a600-44e89695-6a9ec1c2-75225213] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T13:59:22.996588Z node 10 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_10_3_17577385202688969782_v1 grpc read done: success# 0, data# { } 2025-04-07T13:59:22.996608Z node 10 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_10_3_17577385202688969782_v1 grpc read failed 2025-04-07T13:59:22.996639Z node 10 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_10_3_17577385202688969782_v1 grpc closed 2025-04-07T13:59:22.996687Z node 10 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_10_3_17577385202688969782_v1 is DEAD 2025-04-07T13:59:22.997057Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:22.997079Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session test-consumer_10_3_17577385202688969782_v1 2025-04-07T13:59:22.997106Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [10:7490570514311828991:2627] destroyed 2025-04-07T13:59:22.997138Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_3_17577385202688969782_v1 2025-04-07T13:59:22.997382Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic_C] pipe [10:7490570514311828988:2622] disconnected; active server actors: 1 2025-04-07T13:59:22.997404Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic_C] pipe [10:7490570514311828988:2622] client test-consumer disconnected session test-consumer_10_3_17577385202688969782_v1 2025-04-07T13:59:22.999237Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-07T13:59:22.999289Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:22.999329Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:22.999647Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:22.999682Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:59:23.000298Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0 grpc read done: success: 0 data: 2025-04-07T13:59:23.000317Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0 grpc read failed 2025-04-07T13:59:23.000337Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0 grpc closed 2025-04-07T13:59:23.000349Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|571cca9e-32d12555-8a570fbf-19bdbf4b_0 is DEAD 2025-04-07T13:59:23.001258Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037898 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:23.001308Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037898 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:23.001346Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037898 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:23.001468Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:23.001499Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [10:7490570505721894176:2562] destroyed 2025-04-07T13:59:23.001520Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:23.001538Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [10:7490570531491698363:2562] destroyed 2025-04-07T13:59:23.001548Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:23.001801Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T13:59:23.001563Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [10:7490570505721894179:2562] destroyed 2025-04-07T13:59:23.001823Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:23.001578Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T13:59:23.001846Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:23.002082Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:23.002101Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:59:23.003548Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T13:59:23.003578Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:23.003610Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:23.004206Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-07T13:59:23.005490Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 6 sessionId: test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0 grpc read done: success: 0 data: 2025-04-07T13:59:23.005496Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0 grpc read done: success: 0 data: 2025-04-07T13:59:23.005514Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0 grpc read failed 2025-04-07T13:59:23.005520Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0 grpc read failed 2025-04-07T13:59:23.005538Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0 grpc closed 2025-04-07T13:59:23.005549Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0 grpc closed 2025-04-07T13:59:23.005554Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0 is DEAD 2025-04-07T13:59:23.005567Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id|c0bbdea2-80283e57-273d2450-53aa7a22_0 is DEAD 2025-04-07T13:59:23.006147Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:23.006231Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:23.006340Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:23.006370Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7490570497131959460:2521] destroyed 2025-04-07T13:59:23.006391Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:23.006410Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe 
[10:7490570497131959438:2516] destroyed 2025-04-07T13:59:23.006447Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:59:23.006504Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:59:23.008785Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:23.009922Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-04-07T13:59:23.009954Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-04-07T13:59:23.010014Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d9739b5f-c9bd9fa8-3c4e60f1-43ebc9fc_0] PartitionId [0] Generation [1] Write session: destroy >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> TKesusTest::TestRegisterProxy >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] Test command err: 2025-04-07T13:57:53.816965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570169687851685:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:53.817051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea1/r3tmp/tmpOl9yaw/pdisk_1.dat 2025-04-07T13:57:54.244075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:54.251239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:54.251329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:54.257934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13285, node 1 2025-04-07T13:57:54.393000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:54.393021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:54.393027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:54.393153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:54.793629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:54.906192Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:57:56.934409Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:57:56.949607Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjRjNjZjYzYtMTQ3NzYwODYtOTdjN2I1MGItYzcxYTY4ZQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjRjNjZjYzYtMTQ3NzYwODYtOTdjN2I1MGItYzcxYTY4ZQ== 2025-04-07T13:57:56.950214Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T13:57:56.950233Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:57:56.950249Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:57:56.950291Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570182572754402:2300], Start check tables existence, number paths: 2 2025-04-07T13:57:56.971743Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjRjNjZjYzYtMTQ3NzYwODYtOTdjN2I1MGItYzcxYTY4ZQ==, ActorId: [1:7490570182572754403:2301], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:56.972116Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570182572754402:2300], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:57:56.972158Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570182572754402:2300], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:57:56.972185Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570182572754402:2300], Successfully finished 2025-04-07T13:57:56.972303Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:57:57.024335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:57:57.121689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-07T13:57:57.121771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:57.126500Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T13:57:57.127279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:57.315481Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. EnableColumnStatistics=false 2025-04-07T13:57:57.321985Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:57.372494Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:57.372571Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:57.384052Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:57.458267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:57.505986Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570185220234392:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:57.506030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:57:57.634389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:57.634463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:57.703554Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:57:57.704654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:57.899331Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. 
EnableColumnStatistics=false 2025-04-07T13:57:57.918440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:58.013461Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:58.013548Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:58.023205Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:58.167149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:58.211253Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.211383Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.211454Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.211519Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.211587Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.211638Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T13:57:58.418326Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:58.820634Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570169687851685:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:58.820717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:00.891275Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:00.891551Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490570198031100506:2316], Start check tables existence, number paths: 2 2025-04-07T13:58:00.891639Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:00.891665Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:00.896817Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T13:58:00.896875Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490570198031100506:2316], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:58:00.896940Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490570198031100506:2316], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:58:00.896983Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490570198031100506:2316], Successfully finished 2025-04-07T13:58:00.897111Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:58:01.613006Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service 
initialization 2025-04-07T13:58:01.613084Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570202400104466:2345], Start check tables existence, number paths: 2 2025-04-07T13:58:01.613230Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:01.613244Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:01.613622Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7490570202400104468:2347], Database: /Root/test-serverless, Start database fetching 2025-04-07T13:58:01.615395Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7490570202400104468:2347], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 20 ... n/3?node_id=7&id=MTk4NWYwOC02NzU0YTIzYi03YTg3MzM0ZC02NjgwMTZhMA==, ActorId: [7:7490570553960462964:2862], ActorState: unknown state, TraceId: 01jr88ry265j8nbc72dry0xy2g, Cleanup temp tables: 0 2025-04-07T13:59:23.689253Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MTk4NWYwOC02NzU0YTIzYi03YTg3MzM0ZC02NjgwMTZhMA==, ActorId: [7:7490570553960462964:2862], ActorState: unknown state, TraceId: 01jr88ry265j8nbc72dry0xy2g, Session actor destroyed 2025-04-07T13:59:23.689789Z node 7 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, read snapshot result: UNAVAILABLE, step: 1744034363001, tx id: 18446744073709551615 2025-04-07T13:59:23.689849Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, ExecutePhyTx, tx: 0x000050C00033A9D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:23.689888Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, Sending to Executer TraceId: 0 8 2025-04-07T13:59:23.689959Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, Created new KQP executer: [7:7490570553960462988:2857] isRollback: 0 2025-04-07T13:59:23.700256Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:59:23.700344Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YWI4ZGQ1NjgtZDhmMTQ1YTgtNTQwZjlhYWQtMWNmNzZhYTU=, ActorId: [7:7490570489535951409:2303], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:23.700389Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YWI4ZGQ1NjgtZDhmMTQ1YTgtNTQwZjlhYWQtMWNmNzZhYTU=, ActorId: [7:7490570489535951409:2303], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:23.700420Z node 7 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 19.117 QueriesCount: 2 2025-04-07T13:59:23.700423Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YWI4ZGQ1NjgtZDhmMTQ1YTgtNTQwZjlhYWQtMWNmNzZhYTU=, ActorId: [7:7490570489535951409:2303], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:23.700452Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YWI4ZGQ1NjgtZDhmMTQ1YTgtNTQwZjlhYWQtMWNmNzZhYTU=, ActorId: [7:7490570489535951409:2303], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:23.700542Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YWI4ZGQ1NjgtZDhmMTQ1YTgtNTQwZjlhYWQtMWNmNzZhYTU=, ActorId: [7:7490570489535951409:2303], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:23.700553Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:23.701162Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:23.701207Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, EndCleanup, isFinal: 0 2025-04-07T13:59:23.701306Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ExecuteState, TraceId: 01jr88ry1h2wczg19t54qw7sk7, Sent query response back to proxy, proxyRequestId: 91, proxyId: [7:7490570468061114545:2256] 2025-04-07T13:59:23.702754Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:23.702847Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-04-07T13:59:23.702928Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ReadyState, Created new KQP executer: [7:7490570553960463005:2857] isRollback: 1 2025-04-07T13:59:23.702973Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:23.703529Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], 
ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-07T13:59:23.703560Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:23.703656Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjBmM2FkOWItOTYwMDlmMzYtYWZiMjUxZDQtNWQ3MGU0MTI=, ActorId: [7:7490570553960462953:2857], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:23.860322Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, acquire mvcc snapshot 2025-04-07T13:59:23.865423Z node 7 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, read snapshot result: UNAVAILABLE, step: 1744034363736, tx id: 18446744073709551615 2025-04-07T13:59:23.865503Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, ExecutePhyTx, tx: 0x000050C000250E58 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:23.865546Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, Sending to Executer TraceId: 0 8 2025-04-07T13:59:23.865773Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, Created new KQP executer: [7:7490570553960463018:2865] isRollback: 0 2025-04-07T13:59:23.870149Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-07T13:59:23.870296Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 9.977 QueriesCount: 2 2025-04-07T13:59:23.870485Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:23.870821Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:23.870851Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: 
[7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, EndCleanup, isFinal: 0 2025-04-07T13:59:23.870918Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ExecuteState, TraceId: 01jr88ry911mydz5e07b2nf7gk, Sent query response back to proxy, proxyRequestId: 94, proxyId: [7:7490570468061114545:2256] 2025-04-07T13:59:23.872054Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:23.872128Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-04-07T13:59:23.872199Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ReadyState, Created new KQP executer: [7:7490570553960463028:2865] isRollback: 1 2025-04-07T13:59:23.872237Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:23.874679Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-07T13:59:23.874716Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:23.874834Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTNkNTg5YzctYjQzN2NmNmQtODQzM2MxZTYtNjRlZTdkOWM=, ActorId: [7:7490570553960462972:2865], ActorState: unknown state, Session actor destroyed >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore >> KqpOlapAggregations::Blocks_NoAggPushdown >> TxUsage::WriteToTopic_Demo_48 [GOOD] >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestQuoterResourceDescribe >> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-04-07T13:59:19.315681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570537789666495:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:19.315726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a61/r3tmp/tmpsoO9ae/pdisk_1.dat 2025-04-07T13:59:20.064022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:20.064111Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:20.082067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:20.084186Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11678, node 1 2025-04-07T13:59:20.387073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:20.387089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:20.387094Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:20.387210Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:20.841647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.862507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:20.863857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.870744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:20.870936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:20.870952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T13:59:20.873378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:20.873401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:20.874259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:20.875102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:20.888727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034360929, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:20.888760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:20.889014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:20.891189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:20.891361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:20.891408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:20.891488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:20.891519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:20.891574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:20.894484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:20.894524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:20.894536Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:20.894617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:24.319236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570537789666495:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:24.319314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlap::OlapRead_StreamGenericQuery >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> KqpOlapSparsed::AccessorActualization >> KqpOlapAggregations::Json_GetValue_Minus >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceModification >> KqpOlapAggregations::Aggregation_Count_Null >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> 
KqpDatetime64ColumnShard::UseDate32AsPrimaryKey >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2025-04-07T13:59:14.859849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570517559125763:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:14.859962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7b/r3tmp/tmp7hAVsB/pdisk_1.dat 2025-04-07T13:59:15.631165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:15.655616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:15.664883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:15.674159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28070, node 1 2025-04-07T13:59:16.057197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:16.057234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:16.057239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:16.057328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:16.458729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.492433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:16.493860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.497817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:16.498078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:16.498095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:59:16.503488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:16.503523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:16.505232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:16.513463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.529749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034356568, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:16.529791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:16.530036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:16.541621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:16.541824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:16.541869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:16.541967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:16.542008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:16.542065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:16.568829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:16.568912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:16.576708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:16.578185Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:19.806670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570517559125763:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:19.806750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_48 [GOOD] Test command err: 2025-04-07T13:56:28.562483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569802566443356:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:28.562645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001650/r3tmp/tmpOB4pOk/pdisk_1.dat 2025-04-07T13:56:28.765428Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:56:28.974934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:28.981956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:28.982043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:28.984487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12449, node 1 2025-04-07T13:56:29.089319Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001650/r3tmp/yandexHk5KMt.tmp 2025-04-07T13:56:29.089342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001650/r3tmp/yandexHk5KMt.tmp 2025-04-07T13:56:29.089532Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001650/r3tmp/yandexHk5KMt.tmp 2025-04-07T13:56:29.089623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:29.142827Z INFO: TTestServer started on Port 6563 GrpcPort 12449 TClient is connected to server localhost:6563 PQClient connected to localhost:12449 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:29.430532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:29.471885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:29.477084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:31.520966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569815451345865:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.521095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569815451345884:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.521160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:31.525604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:31.534207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569815451345894:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:31.766781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:31.785692Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569815451345970:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:31.787267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDVhNjMwM2ItNTViNzhiYzctMjllOTlkOGItNjgxYzVhZmE=, ActorId: [1:7490569815451345862:2307], ActorState: ExecuteState, TraceId: 01jr88kp4yc6sy46hxkh8cdegb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:31.789842Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:31.814529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:31.895519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569819746313539:2615] 2025-04-07T13:56:33.564822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569802566443356:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:33.564912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:38.222287Z :WriteToTopic_Invalid_Session INFO: TTopicSdkTestSetup started 2025-04-07T13:56:38.237176Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:38.253833Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569845516117642:2805] connected; active server actors: 1 2025-04-07T13:56:38.254424Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:38.255100Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:38.255251Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:38.255821Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:38.260104Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:38.260335Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:38.260966Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:38.261164Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:38.261192Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:38.261205Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:38.261230Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:38.261252Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:38.261294Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:38.261316Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:38.266680Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:38.266684Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.266724Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569845516117664:2436], now have 1 active actors on pipe 2025-04-07T13:56:38.296235Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.296272Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569845516117641:2804], now have 1 active actors on pipe 2025-04-07T13:56:38.320802Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569802566443604 RawX2: 4294969485 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true 
FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" Yd ... sponse tabletId 72075186224037895, partitionId 0, NodeId 10, Generation 1 2025-04-07T13:59:27.021094Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] addPartitionToResponse tabletId 72075186224037894, partitionId 1, NodeId 10, Generation 1 2025-04-07T13:59:27.021115Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] addPartitionToResponse tabletId 72075186224037897, partitionId 2, NodeId 10, Generation 1 2025-04-07T13:59:27.021136Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] addPartitionToResponse tabletId 72075186224037898, partitionId 3, NodeId 10, Generation 1 2025-04-07T13:59:27.021155Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] addPartitionToResponse tabletId 72075186224037900, partitionId 4, NodeId 10, Generation 1 2025-04-07T13:59:27.021171Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] addPartitionToResponse tabletId 72075186224037899, partitionId 5, NodeId 10, Generation 1 2025-04-07T13:59:27.021219Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490570466816454545:2678]: Got location 2025-04-07T13:59:27.022226Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037896][topic_A] pipe [10:7490570466816454551:2681] disconnected; active server actors: 1 2025-04-07T13:59:27.022252Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037896][topic_A] pipe [10:7490570466816454551:2681] disconnected no session 2025-04-07T13:59:27.051785Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0] PartitionId [1] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T13:59:27.051822Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0] PartitionId [1] Generation [1] Write session will now close 2025-04-07T13:59:27.051876Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0] PartitionId [1] Generation [1] Write session: aborting 2025-04-07T13:59:27.052302Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0] PartitionId [1] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:27.052339Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0] PartitionId [1] Generation [1] Write session: destroy 2025-04-07T13:59:27.057005Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-07T13:59:27.057037Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:27.057068Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:27.057395Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:27.057420Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:59:27.055718Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0 grpc read done: success: 0 data: 2025-04-07T13:59:27.055751Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0 grpc read failed 2025-04-07T13:59:27.055780Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0 grpc closed 2025-04-07T13:59:27.055800Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id_2|edb3ec63-9a4eced9-264e0da1-3db8065f_0 is DEAD 2025-04-07T13:59:27.056557Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.056605Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.061257Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.061311Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570441046649975:2512] destroyed 2025-04-07T13:59:27.061336Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.061361Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570441046649972:2512] destroyed 2025-04-07T13:59:27.061401Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::DropOwner. 
2025-04-07T13:59:27.067428Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0 grpc read done: success: 0 data: 2025-04-07T13:59:27.067461Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0 grpc read failed 2025-04-07T13:59:27.067484Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0 grpc closed 2025-04-07T13:59:27.067498Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_3|b8c594f9-9d6cb99e-452c00f-fdf3c3fb_0 is DEAD 2025-04-07T13:59:27.068397Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.068448Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.069079Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.069120Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570441046649947:2507] destroyed 2025-04-07T13:59:27.069140Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.069160Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570441046649942:2507] destroyed 2025-04-07T13:59:27.069200Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:59:27.071669Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0] PartitionId [1] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T13:59:27.071700Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0] PartitionId [1] Generation [1] Write session will now close 2025-04-07T13:59:27.071729Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0] PartitionId [1] Generation [1] Write session: aborting 2025-04-07T13:59:27.072082Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0] PartitionId [1] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:27.072107Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0] PartitionId [1] Generation [1] Write session: destroy 2025-04-07T13:59:27.072862Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-07T13:59:27.072902Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T13:59:27.072937Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T13:59:27.073221Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:27.073247Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T13:59:27.073840Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 6 sessionId: test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0 grpc read done: success: 0 data: 2025-04-07T13:59:27.073874Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0 grpc read failed 2025-04-07T13:59:27.073900Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0 grpc closed 2025-04-07T13:59:27.073918Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: test-message_group_id_4|215c65e6-39868c3a-34a1f4ad-490314d0_0 is DEAD 2025-04-07T13:59:27.074623Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.074670Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.074758Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0 grpc read done: success: 0 data: 2025-04-07T13:59:27.074771Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0 grpc read failed 2025-04-07T13:59:27.074788Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0 grpc closed 2025-04-07T13:59:27.074802Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|cdbc4336-12040dfe-f2d081ff-d17e180f_0 is DEAD 2025-04-07T13:59:27.075415Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.075456Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:27.076403Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.076450Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570441046650041:2522] destroyed 2025-04-07T13:59:27.076474Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.076494Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570441046650038:2522] destroyed 2025-04-07T13:59:27.076516Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.076552Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570441046649909:2499] destroyed 
2025-04-07T13:59:27.076576Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:27.076606Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570441046649906:2499] destroyed 2025-04-07T13:59:27.076648Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::DropOwner. 2025-04-07T13:59:27.076746Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] TPartition::DropOwner. |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-StreamLookup [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> DataShardVolatile::VolatileCommitOnBlobStorageFailure [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-04-07T13:59:27.446619Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:27.446758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:27.464117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:27.464585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:27.488947Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:27.489479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=15269868325036466011, session=0, seqNo=0) 2025-04-07T13:59:27.489625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T13:59:27.503566Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=15269868325036466011, session=1) 2025-04-07T13:59:27.503901Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=15381030466756600606, session=0, seqNo=0) 2025-04-07T13:59:27.504025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T13:59:27.516054Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=15381030466756600606, session=2) 2025-04-07T13:59:28.055652Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:28.055823Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:28.073509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:28.073647Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:28.113522Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:28.114076Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=15588251308726956297, session=1, seqNo=0) 2025-04-07T13:59:28.129335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=15588251308726956297, session=1) 2025-04-07T13:59:28.920527Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:28.920645Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:28.956504Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:28.957115Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:28.987535Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:28.988461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=6324953003261161695, session=0, seqNo=0) 2025-04-07T13:59:28.988627Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T13:59:29.002946Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=6324953003261161695, session=1) 2025-04-07T13:59:29.629606Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:29.629723Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:29.653259Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:29.653402Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:29.672537Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:29.672933Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=2236804939096591118, path="") 2025-04-07T13:59:29.698028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=2236804939096591118, status=SUCCESS) 2025-04-07T13:59:29.699100Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=4851063586888494603, session=0, seqNo=0) 2025-04-07T13:59:29.699240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T13:59:29.719076Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=4851063586888494603, session=1) 2025-04-07T13:59:29.719922Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2025-04-07T13:59:29.720049Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T13:59:29.720210Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2025-04-07T13:59:29.741853Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=111, session=2) 2025-04-07T13:59:30.333357Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:30.333452Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:30.352180Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:30.352392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:30.391570Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:30.391978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxConfigSet::Execute (sender=[5:130:2156], cookie=17131871718156308641, path="") 2025-04-07T13:59:30.415787Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:130:2156], cookie=17131871718156308641, status=SUCCESS) 2025-04-07T13:59:30.416946Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=17125801319560456057, session=0, seqNo=0) 2025-04-07T13:59:30.417087Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T13:59:30.447772Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=17125801319560456057, session=1) 2025-04-07T13:59:30.448697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:139:2163], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T13:59:30.448880Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T13:59:30.448979Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T13:59:30.449432Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=111, session=0, seqNo=0) 2025-04-07T13:59:30.449540Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T13:59:30.449691Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=222, session=1, seqNo=0) 2025-04-07T13:59:30.462143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:139:2163], cookie=123) 2025-04-07T13:59:30.462244Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=111, session=2) 2025-04-07T13:59:30.462303Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=222, session=1) >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-04-07T13:59:26.881790Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:26.881921Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:26.902003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:26.902528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:26.933292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:27.540294Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:27.540381Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:27.549967Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:27.550063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:27.574362Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:27.579445Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:128:2154], cookie=6075252443019241284, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-07T13:59:27.579712Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T13:59:27.592314Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:128:2154], cookie=6075252443019241284) 2025-04-07T13:59:27.592974Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:137:2161], cookie=151855843785667640, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-07T13:59:27.593220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-04-07T13:59:27.606244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:137:2161], cookie=151855843785667640) 2025-04-07T13:59:27.606811Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:142:2166], cookie=11844304498233535607, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:27.607028Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-04-07T13:59:27.623811Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:142:2166], cookie=11844304498233535607) 2025-04-07T13:59:27.624461Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:147:2171], cookie=10439090065470975655, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:27.624701Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-07T13:59:27.645477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:147:2171], cookie=10439090065470975655) 2025-04-07T13:59:27.646153Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:152:2176], cookie=7012962300318329177, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:27.646374Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-04-07T13:59:27.672019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:152:2176], cookie=7012962300318329177) 2025-04-07T13:59:27.672712Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:157:2181], cookie=13859922636029006767, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:27.672926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-04-07T13:59:27.693347Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:157:2181], cookie=13859922636029006767) 2025-04-07T13:59:27.694090Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:162:2186], cookie=1798954533916252110, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-07T13:59:27.694273Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-04-07T13:59:27.715049Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:162:2186], cookie=1798954533916252110) 2025-04-07T13:59:27.715858Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:167:2191], cookie=8146858070094708074, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:27.716073Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 
2025-04-07T13:59:27.741298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:167:2191], cookie=8146858070094708074) 2025-04-07T13:59:27.741981Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:172:2196], cookie=5154399219873857710, ids=[100], paths=[], recursive=0) 2025-04-07T13:59:27.742072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:172:2196], cookie=5154399219873857710) 2025-04-07T13:59:27.742628Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:175:2199], cookie=9481671573499434666, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-07T13:59:27.742713Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:175:2199], cookie=9481671573499434666) 2025-04-07T13:59:27.743249Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:178:2202], cookie=14366512132744221858, ids=[], paths=[/Root, ], recursive=0) 2025-04-07T13:59:27.743331Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:178:2202], cookie=14366512132744221858) 2025-04-07T13:59:27.743808Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:181:2205], cookie=9322996397865251308, ids=[1, 1], paths=[], recursive=0) 2025-04-07T13:59:27.743868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:181:2205], cookie=9322996397865251308) 2025-04-07T13:59:27.744386Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:184:2208], cookie=13728183680010212272, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-07T13:59:27.744462Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:184:2208], cookie=13728183680010212272) 2025-04-07T13:59:27.744992Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:187:2211], cookie=6365601668344334704, ids=[], paths=[], recursive=1) 2025-04-07T13:59:27.745074Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:187:2211], cookie=6365601668344334704) 2025-04-07T13:59:27.745704Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:190:2214], cookie=4996021719846277495, ids=[], paths=[], recursive=0) 2025-04-07T13:59:27.745769Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:190:2214], cookie=4996021719846277495) 2025-04-07T13:59:27.746403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:193:2217], cookie=11767033797214253265, ids=[3, 2], paths=[], recursive=1) 2025-04-07T13:59:27.746477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:193:2217], cookie=11767033797214253265) 2025-04-07T13:59:27.747020Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:196:2220], cookie=7842647888221797999, ids=[3, 2], paths=[], recursive=0) 2025-04-07T13:59:27.747082Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:196:2220], cookie=7842647888221797999) 2025-04-07T13:59:27.747651Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:199:2223], 
cookie=13368377722507172440, ids=[], paths=[Root2/], recursive=1) 2025-04-07T13:59:27.747725Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:199:2223], cookie=13368377722507172440) 2025-04-07T13:59:27.748312Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:202:2226], cookie=10208726681687726795, ids=[], paths=[Root2/], recursive=0) 2025-04-07T13:59:27.748375Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:202:2226], cookie=10208726681687726795) 2025-04-07T13:59:27.799872Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:27.799992Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:27.800580Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:27.804165Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:27.836533Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:27.836876Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:242:2256], cookie=2220100532994303130, ids=[100], paths=[], recursive=0) 2025-04-07T13:59:27.836941Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:242:2256], cookie=2220100532994303130) 2025-04-07T13:59:27.837509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:248:2261], cookie=14207251450957902986, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-07T13:59:27.837593Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:248:2261], cookie=14207251450957902986) 2025-04-07T13:59:27.838071Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:251:2264], cookie=6807673287808451604, ids=[], paths=[/Root, ], recursive=0) 2025-04-07T13:59:27.838161Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:251:2264], cookie=6807673287808451604) 2025-04-07T13:59:27.838624Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:254:2267], cookie=2113313230037704486, ids=[1, 1], paths=[], recursive=0) 2025-04-07T13:59:27.838681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:254:2267], cookie=2113313230037704486) 2025-04-07T13:59:27.839137Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:257:2270], cookie=4670476622249788234, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-07T13:59:27.839203Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:257:2270], cookie=4670476622249788234) 2025-04-07T13:59:27.839640Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:260:2273], cookie=4070131576874947778, ids=[], paths=[], recursive=1) 2025-04-07T13:59:27.839713Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:260:2273], cookie=4070131576874947778) 2025-04-07T13:59:27.840191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:263:2276], cookie=4989516624499346780, ids=[], paths=[], recursive=0) 2025-04-07T13:59:27.840244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete 
(sender=[2:263:2276], cookie=4989516624499346780) 2025-04-07T13:59:27.840816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQ ... US_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:29.914673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:29.915078Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=4398433510020741801, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-04-07T13:59:29.915288Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T13:59:29.941347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=4398433510020741801) 2025-04-07T13:59:29.941962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:139:2163], cookie=3305795732149639330, path="/Root/Q", config={ }) 2025-04-07T13:59:29.942189Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-04-07T13:59:29.958764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:139:2163], cookie=3305795732149639330) 2025-04-07T13:59:29.959369Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:144:2168], cookie=5422814170591607783, path="/Root/Folder", config={ }) 2025-04-07T13:59:29.959621Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-04-07T13:59:29.975080Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:144:2168], cookie=5422814170591607783) 2025-04-07T13:59:29.975743Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:149:2173], cookie=3801627839211738694, path="/Root/Folder/Q1", config={ }) 2025-04-07T13:59:29.975990Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-07T13:59:29.988393Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:149:2173], cookie=3801627839211738694) 2025-04-07T13:59:29.989035Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:154:2178], cookie=12392423603646402160, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.989135Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:154:2178], cookie=12392423603646402160) 2025-04-07T13:59:29.989965Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:160:2184], cookie=1218554091750266724, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.990046Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:160:2184], cookie=1218554091750266724) 2025-04-07T13:59:29.990900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:166:2190], cookie=9684593653686954785, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.990974Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:166:2190], cookie=9684593653686954785) 2025-04-07T13:59:29.991429Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:169:2193], cookie=16456002509974382976, id=0, path="/Root/Folder/NonexistingRes") 2025-04-07T13:59:29.991516Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:169:2193], 
cookie=16456002509974382976) 2025-04-07T13:59:29.991999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:172:2196], cookie=10360122003521956, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.992062Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:172:2196], cookie=10360122003521956) 2025-04-07T13:59:29.992549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:175:2199], cookie=850447364970909321, id=100, path="") 2025-04-07T13:59:29.992617Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:175:2199], cookie=850447364970909321) 2025-04-07T13:59:29.997458Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:178:2202], cookie=11047006323134778841, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.997560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:178:2202], cookie=11047006323134778841) 2025-04-07T13:59:29.998148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:181:2205], cookie=9211349796747530569, id=3, path="") 2025-04-07T13:59:29.998219Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:181:2205], cookie=9211349796747530569) 2025-04-07T13:59:29.998709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:184:2208], cookie=10537295048870770156, ids=[], paths=[], recursive=1) 2025-04-07T13:59:29.998778Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:184:2208], cookie=10537295048870770156) 2025-04-07T13:59:29.999309Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:187:2211], cookie=17494230420327792330, id=0, path="/Root/Folder/Q1") 2025-04-07T13:59:29.999467Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-04-07T13:59:30.015975Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:187:2211], cookie=17494230420327792330) 2025-04-07T13:59:30.016728Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2216], cookie=13555237704382783783, ids=[], paths=[], recursive=1) 2025-04-07T13:59:30.016826Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2216], cookie=13555237704382783783) 2025-04-07T13:59:30.030916Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:30.031038Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:30.031687Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:30.032461Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:30.088493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:30.088987Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:232:2246], cookie=7043656257457424479, ids=[], paths=[], recursive=1) 2025-04-07T13:59:30.089106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:232:2246], cookie=7043656257457424479) 2025-04-07T13:59:30.089881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:238:2251], 
cookie=7865492320732251016, id=3, path="") 2025-04-07T13:59:30.090047Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-04-07T13:59:30.120088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:238:2251], cookie=7865492320732251016) 2025-04-07T13:59:30.120876Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:243:2256], cookie=9666943659259256342, ids=[], paths=[], recursive=1) 2025-04-07T13:59:30.120982Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:243:2256], cookie=9666943659259256342) 2025-04-07T13:59:30.139956Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:30.140059Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:30.140850Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:30.141450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:30.190364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:30.190775Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:283:2286], cookie=16578018126176520275, ids=[], paths=[], recursive=1) 2025-04-07T13:59:30.190860Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:283:2286], cookie=16578018126176520275) 2025-04-07T13:59:31.067919Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T13:59:31.068027Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T13:59:31.113173Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T13:59:31.113542Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T13:59:31.152141Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T13:59:31.152634Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=17881338707116608712, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:31.152885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-04-07T13:59:31.168262Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=17881338707116608712) 2025-04-07T13:59:31.169008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=3850081982287103724, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-07T13:59:31.169314Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-04-07T13:59:31.191467Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=3850081982287103724) 2025-04-07T13:59:31.193341Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 14084610115220928320. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T13:59:31.193427Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=14084610115220928320) 2025-04-07T13:59:31.194964Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 11635067785985191930. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2025-04-07T13:59:31.195028Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=11635067785985191930) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16641, MsgBus: 5703 2025-04-07T13:59:22.844101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570550905236103:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:22.844235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001737/r3tmp/tmpCKDMg1/pdisk_1.dat 2025-04-07T13:59:23.223593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:23.228106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:23.228230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:23.231332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16641, node 1 2025-04-07T13:59:23.313136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:23.313158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:23.313164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
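The KESUS_TABLET trace above walks through the full quoter-resource lifecycle: TTxQuoterResourceAdd creates resources such as "Root" with { MaxUnitsPerSecond: 1 }, TTxQuoterResourceDescribe looks them up by id or path (optionally recursive), TTxQuoterResourceDelete removes them, and TEvSubscribeOnResourcesResult reports the effective HierarchicalDRRResourceConfig. A minimal sketch of driving the same operations through the public C++ rate-limiter client is shown below; the include paths and the class/method names (TRateLimiterClient, CreateResource, DescribeResource, DropResource, the MaxUnitsPerSecond setting) are assumptions and should be checked against the SDK headers before use.

```cpp
// Minimal sketch, not verified against the SDK: create, describe and drop
// Kesus quoter resources, mirroring TTxQuoterResourceAdd / Describe / Delete.
// All class, method and include names below are assumptions.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_rate_limiter/rate_limiter.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")   // hypothetical endpoint
        .SetDatabase("/Root"));
    NYdb::NRateLimiter::TRateLimiterClient client(driver);

    // The coordination (Kesus) node is assumed to exist already.
    const char* kesus = "/Root/my_kesus";       // hypothetical path

    // "Created new quoter resource 1 \"Root\"" with MaxUnitsPerSecond: 1
    client.CreateResource(kesus, "Root",
        NYdb::NRateLimiter::TCreateResourceSettings().MaxUnitsPerSecond(1))
        .GetValueSync();

    // Children such as "Root/Folder/Q1" inherit limits from the parent.
    client.CreateResource(kesus, "Root/Folder").GetValueSync();
    client.CreateResource(kesus, "Root/Folder/Q1").GetValueSync();

    // Corresponds to TTxQuoterResourceDescribe (paths=[...], recursive=0).
    client.DescribeResource(kesus, "Root/Folder/Q1").GetValueSync();

    // Corresponds to TTxQuoterResourceDelete ("Deleted quoter resource 4").
    client.DropResource(kesus, "Root/Folder/Q1").GetValueSync();

    driver.Stop(true);
    return 0;
}
```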
2025-04-07T13:59:23.313267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5703 TClient is connected to server localhost:5703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:23.870525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:23.888903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:23.902522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:24.034803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:24.188222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:59:24.271680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.243192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570568085106862:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:26.243317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:26.517270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.560311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.620282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.657722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.753097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.834412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.957965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570568085107366:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:26.958027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:26.958333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570568085107371:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:26.961887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:26.973069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570568085107373:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:27.854916Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570550905236103:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:27.854981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:28.893154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys 1 8 /Root/SecondaryKeys/Index/indexImplTable 1 8 >> KqpCost::Range [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 27687, MsgBus: 2846 2025-04-07T13:59:25.405816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570561378925810:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:25.405939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001720/r3tmp/tmpGsqnlT/pdisk_1.dat 2025-04-07T13:59:26.053682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:26.053807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:26.055802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:26.078651Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27687, node 1 2025-04-07T13:59:26.129464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:26.129501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:26.129508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:26.129606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2846 TClient is connected to server localhost:2846 WaitRootIsUp 'Root'... 
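The repeated "Resource pool default not found or you don't have access permissions" warnings above are expected on a fresh database: the first query makes the workload service create the default pool itself, which is what the ESchemeOpCreateResourcePool operation and the TPoolCreatorActor "doublechecking" retry correspond to. A pool can also be created explicitly before running queries; the sketch below shows one way to issue the DDL, where both the CREATE RESOURCE POOL syntax with its settings and the use of ExecuteSchemeQuery for this statement are assumptions, not taken from this log.

```cpp
// Minimal sketch, assuming CREATE RESOURCE POOL DDL and that it can be sent
// through the table client's scheme-query call; verify for your YDB version.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <iostream>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    auto session = client.CreateSession().GetValueSync().GetSession();
    auto status = session.ExecuteSchemeQuery(R"(
        CREATE RESOURCE POOL test_pool WITH (  -- hypothetical pool name and settings
            CONCURRENT_QUERY_LIMIT = 10,
            QUEUE_SIZE = 100
        );
    )").GetValueSync();

    if (!status.IsSuccess()) {
        std::cerr << status.GetIssues().ToString().c_str() << std::endl;
        driver.Stop(true);
        return 1;
    }
    driver.Stop(true);
    return 0;
}
```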
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:26.668857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:26.681061Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:26.695911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:26.844492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:59:26.993162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.076551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:29.134106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570578558796547:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:29.134212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:29.477588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.575077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.634673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.687360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.725392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.810685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.904718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570578558797052:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:29.904810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:29.905095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570578558797057:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:29.908418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:29.930471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570578558797059:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:30.424091Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570561378925810:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:30.424177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TPQTest::TestAlreadyWritten [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 3237, MsgBus: 20164 2025-04-07T13:57:01.491151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569943921017149:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.491179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b5/r3tmp/tmp3LKp7I/pdisk_1.dat 2025-04-07T13:57:01.935229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:01.941062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:01.941213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:01.944187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3237, node 1 2025-04-07T13:57:02.019728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:02.019749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:02.019755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:02.019844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20164 TClient is connected to server localhost:20164 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.589489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.635226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:02.848125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.016622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.094302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.958843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569956805920735:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.958961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.210652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.240552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.268613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.290233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.314068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.346433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.422963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569961100888525:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.423022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.423241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569961100888530:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.426838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.435811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569961100888532:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.496397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569943921017149:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.496774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.634975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.665869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.715672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.742494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.770321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.899531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.928905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.004878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.080126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.106095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.179573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.229669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.261946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.736877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:07.773666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.854630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.901719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.936768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.989130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.025651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... access permissions } 2025-04-07T13:59:22.099169Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:22.117708Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570547955927901:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:23.690381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.775184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.825195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.880689Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.927926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.138403Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.228421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.266380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.310141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.351488Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.437567Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.519434Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.577070Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:59:25.982178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:59:26.038801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.095126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.153652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.200081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.251062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.335534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.384527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.425506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.473139Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.562759Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.639836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.691180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.818142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.875963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-07T13:59:26.986772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.046816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.154728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.213776Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.321582Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.393361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.467974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.518867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.607421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.879828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2025-04-07T13:59:27.937771Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.033723Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.117956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.177248Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.234767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.298083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.424637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.515162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.683322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 
2025-04-07T13:59:28.747801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.834908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.926800Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.426268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:30.426304Z node 5 :IMPORT WARN: Table profiles were not loaded >> KqpOlap::OlapRead_StreamGenericQuery [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestAlreadyWritten [GOOD] Test command err: 2025-04-07T13:58:48.701530Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:58:48.705574Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:58:48.705875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T13:58:48.705932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:58:48.705972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T13:58:48.706009Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:58:48.706063Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:48.706111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T13:58:48.706146Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T13:58:48.724075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:48.724168Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:174:2189], now have 1 active actors on pipe 2025-04-07T13:58:48.724265Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:58:48.740129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" 
Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:48.744142Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T13:58:48.744279Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:58:48.745998Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:48.746117Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:48.746186Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:58:48.746623Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:58:48.747130Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:183:2196] 2025-04-07T13:58:48.747966Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
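The TEvUpdateConfig applied above describes the topic from the tablet's point of view: two partitions (0 and 1), a single consumer "user", cache size, low-watermark and write-inflight limits, and METERING_MODE_RESERVED_CAPACITY. For comparison, a roughly equivalent topic could be declared through the public C++ topic client as sketched below; the fluent setter names are assumptions inferred from those config fields rather than checked against the SDK headers.

```cpp
// Minimal sketch, not verified against the SDK: create a topic with two
// partitions and a consumer "user", roughly matching the PQ config above.
// Setter and include names are assumptions.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")
        .SetDatabase("/Root/PQ"));
    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TCreateTopicSettings()
        .PartitioningSettings(2, 2)                   // Partitions { PartitionId: 0 } / { PartitionId: 1 }
        .BeginAddConsumer("user").EndAddConsumer();   // Consumers { Name: "user" }

    auto status = client.CreateTopic("topic", settings).GetValueSync();
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}
```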
2025-04-07T13:58:48.748043Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:183:2196] 2025-04-07T13:58:48.748108Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:48.748762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T13:58:48.748825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T13:58:48.748998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:48.749159Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:58:48.749450Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:58:48.749704Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T13:58:48.750400Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-04-07T13:58:48.750445Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:185:2198] 2025-04-07T13:58:48.750483Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T13:58:48.750846Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-07T13:58:48.750881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-07T13:58:48.750999Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:58:48.751096Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:58:48.751649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T13:58:48.751798Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:58:48.755582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:48.755739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:58:48.756085Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:48.756143Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-07T13:58:48.757831Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:48.757883Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2025-04-07T13:58:48.765490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67891 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 1 Important: false } Consumers { Name: "client-2" Generation: 1 Important: false } } BootstrapConfig { } } 2025-04-07T13:58:48.765685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-07T13:58:48.765737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T13:58:48.765779Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-07T13:58:48.766050Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-2" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 
173 RawX2: 4294969484 } Partitions { } 2025-04-07T13:58:48.766175Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T13:58:48.766362Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67892 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "client-2" Path: "/topic" } Operations { PartitionId: 2 Begin: 0 End: 0 Consumer: "client-1" Path: "/topic" } Immediate: false } 2025-04-07T13:58:48.766397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927 ... C_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [23:232:2234] sender: [23:330:2057] recipient: [23:14:2061] 2025-04-07T13:59:32.601501Z node 23 :PERSQUEUE INFO: new Cookie default|cd23e9bf-b40eb462-b8cd02d8-de0cc53e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] 2025-04-07T13:59:33.100603Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:33.100715Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] Leader for TabletID 72057594037927938 is [24:151:2172] sender: [24:152:2057] recipient: [24:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:177:2057] recipient: [24:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.138764Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:33.139815Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 24 actor [24:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 24 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 24 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 24 Important: false } 2025-04-07T13:59:33.140588Z node 24 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [24:184:2197] 2025-04-07T13:59:33.143468Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [24:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:33.145724Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [24:185:2198] 2025-04-07T13:59:33.147905Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [24:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.160035Z node 24 :PERSQUEUE INFO: new Cookie default|4782ec61-47a40b5d-63a84669-40878499_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.169856Z node 24 :PERSQUEUE INFO: new Cookie default|a01ee7ec-8854093e-2b827f30-e9cc5b9b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.183384Z node 24 :PERSQUEUE INFO: new Cookie default|a1b155d7-31e4262e-f048e0ac-b21ad3ae_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] 2025-04-07T13:59:33.881802Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:33.881895Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] Leader for TabletID 72057594037927938 is [25:151:2172] sender: [25:152:2057] recipient: [25:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:177:2057] recipient: [25:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.921639Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:33.922703Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 25 actor [25:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 25 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } 
ReadRuleGenerations: 25 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 Important: false } 2025-04-07T13:59:33.923527Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [25:184:2197] 2025-04-07T13:59:33.927301Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [25:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:33.929746Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [25:185:2198] 2025-04-07T13:59:33.932298Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [25:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.944078Z node 25 :PERSQUEUE INFO: new Cookie default|449056a6-bd4db650-106e38d8-3863efa2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.952099Z node 25 :PERSQUEUE INFO: new Cookie default|f43bbc14-255a9cc6-841f962a-2bfd2aba_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:33.965485Z node 25 :PERSQUEUE INFO: new Cookie default|151ddcda-676b3898-4f37753b-4117152a_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] Test command err: Trying to start YDB, gRPC: 4320, MsgBus: 30251 2025-04-07T13:59:28.069524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570575844286412:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:28.070032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101d/r3tmp/tmplsQv39/pdisk_1.dat 2025-04-07T13:59:28.546503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:28.546607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:28.548186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:28.564805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4320, node 1 2025-04-07T13:59:28.688161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:28.688186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:28.688196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:28.688284Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30251 TClient is connected to server localhost:30251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:29.260456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:29.297043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:29.402155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:29.402365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:29.402626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:29.402847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:29.402949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:29.403061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:29.403189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:29.403303Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:29.403418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:29.403575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:29.403676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:29.403794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570580139254376:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:29.438327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:29.438393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:29.438625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:29.438739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:29.438831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:29.438939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:29.439044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:29.439158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:29.439295Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:29.439412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:29.439537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:29.439627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570580139254377:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:29.471577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:29.471649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:29.471853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:29.471947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:29.472054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:29.472164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:29.472251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:29.472372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:29.472460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570580139254378 ... 
witched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:29.518446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:29.518475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:29.518649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:29.518728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:29.518839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:29.518860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:29.519024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:29.519046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:29.519163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:29.519189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; 2025-04-07T13:59:31.326682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570588729189240:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:31.326769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570588729189275:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:31.326823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:31.342155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:31.387231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:59:31.387432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570588729189277:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:59:32.652567Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034372000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; 2025-04-07T13:59:33.069886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570575844286412:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:33.069956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val-abc\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No 
estimate","Predicate":"KqpOlapApply == \"val-abc\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1328) '('"_id" '"3d118fb5-1228b6a1-fadef0de-16e05c7a") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '1)) (let $20 (Just $19)) (let $21 '($20 $19)) (let $22 (If (== $19 (Int32 '2147483647)) $21 '((+ $20 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $28 (ResourceType '"JsonNode")) (let $29 (OptionalType $28)) (let $30 '((ResourceType '"JsonPath"))) (let $31 (DictType $10 $28)) (let $32 '($31)) (let $33 (CallableType '() $27 '($29) $30 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($54) (block '( (let $55 '((DataType 'Json) '"" '1)) (let $56 (CallableType '() '($28) $55)) (let $57 (Udf '"Json2.Parse" (Void) (VoidType) '"" $56 (VoidType) '"" '())) (return (Just (Apply $57 $54))) )))) (let $37 (Nothing $29)) (let $38 (CallableType '() $30 '($10))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.'col-abc'"))) (let $41 (Dict $31)) (let $42 (lambda '($58) (Nothing $11))) (let $43 (lambda '($59) $59)) (let $44 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (String '"val-abc"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($60 $61 $62) (block '( (let $63 (IfPresent $62 $36 $37)) (let $64 (Apply $35 $63 $40 $41)) (let $65 (Visit $64 '0 $42 '1 $43)) (let $66 (CallableType '() $27 '($26) $30 $32)) (let $67 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $66 (VoidType) '"" $34)) (let $68 (Apply $67 $61 $40 $41)) (let $69 (Visit $68 '0 $42 '1 $43)) (return (AsStruct '('"column1" $65) '('"column2" $69) '('"id" $60))) )))))) ))) '('('"_logical_id" '1399) '('"_id" '"d81d6bff-2177baa1-57f11f30-29e03f4f")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($70) $70) '('('"_logical_id" '2259) '('"_id" '"d02d4f95-61d378d3-1f48ff37-c065a801")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-04-07T13:57:52.371869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570162262128949:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:52.371913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ec3/r3tmp/tmpiiw0D8/pdisk_1.dat 2025-04-07T13:57:52.780448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:52.838491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:52.838587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26056, node 1 2025-04-07T13:57:52.840895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:52.925130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:52.925154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:52.925161Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-07T13:57:52.925297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:53.290501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:53.364486Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:57:55.448461Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:57:55.452120Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y= 2025-04-07T13:57:55.452526Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:57:55.452541Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:57:55.452553Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:57:55.452593Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570175147031231:2302], Start check tables existence, number paths: 2 2025-04-07T13:57:55.463792Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:55.464217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570175147031231:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:57:55.464293Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570175147031231:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:57:55.464324Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570175147031231:2302], Successfully finished 2025-04-07T13:57:55.464440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:57:55.473440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], 
DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:55.476414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:57:55.479006Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:57:55.480188Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:57:55.484886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:57:55.550347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:55.554347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570175147031257:2296], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:57:55.555829Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570175147031315:2334], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:57:55.556970Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570175147031315:2334], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:57:55.585601Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:57:55.585657Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:57:55.585702Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:57:55.585723Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:57:55.585808Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmNiYTcxODItNzBlMzQ0MGYtNTdjYzIxZmMtOWY4OWQ1Y2Y=, ActorId: [1:7490570175147031240:2303], ActorState: unknown state, Session actor destroyed 2025-04-07T13:57:56.435771Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570181999353448:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:56.455674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ec3/r3tmp/tmpi2NnqS/pdisk_1.dat 2025-04-07T13:57:56.517314Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12839, node 2 2025-04-07T13:57:56.573953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:56.574100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:56.583135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:56.613020Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:56.613039Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:56.613047Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-07T13:57:56.613146Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:56.869741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:56.888779Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:57:56.925036Z node 2 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:58:00.061003Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:00.061076Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570199179223128:2302], Start check tables existence, number paths: 2 2025-04-07T13:58:00.061216Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:00.061230Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:00.072767Z node 2 :KQP_SESSI ... 
0rdmd, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:59:31.555254Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5tx2zh7fzvyhz40rdmd, txInfo Status: Committed Kind: ReadWrite TotalDuration: 127.762 ServerDuration: 127.566 QueriesCount: 2 2025-04-07T13:59:31.555373Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5tx2zh7fzvyhz40rdmd, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:31.555442Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5tx2zh7fzvyhz40rdmd, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:31.555471Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5tx2zh7fzvyhz40rdmd, EndCleanup, isFinal: 0 2025-04-07T13:59:31.555530Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5tx2zh7fzvyhz40rdmd, Sent query response back to proxy, proxyRequestId: 517, proxyId: [7:7490570271256559623:2209] 2025-04-07T13:59:31.556171Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, TxId: 2025-04-07T13:59:31.556265Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-07T13:59:31.556854Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ReadyState, TraceId: 01jr88s5z4293t3k17xf12ghz2, received request, proxyRequestId: 518 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM 
`.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7490570589084147813:4788] database: /Root databaseId: /Root pool id: default 2025-04-07T13:59:31.556884Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ReadyState, TraceId: 01jr88s5z4293t3k17xf12ghz2, request placed into pool from cache: default 2025-04-07T13:59:31.557734Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, ExecutePhyTx, tx: 0x000050C0003A1B18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:31.557799Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, Sending to Executer TraceId: 0 8 2025-04-07T13:59:31.557863Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, Created new KQP executer: [7:7490570589084147816:4781] isRollback: 0 2025-04-07T13:59:31.565980Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDk5NDRkZDMtNjcxMDUwMmQtYjlhYThjZmUtN2QwZjZhODE=, ActorId: [6:7490570299152738816:2307], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:31.566042Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDk5NDRkZDMtNjcxMDUwMmQtYjlhYThjZmUtN2QwZjZhODE=, ActorId: [6:7490570299152738816:2307], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:31.566072Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDk5NDRkZDMtNjcxMDUwMmQtYjlhYThjZmUtN2QwZjZhODE=, ActorId: [6:7490570299152738816:2307], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:31.566100Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDk5NDRkZDMtNjcxMDUwMmQtYjlhYThjZmUtN2QwZjZhODE=, ActorId: [6:7490570299152738816:2307], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:31.566187Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDk5NDRkZDMtNjcxMDUwMmQtYjlhYThjZmUtN2QwZjZhODE=, ActorId: [6:7490570299152738816:2307], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:31.586884Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T13:59:31.586969Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, ExecutePhyTx, tx: 0x000050C0003A12D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:31.587879Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], 
ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:59:31.588030Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, txInfo Status: Committed Kind: ReadOnly TotalDuration: 30.431 ServerDuration: 30.304 QueriesCount: 2 2025-04-07T13:59:31.588154Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:31.588213Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:31.588240Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, EndCleanup, isFinal: 0 2025-04-07T13:59:31.588293Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ExecuteState, TraceId: 01jr88s5z4293t3k17xf12ghz2, Sent query response back to proxy, proxyRequestId: 518, proxyId: [7:7490570271256559623:2209] 2025-04-07T13:59:31.588725Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, TxId: 2025-04-07T13:59:31.588823Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, TxId: 2025-04-07T13:59:31.588917Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7490570301321330882:2298], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-07T13:59:31.588977Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:31.589016Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:31.589049Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:31.589076Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:31.589164Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MWRkOTdmN2YtZDI4ZGQ4NGYtYmM4NjExNDYtZWE5M2Q5Yw==, ActorId: [7:7490570589084147783:4781], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:31.977285Z node 8 :BS_PROXY_PUT ERROR: [3a07f5a5f790dad4] Result# TEvPutResult {Id# [72075186224037888:1:926:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T13:59:32.164737Z node 7 :BS_PROXY_PUT ERROR: [aea91a22a493a44f] Result# TEvPutResult {Id# [72075186224037889:1:724:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> YdbSdkSessionsPool::StressTestSync1 [FAIL] >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_StreamGenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 13755, MsgBus: 2002 2025-04-07T13:59:27.728027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570569558466276:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:27.728103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001023/r3tmp/tmp3jnnBQ/pdisk_1.dat 2025-04-07T13:59:28.388603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:28.390610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:28.392403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:28.459652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13755, node 1 2025-04-07T13:59:28.723510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:28.723540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:28.723550Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:28.723639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2002 TClient is connected to server localhost:2002 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:29.714615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:29.751876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:29.761132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:29.945725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:29.964892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:29.965157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:29.965310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:29.965421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:29.965527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:29.965620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:29.965720Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:29.965828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:29.965915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:29.966020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:29.966116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570578148401393:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.069389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.069451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.069693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.069808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.069909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.070039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.070140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.070319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:30.070468Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:30.070595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.070720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:30.070830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570578148401395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.139785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.143500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.143739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.143873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.143994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.144108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.144204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.144301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570578148401396:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:30.602567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:30.602594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:30.602662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:30.602686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:30.602818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:30.602839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:30.602924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:30.602954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:30.602997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:30.603016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:30.603042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:30.603060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:30.603296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:30.603385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:30.603575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:30.603596Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.603698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:30.603730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:30.603916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:30.603947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:30.604034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:30.604053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:30.604482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:30.604504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:30.604579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:30.604601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:30.604742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:30.604762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:30.604824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:30.604846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:30.604888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:30.604917Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:30.604948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:30.604967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:30.605226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:30.605270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:30.605442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:30.605462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.605556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:30.605588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:30.605727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:30.605749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:30.605837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:30.605856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:30.692057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2025-04-07T13:59:32.731134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570569558466276:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:32.731345Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:32.739008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570591033303800:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:32.739077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570591033303784:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:32.739124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:32.748581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:59:32.765323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570591033303813:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TContinuousBackupTests::Basic |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] Test command err: Trying to start YDB, gRPC: 21350, MsgBus: 14732 2025-04-07T13:59:28.310363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570576665266907:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:28.310496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101a/r3tmp/tmpqD2s54/pdisk_1.dat 2025-04-07T13:59:29.223638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:29.294459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:29.306286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:29.306361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:29.309530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21350, node 1 2025-04-07T13:59:29.489083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:29.489102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:29.489127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:29.489446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14732 TClient is connected to server localhost:14732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:30.488884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:30.518354Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:30.527058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:30.705807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.706075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.706345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.706445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.706547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.706638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.706719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.706831Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:30.706945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:30.707032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.707120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:30.707207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570585255202040:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.788346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.796632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.796930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.797062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.797192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.797336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.797445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.797555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:30.797656Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:30.797753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.798327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:30.798434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570585255202041:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.889396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.889476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.889714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.889835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.889929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.890036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.890164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570585255202042:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04- ... 
cess=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:30.947470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:30.947603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:30.947637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:30.947710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:30.947745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:30.947820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:30.947841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:30.947868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:30.947885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:30.948142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:30.948176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:30.948330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:30.948357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.948469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:30.948505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:30.948679Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:30.948702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:30.948778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:30.948793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2025-04-07T13:59:33.172920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570598140104229:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:33.173023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:33.173444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570598140104241:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:33.177467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:33.192447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570598140104243:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:59:33.290399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570576665266907:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:33.290460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2025-04-07T13:59:34.721509Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034374000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '728) '('"_id" '"e5c34a3c-c9231158-a36426d7-f7981ef4") $3)) (let $5 
(DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Uint64)) (let $12 '('('"_logical_id" '786) '('"_id" '"d5cf2f6-97a8c748-784e9482-98128cbf") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Count0 $42)))) (Nothing (OptionalType (StructType '('Count0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Coalesce (Member $47 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1341) '('"_id" '"217c499b-7755bf37-cf547f4e-de37a58d")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1444) '('"_id" '"bc5ac92c-3983db45-3f6f2050-d42a6135") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) >> KqpDatetime64ColumnShard::UseDate32AsPrimaryKey [GOOD] >> KqpDatetime64ColumnShard::Csv >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> TContinuousBackupTests::Basic [GOOD] >> StreamCreator::Basic |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> YdbSdkSessionsPool::FailTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE 
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:37.325025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:37.325125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.325162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:37.325226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:37.325265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:37.325289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:37.325336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.325596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:37.393638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:37.393706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:37.403651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:37.403789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:37.403946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:37.408780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:37.408965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:37.409640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.409891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:37.411777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.412868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.412921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.413027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:37.413071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.413104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:37.413273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.419317Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:37.555245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:37.555498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.555780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:37.556057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:37.556118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.558685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.558853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:37.559054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.559129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:37.559183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:37.559229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:37.561390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.561458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:37.561498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:37.563447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.563498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.563536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.563620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.576459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:37.582430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:37.582654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:37.583841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.584017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:37.584078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.584352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:37.584412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.584586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.584721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:37.587094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.587162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.587357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.587401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:37.587625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.587669Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:37.587791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:37.587828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.587875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:37.587928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.587988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:37.588025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:37.588103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:37.588142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-07T13:59:37.588200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:37.590228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.590347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.590382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:37.590421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:37.590456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.590565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... :1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.264332Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.264361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:59:38.264394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-04-07T13:59:38.266241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.267033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.267332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.267389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-04-07T13:59:38.267469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-07T13:59:38.267495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-07T13:59:38.267533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-04-07T13:59:38.267601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 103 2025-04-07T13:59:38.267665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-07T13:59:38.267731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T13:59:38.267773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T13:59:38.267832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T13:59:38.267867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 
2025-04-07T13:59:38.267884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-07T13:59:38.267938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:59:38.267963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-07T13:59:38.267979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-07T13:59:38.268022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T13:59:38.268050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-07T13:59:38.268068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-07T13:59:38.268117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T13:59:38.269884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T13:59:38.269932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:740:2619] TestWaitNotification: OK eventTxId 103 2025-04-07T13:59:38.270479Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:59:38.270747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 305us result status StatusSuccess 2025-04-07T13:59:38.271182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams 
{ Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:38.271656Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:59:38.271858Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 208us result status StatusSuccess 2025-04-07T13:59:38.272278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:38.273389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:59:38.273618Z node 1 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 206us result status StatusSuccess 2025-04-07T13:59:38.273979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> StreamCreator::WithResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] Test command err: 2025-04-07T13:57:54.567177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570173486928513:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:54.567852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e8f/r3tmp/tmpYOk5Ud/pdisk_1.dat 2025-04-07T13:57:55.049335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T13:57:55.049454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:55.053565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:55.094568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4558, node 1 2025-04-07T13:57:55.223633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:55.223657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:55.223662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:55.223753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:55.567325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:55.701010Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:57:57.516924Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:57:57.525818Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM= 2025-04-07T13:57:57.527587Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:57:57.527603Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:57:57.527614Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:57:57.542444Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [1:7490570186371831009:2300], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:57.542916Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570186371831008:2299], Start check tables existence, number paths: 2 2025-04-07T13:57:57.546392Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570186371831008:2299], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:57:57.546454Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570186371831008:2299], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:57:57.546486Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570186371831008:2299], Successfully finished 2025-04-07T13:57:57.546586Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:57:57.568778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:57.576247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:57:57.581769Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:57:57.590686Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:57:57.595543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:57:57.695123Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:57.698687Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570186371831036:2294], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:57:57.698994Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-07T13:57:57.699035Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-07T13:57:57.699100Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570186371831096:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:57:57.700867Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570186371831096:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:57:57.700915Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-07T13:57:57.700932Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T13:57:57.701147Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570186371831105:2305], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T13:57:57.704064Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570186371831105:2305], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T13:57:57.714204Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T13:57:57.714228Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:57:57.714531Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [1:7490570186371831009:2300], ActorState: ReadyState, TraceId: 01jr88paah3d01e4900h83cpf7, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-07T13:57:57.717083Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570186371831117:2307], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T13:57:57.718622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570186371831117:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:57.718701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:57.987610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:57.989878Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570186371831105:2305], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T13:57:57.989969Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570186371831105:2305], DatabaseId: Root, PoolId: sample_pool_id, Pool config has changed, queue size: -1, in flight limit: 42 2025-04-07T13:57:57.990258Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got resign request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-07T13:57:57.990311Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570186371831105:2305], DatabaseId: Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-04-07T13:57:57.990351Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: Root, PoolId: sample_pool_id 2025-04-07T13:57:58.016137Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [1:7490570186371831009:2300], ActorState: ExecuteState, TraceId: 01jr88paah3d01e4900h83cpf7, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7490570186371831118:2300] WorkloadServiceCleanup: 0 2025-04-07T13:57:58.018302Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [1:7490570186371831009:2300], ActorState: CleanupState, TraceId: 01jr88paah3d01e4900h83cpf7, EndCleanup, isFinal: 0 2025-04-07T13:57:58.018382Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZDRjODViNDItMmFjMDBmOWM=, ActorId: [1:7490570186371831009:2300], ActorState: CleanupState, TraceId: 01jr88paah3d01e4900h83cpf7, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7490570173486928749:2256] 2025-04-07T13:57:58.034452Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=M2Y0ZWJmY2ItZGY3ZDc0MDItZ ... 
zA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: ExecuteState, TraceId: 01jr88sa3xdjtjyvwzmhzzq4bx, Sent query response back to proxy, proxyRequestId: 77, proxyId: [9:7490570434993707982:2256] 2025-04-07T13:59:35.825323Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, TxId: 2025-04-07T13:59:35.825444Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, TxId: 2025-04-07T13:59:35.825807Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:35.825848Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:35.825880Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:35.825912Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:35.825990Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGYyYmVjMzItMzA0OGRiMjQtYmNkNzU3NWMtOGEwNGE0OTY=, ActorId: [9:7490570606792402312:2779], ActorState: unknown state, Session actor destroyed Wait lease expiration 27.983750s: delayed = 1, running = 0 2025-04-07T13:59:36.831578Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Bootstrap. 
Database: /Root 2025-04-07T13:59:36.835807Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA== 2025-04-07T13:59:36.836132Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:59:36.836478Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-07T13:59:36.837041Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ReadyState, TraceId: 01jr88sb445edg90nk37mhyftr, received request, proxyRequestId: 79 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [9:7490570611087369661:2792] database: /Root databaseId: /Root pool id: default 2025-04-07T13:59:36.837076Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ReadyState, TraceId: 01jr88sb445edg90nk37mhyftr, request placed into pool from cache: default 2025-04-07T13:59:36.840952Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, ExecutePhyTx, tx: 0x000050C000307B58 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:36.841025Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, Sending to Executer TraceId: 0 8 2025-04-07T13:59:36.841112Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, Created new KQP executer: [9:7490570611087369664:2791] isRollback: 0 2025-04-07T13:59:36.852893Z node 9 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T13:59:36.852976Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, ExecutePhyTx, tx: 0x000050C000340FD8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T13:59:36.854219Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T13:59:36.854362Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, txInfo Status: Committed Kind: ReadOnly TotalDuration: 13.562 ServerDuration: 13.429 QueriesCount: 2 2025-04-07T13:59:36.854483Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T13:59:36.854544Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:36.854570Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, EndCleanup, isFinal: 0 2025-04-07T13:59:36.854625Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ExecuteState, TraceId: 01jr88sb445edg90nk37mhyftr, Sent query response back to proxy, proxyRequestId: 79, proxyId: [9:7490570434993707982:2256] 2025-04-07T13:59:36.855625Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, TxId: 2025-04-07T13:59:36.855735Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, TxId: 2025-04-07T13:59:36.858089Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:36.858131Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, 
ActorId: [9:7490570611087369660:2791], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:36.858157Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:36.858184Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:36.858255Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzkwMWJmNzItYmI4NmExZTUtY2QxMDFiZC1lZGYzODkzNA==, ActorId: [9:7490570611087369660:2791], ActorState: unknown state, Session actor destroyed 2025-04-07T13:59:36.868618Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=Y2FiZmNkNWItZGY2Y2ViNzUtZDBjMDMzMDUtOGFiNGQ2NDE=, ActorId: [9:7490570460763512146:2304], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T13:59:36.868697Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=Y2FiZmNkNWItZGY2Y2ViNzUtZDBjMDMzMDUtOGFiNGQ2NDE=, ActorId: [9:7490570460763512146:2304], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:36.868729Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=Y2FiZmNkNWItZGY2Y2ViNzUtZDBjMDMzMDUtOGFiNGQ2NDE=, ActorId: [9:7490570460763512146:2304], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T13:59:36.868762Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=Y2FiZmNkNWItZGY2Y2ViNzUtZDBjMDMzMDUtOGFiNGQ2NDE=, ActorId: [9:7490570460763512146:2304], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T13:59:36.868855Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=Y2FiZmNkNWItZGY2Y2ViNzUtZDBjMDMzMDUtOGFiNGQ2NDE=, ActorId: [9:7490570460763512146:2304], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-07T13:53:15.293934Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.293956Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.293972Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T13:53:15.298673Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T13:53:15.298725Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.298753Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.305742Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006024s 2025-04-07T13:53:15.313039Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-07T13:53:15.313074Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.313094Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.313146Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008956s 2025-04-07T13:53:15.344936Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T13:53:15.344976Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.344996Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T13:53:15.345058Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007095s 2025-04-07T13:53:15.387348Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744033995387319 2025-04-07T13:53:16.560769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568978201829428:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:16.560936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001209/r3tmp/tmpSaPcoJ/pdisk_1.dat 2025-04-07T13:53:17.287035Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:17.272959Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:53:17.513951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:17.653067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:53:17.950154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:53:17.988017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:17.988106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.006196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:53:18.006279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:53:18.008413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:53:18.028892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:53:18.034227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21507, node 1 2025-04-07T13:53:18.410506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001209/r3tmp/yandexx4wsFT.tmp 2025-04-07T13:53:18.410535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001209/r3tmp/yandexx4wsFT.tmp 2025-04-07T13:53:18.410676Z node 1 :NET_CLASSIFIER WARN: 
successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001209/r3tmp/yandexx4wsFT.tmp 2025-04-07T13:53:18.410810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:53:18.610287Z INFO: TTestServer started on Port 13719 GrpcPort 21507 TClient is connected to server localhost:13719 PQClient connected to localhost:21507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:53:19.512208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T13:53:21.537087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490568978201829428:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:53:21.537161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:53:25.485635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569016984351859:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.485733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490569016984351848:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.485789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:53:25.499782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-07T13:53:25.563125Z node 1 :KQP_PROXY ERROR: TraceId: "01jr88dv4w7fgd62gbbncx321r", Request deadline has expired for 0.557435s seconds 2025-04-07T13:53:25.606936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490569016984351864:2290], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-07T13:53:26.234746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-04-07T13:53:26.247265Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569016856536023:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:26.248865Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDkzMTM1M2ItOTM0NGQwODktNGQ4ZGNkOWEtY2Q2YzQ0MzU=, ActorId: [1:7490569016856535950:2314], ActorState: ExecuteState, TraceId: 01jr88e0j6ahda4fs2zcwgy331, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:26.250907Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:26.254597Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490569016984351901:2295], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:53:26.256351Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGNlYzBmZmItZDYxM2I2NDUtOTM0YTk1ZjYtNzc2NjE5ZGY=, ActorId: [2:7490569016984351824:2284], ActorState: ExecuteState, TraceId: 01jr88e0e814egcdqr9x344a9n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:53:26.256819Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:53:26.486700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-07T13:53:26.806624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:21507", true, true, 1000); 2025-04-07T13:53:27.239421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jr88e1yg2fk9035dgdy4zd2n, Database: , Data ... : 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2025-04-07T13:59:33.125137Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2025-04-07T13:59:33.125163Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2025-04-07T13:59:33.126110Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1744034373125 2025-04-07T13:59:33.126425Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T13:59:33.138540Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:33.138626Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:59:33.138709Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:59:33.138751Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.138785Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-07T13:59:33.138807Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.138837Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T13:59:33.138858Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.138893Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-07T13:59:33.138915Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.138949Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-07T13:59:33.138972Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.139003Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-07T13:59:33.139024Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.139056Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-07T13:59:33.139078Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.139109Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-07T13:59:33.139130Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:59:33.139162Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-07T13:59:33.139186Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:33.139218Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-07T13:59:33.139372Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T13:59:33.139463Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T13:59:33.139494Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T13:59:33.139509Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T13:59:33.139693Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-07T13:59:33.139728Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:59:33.139948Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T13:59:33.139976Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:59:33.140068Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744034373076 queuesize 0 startOffset 0 2025-04-07T13:59:33.143805Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 13 queued_in_partition_duration_ms: 48 } 2025-04-07T13:59:33.143878Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 1 2025-04-07T13:59:33.143921Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 2 2025-04-07T13:59:33.143951Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 3 2025-04-07T13:59:33.143976Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 4 2025-04-07T13:59:33.144004Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 5 2025-04-07T13:59:33.144030Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 6 2025-04-07T13:59:33.144081Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 7 2025-04-07T13:59:33.144110Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 8 2025-04-07T13:59:33.144139Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 9 2025-04-07T13:59:33.144165Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: acknoledged message 10 2025-04-07T13:59:33.147382Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: close. 
Timeout = 0 ms 2025-04-07T13:59:33.147448Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session will now close 2025-04-07T13:59:33.147506Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: aborting 2025-04-07T13:59:33.148075Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: gracefully shut down, all writes complete 2025-04-07T13:59:33.148125Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0] Write session: destroy 2025-04-07T13:59:33.157251Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0 grpc read done: success: 0 data: 2025-04-07T13:59:33.157314Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0 grpc read failed 2025-04-07T13:59:33.157359Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0 grpc closed 2025-04-07T13:59:33.157392Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|200a58c8-2b6f216b-86b4ee4b-94fceb49_0 is DEAD 2025-04-07T13:59:33.158587Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T13:59:33.159092Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T13:59:33.159146Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7490570596309769577:2617] destroyed 2025-04-07T13:59:33.159202Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
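The write-session trace above ends with the server acknowledging ten messages (seqNo 1-10) at partition offsets 0-9 before the session shuts down. A minimal standalone C++ sketch of that seqNo-to-offset acknowledgement bookkeeping follows; it is illustrative only — it is not YDB SDK or test code, and every name in it is invented for this example, assuming nothing beyond what the trace shows (acks arrive in send order, one offset per sequence number, with an already_written flag for deduplicated writes).

    // Standalone illustration of the acknowledgement bookkeeping seen in the trace.
    #include <cstdint>
    #include <deque>
    #include <iostream>
    #include <vector>

    struct Ack {
        uint64_t seqNo;       // sequence number assigned by the writer
        uint64_t offset;      // partition offset reported back by the server
        bool alreadyWritten;  // true if the server deduplicated the message
    };

    class InFlightTracker {
    public:
        void OnSent(uint64_t seqNo) { inFlight_.push_back(seqNo); }

        // Acks arrive in send order, so they match the FIFO of in-flight seqNos.
        void OnAcks(const std::vector<Ack>& acks) {
            for (const Ack& ack : acks) {
                if (inFlight_.empty() || inFlight_.front() != ack.seqNo) {
                    std::cerr << "unexpected ack for seqNo " << ack.seqNo << "\n";
                    continue;
                }
                inFlight_.pop_front();
                std::cout << "acknowledged seqNo " << ack.seqNo
                          << " at offset " << ack.offset
                          << (ack.alreadyWritten ? " (deduplicated)" : "") << "\n";
            }
        }

        bool AllAcknowledged() const { return inFlight_.empty(); }

    private:
        std::deque<uint64_t> inFlight_;
    };

    int main() {
        InFlightTracker tracker;
        std::vector<Ack> acks;
        for (uint64_t seqNo = 1; seqNo <= 10; ++seqNo) {
            tracker.OnSent(seqNo);
            acks.push_back({seqNo, seqNo - 1, false});  // offsets 0..9, as in the trace
        }
        tracker.OnAcks(acks);
        std::cout << (tracker.AllAcknowledged() ? "all writes acknowledged\n"
                                                : "writes still in flight\n");
        return 0;
    }

In the trace this same accounting is what produces the ten "acknoledged message N" lines followed by the graceful "all writes complete" shutdown.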
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:37.375440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:37.375576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.375617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:37.375653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:37.375702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:37.375731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:37.375787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.376318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:37.448972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:37.449034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:37.479950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:37.480087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:37.480235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:37.498918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:37.499139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:37.499809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.500059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:37.502362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.503713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.503780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.503911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:37.503984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.504028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-07T13:59:37.504202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.511192Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:37.746091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:37.746343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.746576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:37.746768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:37.746832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.751895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.752039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:37.752231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.752304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:37.752350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:37.752387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:37.755597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.755666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:37.755702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:37.757596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.757652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.757691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.757767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.761614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:37.763804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:37.764012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:37.765085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.765252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:37.765303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.765554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:37.765610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.765783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.765868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:37.769913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.769981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.770158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.770197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:37.770421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.770466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:37.770568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:37.770600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.770644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:37.770682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.770722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:37.770809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2025-04-07T13:59:37.770886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:37.770929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:37.770971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:37.776435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.776583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.776624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:37.776700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:37.776743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.776872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... txId: 104 2025-04-07T13:59:38.527939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-07T13:59:38.527972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T13:59:38.528047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-07T13:59:38.528358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:59:38.528409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T13:59:38.528439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T13:59:38.528480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-07T13:59:38.532924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:59:38.533029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T13:59:38.533097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T13:59:38.533307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:59:38.533359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:59:38.533385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T13:59:38.545877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 783 } } 2025-04-07T13:59:38.545936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:38.546099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 783 } } 2025-04-07T13:59:38.546192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 783 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-07T13:59:38.546914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T13:59:38.546953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:38.547072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T13:59:38.547117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:59:38.547177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T13:59:38.547230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:38.547261Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:59:38.547292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:59:38.547335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-07T13:59:38.550113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:59:38.550316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at 
schemeshard: 72057594046678944 2025-04-07T13:59:38.550441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T13:59:38.550478Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T13:59:38.550564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-07T13:59:38.550608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:59:38.550651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-07T13:59:38.550715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 104 2025-04-07T13:59:38.550776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-07T13:59:38.550814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T13:59:38.550841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T13:59:38.550982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T13:59:38.551018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-07T13:59:38.551049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-07T13:59:38.551076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T13:59:38.551093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-07T13:59:38.551121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-07T13:59:38.551169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T13:59:38.551549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T13:59:38.551590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T13:59:38.551644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T13:59:38.551683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T13:59:38.551706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T13:59:38.553801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T13:59:38.553870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T13:59:38.553927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:726:2629] TestWaitNotification: OK eventTxId 104 2025-04-07T13:59:38.554513Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:59:38.554715Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 221us result status StatusPathDoesNotExist 2025-04-07T13:59:38.554874Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T13:59:38.555322Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T13:59:38.555512Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 159us result status StatusPathDoesNotExist 2025-04-07T13:59:38.555654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpOlapSparsed::AccessorActualization [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-04-07T13:59:21.460004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570546807776532:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:21.460056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a56/r3tmp/tmp4Wh0bV/pdisk_1.dat 2025-04-07T13:59:22.059799Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:22.084064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:22.084176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:22.092899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63967, node 1 2025-04-07T13:59:22.317291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:22.317321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:22.317340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:22.317451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:22.675850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:22.687983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:22.693624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:22.697571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:22.697832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:22.697847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:22.699399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:22.705596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:22.705633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:22.707513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:22.711156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034362756, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:22.711194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:22.711497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:22.713199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:22.713374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:22.713420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:22.713505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:22.713543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:22.713588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:22.716891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:22.716951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:22.716990Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:22.717091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:26.460226Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570546807776532:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:26.460295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:34.258856Z node 4 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490570602248523613:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:34.258895Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a56/r3tmp/tmpjTVEqX/pdisk_1.dat 2025-04-07T13:59:34.566697Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:34.609379Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:34.609479Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:34.624700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27299, node 4 2025-04-07T13:59:34.869349Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:34.869372Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:34.869378Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:34.869493Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T13:59:35.217012Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.217446Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:35.217469Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.221151Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:35.221342Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:35.221364Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:35.225167Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:35.225201Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:35.229835Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:35.234552Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.244687Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034375286, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:35.244713Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:35.244992Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:35.249844Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:35.250072Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:35.250127Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:35.250224Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:35.250295Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:35.250379Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:35.252440Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:35.252472Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:35.252487Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:35.252546Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |89.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::AccessorActualization [GOOD] Test command err: Trying to start YDB, gRPC: 27261, MsgBus: 29881 2025-04-07T13:59:28.038307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570575564628882:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:28.039034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001020/r3tmp/tmp0iThWE/pdisk_1.dat 2025-04-07T13:59:28.795665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:28.795782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:28.800043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:28.835294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27261, node 1 2025-04-07T13:59:29.057175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:29.057207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:29.057224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:29.057306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29881 TClient is connected to server localhost:29881 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:30.018298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:30.067186Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:30.088000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:30.306592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.306739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.306995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.307144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.307235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.307349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.307434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.307528Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:30.307619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:30.307724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.307834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:30.307937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.385160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.385249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.385478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.385591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.385689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.385816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.385928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.386015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:30.386117Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:30.386215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:30.386307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:30.386443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:30.473089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:30.473153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:30.473369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:30.473461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:30.473549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:30.473647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:30.473736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:30.473878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
eup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:38.682338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:38.682387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:38.682891Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034378240 at tablet 72075186224037891 2025-04-07T13:59:38.682913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570584154564012:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T13:59:38.682929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7490570584154564012:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2025-04-07T13:59:38.682965Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034378240 at tablet 72075186224037890 2025-04-07T13:59:38.682978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T13:59:38.683003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:59:38.683028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:59:38.683053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034078240;tx_id=18446744073709551615;;current_snapshot_ts=1744034377155; 2025-04-07T13:59:38.683070Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:59:38.683087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:38.683101Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:38.683118Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:38.683155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:38.826145Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1744034378240:max} readable: {1744034378786:max} at tablet 72075186224037888 2025-04-07T13:59:38.826349Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037888 2025-04-07T13:59:38.827023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-07T13:59:38.936900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570584154564010:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2025-04-07T13:59:38.962149Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1744034378240:max} readable: {1744034379003:max} at tablet 72075186224037889 2025-04-07T13:59:38.962296Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037889 2025-04-07T13:59:38.962750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-07T13:59:38.963390Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7490570584154564011:2292];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2025-04-07T13:59:38.964213Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1744034378240:max} readable: {1744034379003:max} at tablet 72075186224037890 2025-04-07T13:59:38.964305Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037890 2025-04-07T13:59:38.964648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-07T13:59:38.965169Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570584154564060:2294];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034378240:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2025-04-07T13:59:38.973371Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570584154564083:2300];tablet_id=72075186224037889;parent=[1:7490570584154564011:2292];fline=manager.h:99;event=ask_data;request=request_id=55;3={portions_count=1};; 2025-04-07T13:59:38.973584Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570584154564090:2303];tablet_id=72075186224037890;parent=[1:7490570584154564060:2294];fline=manager.h:99;event=ask_data;request=request_id=56;3={portions_count=1};; 2025-04-07T13:59:38.974926Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570584154564077:2297];tablet_id=72075186224037888;parent=[1:7490570584154564010:2291];fline=manager.h:99;event=ask_data;request=request_id=57;3={portions_count=1};; 2025-04-07T13:59:38.975281Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T13:59:38.975338Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T13:59:38.975781Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T13:59:38.979514Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T13:59:38.979797Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T13:59:38.979994Z 
node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T13:59:39.017672Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2025-04-07T13:59:39.032499Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2025-04-07T13:59:39.034287Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-07T13:59:39.124024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570584154564011:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:39.124062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570584154564010:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:39.146657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490570584154564012:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:39.146708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490570584154564060:2294];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:39.198611Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034378240, txId: 18446744073709551615] shutting down [[10000u]] >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait >> KqpOlapAggregations::Aggregation_MaxL >> KqpOlapIndexes::CountMinSketchIndex |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> KqpOlap::OlapRead_ScanQuery >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] |89.1%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> KqpDatetime64ColumnShard::Csv [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 15612, MsgBus: 28635 2025-04-07T13:59:11.168615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570502237620415:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:11.177733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001177/r3tmp/tmpoTJiCY/pdisk_1.dat 2025-04-07T13:59:11.848174Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:11.859883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:11.859981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:11.875248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15612, node 1 2025-04-07T13:59:12.069418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:12.069447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:12.069457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:12.069578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28635 TClient is connected to server localhost:28635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:12.919929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:12.941558Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:15.023734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570519417490205:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:15.023847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:15.292737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:59:15.401029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:15.401207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:15.401453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:15.401558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:15.401632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:15.401695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:15.401756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:15.401813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:15.401897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:15.401970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:15.402021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:15.402071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490570519417490343:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T13:59:15.404217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:15.404277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:15.404399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:15.404457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:15.404507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:15.404552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:15.404597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:15.404642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:15.404709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:15.404809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:15.404923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:15.405004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570519417490337:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:15.494837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:15.494897Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:15.495066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:15.495155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:15.495229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:15.495304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490570519417490338:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:15.495375Z node 1 :TX_COLU ... 883024Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:38.907133Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:38.907212Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:38.907477Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:38.907631Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:38.907806Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:38.907917Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:38.908015Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:38.908111Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:38.908212Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:38.908311Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:38.908435Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:38.908550Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490570617776326809:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:38.913150Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:38.913226Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:38.913315Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:38.913341Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:38.913503Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:38.913535Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:38.913619Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:38.913651Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:38.913699Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:38.913717Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:38.913747Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:38.913766Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:38.914077Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:38.914114Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:38.914370Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:38.914407Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:38.914596Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:38.914637Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:38.914866Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:38.914899Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:38.915074Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:38.915122Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:38.984166Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570617776326907:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:38.984277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:38.984716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570617776326912:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:38.990375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:38.999921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490570617776326914:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:59:40.315178Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 101, 102 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"101;"}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"102;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2025-04-07T13:59:40.315731Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 103 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"103;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2025-04-07T13:59:40.316727Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 104 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"104;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; >> KqpOlapAggregations::CountAllPushdown+UseLlvm >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet >> KqpOlap::NormalizeAbsentColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-04-07T13:59:17.047247Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570527165973798:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:17.047419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a70/r3tmp/tmpAcMpZq/pdisk_1.dat 2025-04-07T13:59:17.803088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7596, node 1 2025-04-07T13:59:17.931696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T13:59:17.932019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:18.030156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:18.157175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:18.157225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:18.157232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:18.160483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:18.737963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.751348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:18.752547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:18.758585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:18.758760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:18.758774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:18.760897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:18.760922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:18.762626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:18.772314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034358815, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:18.772351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:18.772626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:18.776902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:18.777589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:18.777764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:18.777810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:18.777900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:18.777936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:18.777983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:18.780830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:18.780879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:18.780892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:18.780968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:22.043330Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570527165973798:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:22.043434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:32.800402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:32.800433Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpOlapSysView::StatsSysViewBytesDictActualization >> StreamCreator::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 29606, MsgBus: 14330 2025-04-07T13:59:27.212083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570572087633884:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:27.216957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001027/r3tmp/tmpazg83l/pdisk_1.dat 2025-04-07T13:59:27.742697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:27.759610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:27.759736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:27.762502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29606, node 1 2025-04-07T13:59:27.849193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:27.849227Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:27.849235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:27.849312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14330 TClient is connected to server localhost:14330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:28.513441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:28.531548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:28.617483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:28.617712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:28.617971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:28.618156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:28.618246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:28.618383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:28.618491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:28.618581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:28.618695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:28.618833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:28.618929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:28.619047Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570576382601633:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:28.622486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:28.622556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:28.622722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:28.622779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:28.622957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:28.623009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:28.623147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:28.623187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:28.623248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:28.623274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:28.623307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:28.623345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:28.623854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:28.623910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:28.624098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:28.624119Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:28.624291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:28.624322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:28.624491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:28.624513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:28.624631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:28.624658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:28.656338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570576382601639:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:28.656410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570576382601639:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:28.656576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570576382601639:2291];tablet_id=7207518622 ... 
2075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:28.773025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:28.773073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:28.773168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:28.773198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:28.773244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:28.773263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:28.773557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:28.773609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:28.773856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:28.773892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:28.774007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:28.774038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:28.774222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:28.774252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:28.774384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2025-04-07T13:59:28.774402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; 2025-04-07T13:59:30.627190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570584972536523:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:30.627336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:30.627665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570584972536535:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:30.631579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:30.643675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570584972536537:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:59:32.234492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570572087633884:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:32.234572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:40.919953Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88s3cg5txhnfg25v53gqyk", SessionId: ydb://session/3?node_id=1&id=MmJmYmZiY2UtZGFiYTU4NTctM2ZkMzE3MjktOGYyYTk5ZTk=, Slow query, duration: 10.294451s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n PRAGMA UseBlocks;\n SELECT\n COUNT(DISTINCT id)\n FROM `/Root/tableWithNulls`;\n \n", parameters: 0b 2025-04-07T13:59:40.920429Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034372000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node 
Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (lambda '($26) $26)) (let $2 '('('"_logical_id" '601) '('"_id" '"c29ee1c2-1e22ffd8-bbe589e0-9f423599") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 (KqpBlockReadOlapTableRanges $23 (Void) '('"id") '() '() (lambda '($25) $25))) (return (FromFlow (WideCombiner (WideFromBlocks $24) '-1073741824 $1 (lambda '($27 $28) $27) (lambda '($29 $30 $31) $31) (lambda '($32 $33) $33)))) ))) $2)) (let $4 (DqCnHashShuffle (TDqOutput $3 '0) '('0))) (let $5 (Uint64 '1)) (let $6 (Bool 'false)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1309) '('"_id" '"4d862645-dc7fd2e0-bab8d890-d39c1de2") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($4) (lambda '($34) (block '( (let $35 (lambda '($38 $39))) (let $36 (WideCombiner (ToFlow $34) '"" $1 $35 $35 $1)) (let $37 (Condense1 (NarrowMap $36 (lambda '($40) (AsStruct '('"id" $40)))) (lambda '($41) $5) (lambda '($42 $43) $6) (lambda '($44 $45) (Inc $45)))) (return (FromFlow (ExpandMap $37 (lambda '($46) (Just $46))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($47) (block '( (let $48 (Condense (NarrowMap (WideFromBlocks (WideTakeBlocks (WideToBlocks (WideCondense1 (ToFlow $47) (lambda '($49) $49) (lambda '($50 $51) $6) (lambda '($52 $53) (IfPresent $52 (lambda '($54) (IfPresent $53 (lambda '($55) (Just (AggrAdd $54 $55))) $52)) $53)))) $5)) (lambda '($56) (AsStruct '('Count0 (Unwrap $56))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($57 $58) $6) (lambda '($59 $60) (Just $59)))) (return (FromFlow (Map $48 (lambda '($61) (AsList (AsStruct '('"column0" (Coalesce (Member $61 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1908) '('"_id" '"f8623e94-5c1fb1f7-c39f23f4-62860d27")))) (let $12 '($3 $9 $11)) (let $13 (DqCnValue (TDqOutput $11 '0))) (let $14 '('('"type" '"scan"))) (let $15 (KqpPhysicalTx $12 '($13) '() $14)) (let $16 '"%kqp%tx_result_binding_0_0") (let $17 (ListType (StructType '('"column0" $7)))) (let $18 '('('"_logical_id" '2011) '('"_id" '"8a4637f1-bf6f3d54-20c15234-6293359f") '('"_partition_mode" '"single"))) (let $19 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $18)) (let $20 (DqCnResult (TDqOutput $19 '0) '('"column0"))) (let $21 (KqpTxResultBinding $17 '0 '0)) (let $22 (KqpPhysicalTx '($19) '($20) '('($16 $21)) $14)) (return (KqpPhysicalQuery '($15 $22) '((KqpTxResultBinding $17 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDatetime64ColumnShard::Csv [GOOD] Test command err: Trying to start YDB, gRPC: 25274, MsgBus: 12599 2025-04-07T13:59:29.564319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570580614080705:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:29.564380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001017/r3tmp/tmppe3Xi2/pdisk_1.dat 2025-04-07T13:59:30.205351Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:30.228429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:30.228511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:30.229571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25274, node 1 2025-04-07T13:59:30.363772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:30.363803Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:30.363809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:30.363890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12599 TClient is connected to server localhost:12599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:31.125545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:31.157942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (date Date32 NOT NULL, interval Interval64, PRIMARY KEY (date)) PARTITION BY HASH(date) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T13:59:33.897612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570597793950306:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:33.897732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:34.156131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.237164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:34.237357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:34.237653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:34.237763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:34.237856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:34.237946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:34.238034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:34.238167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:34.238253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:34.238308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:34.238363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:34.238431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570602088917680:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T13:59:34.246853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:34.246920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:34.247013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:34.247041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:34.247194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:34.247216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:34.247302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:34.247331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:34.247389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:34.247413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:34.247446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:34.247473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:34.247884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:34.247918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:34.248113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:34.248142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T13:59:34.248278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:34.248312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:34.248503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:34.248526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:34.248650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-0 ... 86224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:41.374396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:41.374524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:41.374650Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:41.374773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:41.374902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:41.375017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570632309046273:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:41.391400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:41.391466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:41.391560Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:41.391589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:41.391752Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:41.391779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:41.391867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:41.391901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:41.391963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:41.391989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:41.392027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:41.392051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:41.392398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:41.392435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:41.392596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:41.392620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:41.392768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:41.392796Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:41.392954Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:41.392978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:41.393083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:41.393111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:41.478922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570632309046356:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:41.478990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:41.479038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570632309046361:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:41.482379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:41.494760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570632309046363:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T13:59:42.022247Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:16;event=register_group;external_process_id=281474976715662;external_group_id=7;size=1;external_scope_id=7; 2025-04-07T13:59:42.022593Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=25;stage=FO::ACCESSORS; 2025-04-07T13:59:42.023404Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=26;stage=FO::FETCHING; 2025-04-07T13:59:42.024472Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=27;stage=FO::FETCHING; 2025-04-07T13:59:42.024761Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=25;wait=0;internal_group_id=1;allocation_status=Allocated; 2025-04-07T13:59:42.024787Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.024807Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.025073Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=28;stage=FO::MERGE; 2025-04-07T13:59:42.026565Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=27;wait=0;internal_group_id=1;allocation_status=Allocated; 2025-04-07T13:59:42.026592Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=26;wait=0;internal_group_id=1;allocation_status=Allocated; 2025-04-07T13:59:42.026609Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.026620Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.026641Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.026658Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2025-04-07T13:59:42.026929Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:25;event=unregister_group;external_process_id=281474976715662;external_group_id=7;size=1; 2025-04-07T13:59:42.026946Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:139;event=remove_group;external_group_id=7;internal_group_id=1; 2025-04-07T13:59:42.026959Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=28;wait=0;internal_group_id=1;allocation_status=Allocated; 2025-04-07T13:59:42.026974Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2025-04-07T13:59:42.026985Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: 
fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2025-04-07T13:59:42.027802Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:68;event=scope_cleaned;process_id=281474976715662;external_scope_id=7; 2025-04-07T13:59:42.055287Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034381544, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [FAIL] Test command err: 2025-04-07T13:59:23.106572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570555373423384:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:23.106610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a48/r3tmp/tmpv023bK/pdisk_1.dat 2025-04-07T13:59:23.554883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:23.555097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:23.564588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:23.571173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16284, node 1 2025-04-07T13:59:23.688619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:23.688640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:23.688648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:23.696871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:23.981735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.987511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:23.988868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:23.993681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:23.993872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:23.993891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:59:23.998301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:23.998346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:24.000643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:24.017253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:24.022471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034364065, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:24.022506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:24.022781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:24.026176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:24.026354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:24.026418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:24.026534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:24.026585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:24.026647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:24.030254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:24.030329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:24.030345Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:24.030434Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:28.109008Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570555373423384:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:28.109111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp:304, virtual void NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext &): (client.GetCurrentPoolSize() == activeSessionsLimit) failed: (0 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x17D0EB20) NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext&)+5023 (0x1747FBEF) std::__y1::__function::__func, void ()>::operator()()+280 (0x17498038) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x17D4DA39) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x17D15689) NTestSuiteYdbSdkSessionsPool::TCurrentTest::Execute()+1204 (0x17496DE4) NUnitTest::TTestFactory::Execute()+2438 (0x17D16F56) NUnitTest::RunMain(int, char**)+5149 (0x17D4767D) ??+0 (0x7F4091BF4D90) __libc_start_main+128 (0x7F4091BF4E40) _start+41 (0x1553D029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] Test command err: Trying to start YDB, gRPC: 20442, MsgBus: 31230 2025-04-07T13:59:12.919081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570507199572428:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:12.919209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001168/r3tmp/tmpk6FDWd/pdisk_1.dat 2025-04-07T13:59:13.427984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:13.462260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:13.462389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:13.469896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20442, node 1 2025-04-07T13:59:13.588775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:13.588795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:13.588805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:13.588893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31230 TClient is connected to server localhost:31230 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:14.516151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:14.579860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:14.921635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:15.138809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:15.241448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:17.173672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570528674410453:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:17.173810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:17.476373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.528707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.618910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.665597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.691258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.749031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:17.819786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570528674410952:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:17.819868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:17.820230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570528674410957:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:17.824014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:17.839486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570528674410959:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:17.972996Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570507199572428:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:17.973243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:19.042458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.043654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.044604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:59:21.796422Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034361818, txId: 281474976710703] shutting down Trying to start YDB, gRPC: 8506, MsgBus: 64728 2025-04-07T13:59:22.882706Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570552144987029:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:22.882765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001168/r3tmp/tmpgKna4G/pdisk_1.dat 2025-04-07T13:59:23.016549Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:23.027981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:23.028061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:23.029988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8506, node 2 2025-04-07T13:59:23.079458Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:23.079483Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:23.079488Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:23.079600Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64728 TClient is connected to server localhost:64728 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:23.618670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:23.628994Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:59:23.648327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:23.774029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.023115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.064184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.102031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.169532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.249034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570573619825698:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:27.249150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:27.253300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570573619825703:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:27.260812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:27.276227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570573619825705:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T13:59:27.967619Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570552144987029:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:27.967706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:28.995415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.997227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.998650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16063, MsgBus: 6750 2025-04-07T13:59:32.187870Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490570592421084377:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:32.187924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001168/r3tmp/tmpOtjjaW/pdisk_1.dat 2025-04-07T13:59:32.577071Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:32.596468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:32.596565Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:32.598391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16063, node 3 2025-04-07T13:59:32.814952Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:32.814978Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:32.814985Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:32.815103Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6750 TClient is connected to server localhost:6750 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:33.736254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:33.750197Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:33.766385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:33.863558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:34.080173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:59:34.178999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.856818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570609600955248:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:36.856931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:36.917884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.974049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.024052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.071744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.117719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.181567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.188073Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490570592421084377:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:37.188138Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:37.238419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570613895923042:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:37.238525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:37.238792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490570613895923047:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:37.242853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:37.265034Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490570613895923049:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:38.619031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.621224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.622622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> StreamCreator::WithResolvedTimestamps [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue_ToString ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-04-07T13:59:39.136708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570624725664670:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:39.136761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e77/r3tmp/tmp7Xv69I/pdisk_1.dat 2025-04-07T13:59:39.729952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:39.744074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:39.744211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:39.749732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8294 TServer::EnableGrpc on GrpcPort 10987, node 1 2025-04-07T13:59:39.998478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:39.998531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:39.998541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:39.998628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:40.289613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:40.301245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:40.306362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034380431 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034380340 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034380431 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:59:40.497016Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:59:40.497144Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:59:40.497157Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:59:40.497809Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:59:42.829790Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034380431, tx_id: 281474976710658 } } } 2025-04-07T13:59:42.830132Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:59:42.831686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:59:42.832849Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:59:42.832868Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:59:42.867922Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:59:42.867968Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-07T13:59:42.869199Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-07T13:59:43.053622Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-07T13:59:43.053653Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-07T13:59:43.067573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:43.082816Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-07T13:59:43.082840Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034380431 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) >> KqpOlap::BlockChannelScalar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-04-07T13:59:39.926490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570624061376352:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:39.926721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e55/r3tmp/tmpwZZ1iR/pdisk_1.dat 2025-04-07T13:59:40.641504Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:40.643982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:40.644078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:40.648212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8781 TServer::EnableGrpc on GrpcPort 26809, node 1 2025-04-07T13:59:41.188790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:41.188812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:41.188818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:41.188921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:41.559984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:41.591914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034381719 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034381628 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034381719 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2025-04-07T13:59:41.742217Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:59:41.742361Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T13:59:41.742377Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-07T13:59:41.743052Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-07T13:59:43.887328Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034381719, tx_id: 281474976710658 } } } 2025-04-07T13:59:43.887672Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-07T13:59:43.889236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:59:43.890495Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-07T13:59:43.890512Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-07T13:59:43.958131Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-07T13:59:43.958155Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-07T13:59:43.958821Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-07T13:59:44.147955Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-07T13:59:44.147978Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-07T13:59:44.185922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:44.207141Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-07T13:59:44.207162Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034381719 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) >> KqpOlapStats::AddRowsSomeTablesInTableStore >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream >> KqpOlapAggregations::Aggregation_Avg_GroupByNull |89.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlap::PushdownFilter >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore |89.1%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> KqpOlapStatistics::StatsUsageWithTTL |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> KqpOlap::OlapRead_ScanQuery [GOOD] >> KqpOlapAggregations::AggregationCountGroupByPushdown >> DataShardVolatile::UpsertNoLocksArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_ScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 29317, MsgBus: 27931 2025-04-07T13:59:42.573075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570635061673182:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:42.577397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001009/r3tmp/tmp1TedIH/pdisk_1.dat 2025-04-07T13:59:43.040267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:43.040414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:43.043037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29317, node 1 2025-04-07T13:59:43.076333Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:43.076359Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:43.093253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:43.292978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:43.292999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:43.293007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:43.293106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27931 TClient is connected to server localhost:27931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:43.868737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:43.887211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:43.915235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:44.001729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:44.001891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:44.002128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:44.002255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:44.002357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:44.002498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:44.002606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:44.002707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:44.002841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:44.002961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:44.003072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:44.003229Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570639356641140:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:44.038604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:44.038672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:44.038890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:44.038996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:44.039140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:44.039261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:44.039348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:44.039432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:44.039548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:44.039637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:44.039740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:44.039853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639356641141:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:44.042589Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:44.042646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:44.042742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:44.042782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:44.042943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:44.042971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:44.043060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:44.043107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:44.043235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInse ... 
025-04-07T13:59:44.371153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:44.371241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:44.371269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:44.371399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:44.371419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:44.371494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:44.371529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:44.371584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:44.371610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:44.371646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:44.371668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:44.371956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:44.371987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:44.372122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:44.372147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:44.372272Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:44.372300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:44.372433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:44.372454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:44.372592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:44.372613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:44.374555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:44.374591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:44.374669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:44.374693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:44.374876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:44.374898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:44.374970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:44.375007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:44.375069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:44.375099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:44.375138Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:44.375160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:44.375452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:44.375482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:44.375630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:44.375655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:44.375822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:44.375848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:44.376011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:44.376049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:44.376148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:44.376166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:44.415695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2025-04-07T13:59:45.854984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570647946576225:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:45.855156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.200342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570652241543553:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.200421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.200721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570652241543558:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.204880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:59:46.228174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570652241543560:2423], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:59:47.116819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034386269, txId: 18446744073709551615] shutting down >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoin >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> KqpOlapStatistics::StatsUsageWithTTL [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless |89.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T13:58:44.353569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:44.353657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.353697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:44.353729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:44.353807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:44.353833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:44.353896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.354277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-07T13:58:44.476295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:44.476359Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.510304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:44.510422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:44.510606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:44.526241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:44.526437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:44.527022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.527436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:44.532494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.534325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.534385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.534540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:44.534586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.534628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:44.534752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T13:58:44.540939Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.731072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:44.731283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.731490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:44.731785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:44.731860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.734164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.734296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:44.734462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.734534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:44.734584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:44.734612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:44.736609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.736717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:44.736801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:44.738406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.738447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.738482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.738531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.741835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:44.743851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:44.744039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:44.744875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.744996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:44.745035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.745281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:44.745333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.745508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.745585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:44.747422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.747464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.747634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.747674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:44.748005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.748051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:44.748139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:44.748170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.748209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:44.748255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.748304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:44.748338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:44.748394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:44.748437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:44.748470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Pu ... 
295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1003 at step: 5000004 2025-04-07T13:59:50.560225Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:50.560357Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 219043334248 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:50.560410Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-07T13:59:50.560702Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-04-07T13:59:50.560829Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-07T13:59:50.565048Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:50.565094Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T13:59:50.565365Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:50.565409Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-07T13:59:50.566165Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T13:59:50.566217Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-07T13:59:50.566925Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T13:59:50.567017Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T13:59:50.567051Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T13:59:50.567089Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-07T13:59:50.567131Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T13:59:50.567206Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2025-04-07T13:59:50.569813Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1078 } } 2025-04-07T13:59:50.569858Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:50.569985Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1078 } } 2025-04-07T13:59:50.570078Z node 51 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1078 } } 2025-04-07T13:59:50.571573Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 219043334409 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T13:59:50.571621Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-07T13:59:50.571732Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 219043334409 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T13:59:50.571777Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T13:59:50.571855Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 219043334409 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T13:59:50.571912Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:50.571949Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T13:59:50.571987Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T13:59:50.572026Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-07T13:59:50.573057Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T13:59:50.575030Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T13:59:50.575168Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T13:59:50.575548Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T13:59:50.575593Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-07T13:59:50.575694Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-07T13:59:50.575729Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T13:59:50.575774Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-07T13:59:50.575816Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T13:59:50.575854Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-07T13:59:50.575885Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-07T13:59:50.576000Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-07T13:59:50.579902Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-07T13:59:50.579962Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-07T13:59:50.580351Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-07T13:59:50.580443Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-07T13:59:50.580482Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:444:2419] TestWaitNotification: OK eventTxId 1003 2025-04-07T13:59:50.581028Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T13:59:50.582847Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 1.85ms result status StatusSuccess 2025-04-07T13:59:50.583273Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { 
ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-04-07T13:59:21.629795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570544262848358:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:21.629840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a4e/r3tmp/tmptSDMNM/pdisk_1.dat 2025-04-07T13:59:22.159893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:22.159992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:22.174465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:22.214681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21807, node 1 2025-04-07T13:59:22.265086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:22.265348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T13:59:22.287878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:22.288475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:22.288674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:22.288729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T13:59:22.288801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T13:59:22.493510Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:22.493533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:22.493540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:22.493634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:22.771292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:22.784806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:22.786174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:22.788265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:22.788485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:22.788504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:22.791204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:22.791225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:22.793159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:22.801935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034362847, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:22.801978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:22.802291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:22.805629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:22.805817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:22.805872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:22.805990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:22.806032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:22.806093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T13:59:22.812154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:22.812218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:22.812242Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:22.812334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T13:59:22.817163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:27.167707Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490570573626785821:2268];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a4e/r3tmp/tmpnFIaE9/pdisk_1.dat 2025-04-07T13:59:27.229596Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:59:27.344779Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:27.369866Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:27.369950Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:27.376329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28724, node 4 2025-04-07T13:59:27.573946Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:27.573972Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:27.573979Z node 4 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-07T13:59:27.574107Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:27.857466Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.857862Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:27.857883Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.862878Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-04-07T13:59:27.863082Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:27.863100Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:59:27.868202Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:27.868227Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:27.873228Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.877005Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:27.877507Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034367922, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:27.877527Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:27.877763Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:27.884202Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:27.884387Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:27.884438Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:27.884506Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:27.884542Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:27.884598Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:27.886958Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:27.887005Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:27.887018Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:27.887114Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:32.169664Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490570573626785821:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:32.169761Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:42.322744Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:42.322775Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 29165, MsgBus: 22176 2025-04-07T13:59:45.075993Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570648315255663:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:45.076228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ffe/r3tmp/tmpGsIIrM/pdisk_1.dat 2025-04-07T13:59:45.627914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:45.628005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:45.629704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:45.669831Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29165, node 1 2025-04-07T13:59:45.793535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:45.793563Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:45.793571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:45.793672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22176 TClient is connected to server localhost:22176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:46.327833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:46.342176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:46.352982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:46.443631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:46.443832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:46.444090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:46.444185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.444265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.444344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.444425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.444504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:46.444616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:46.445099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:46.445261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:46.445371Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570652610223478:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:46.498959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:46.499037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:46.499296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:46.499400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.499501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.499611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.499690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.499785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:46.499885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:46.500034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:46.500148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:46.500267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570652610223479:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:46.548627Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:46.548706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:46.548913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:46.549032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.549135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.549236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.549340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.549436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570652610223482:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
9:46.601071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:46.601117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:46.601284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:46.601322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:46.601424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:46.601444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:46.601578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:46.601606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:46.601715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:46.601736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; 2025-04-07T13:59:48.903039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570661200158376:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:48.903156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:48.904123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570661200158392:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:48.909575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:48.924976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570661200158394:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T13:59:50.073695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570648315255663:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:50.074617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:50.101113Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034389000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node 
Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1332) '('"_id" '"5be39f49-e79f887b-8b6bf55c-c7d28042") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $21 (Int32 '"6")) (let $22 (Just $21)) (let $23 (Int32 '1)) (let $24 '($22 $23)) (let $25 (If (== $21 (Int32 '2147483647)) $24 '((+ $22 $23) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($24 $25)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'String)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (OptionalType $10)) (let $14 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id" '"jsondoc" '"jsonval")) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $29 (OptionalType (DataType 'JsonDocument))) (let $30 (DataType 'Json)) (let $31 '((VariantType (TupleType (TupleType (DataType 'Uint8) $10) $12)))) (let $32 '((ResourceType '"JsonPath"))) (let $33 (ResourceType '"JsonNode")) (let $34 (DictType $11 $33)) (let $35 '($34)) (let $36 (CallableType '() $31 '($29) $32 $35)) (let $37 '('('"strict"))) (let $38 (Udf '"Json2.JsonDocumentSqlValueUtf8" (Void) (VoidType) '"" $36 (VoidType) '"" $37)) (let $39 (CallableType '() $32 '($11))) (let $40 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $39 (VoidType) '"" '())) (let $41 (Apply $40 (Utf8 '"$.col1"))) (let $42 (Dict $34)) (let $43 (lambda '($54) (block '( (let $55 (Nothing $13)) (return $55) )))) (let $44 (lambda '($56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (SafeCast $58 $10))) (Nothing $13))) (return (If (Exists $56) $57 (Nothing $13))) )))) (let $45 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 $27 '() $28 (lambda '($46) (block '( (let $47 (StructType $9 '('"jsondoc" $29) '('"jsonval" (OptionalType $30)))) (let $48 (KqpOlapApply $47 '('"jsondoc") (lambda '($51) (block '( (let $52 (Apply $38 $51 $41 $42)) (let $53 (Nothing $13)) (return (Visit $52 '0 $43 '1 $44)) ))))) (let $49 '('eq $48 (String '"val1"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $46 $50)) ))))) (return (FromFlow (NarrowMap $45 (lambda '($59 $60 $61) (block '( (let $62 (OptionalType $33)) (let $63 (CallableType '() $31 '($62) $32 $35)) (let $64 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $63 (VoidType) '"" $37)) (let $65 (IfPresent $61 (lambda '($70) (block '( (let $71 '($30 '"" '1)) (let $72 (CallableType '() '($33) $71)) (let $73 (Udf '"Json2.Parse" (Void) (VoidType) '"" $72 (VoidType) '"" '())) (return (Just (Apply $73 $70))) ))) (Nothing $62))) (let $66 (Apply $64 $65 $41 $42)) (let $67 (Visit $66 '0 (lambda '($74) (Nothing $12)) '1 (lambda '($75) $75))) (let $68 (Apply $38 $60 $41 $42)) (let $69 (Visit $68 '0 $43 '1 $44)) (return (AsStruct '('"column1" $67) '('"column2" $69) '('"id" $59))) )))))) ))) '('('"_logical_id" '1403) '('"_id" '"49d1a088-11ea4142-2bfa0c69-cb7e6dd1")))) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (DqPhyStage '($15) (lambda '($76) $76) '('('"_logical_id" '2460) '('"_id" '"441254e6-ba716a5d-b45e9da2-c4dfe295")))) (let $17 '('"id" '"column1" '"column2")) (let $18 (DqCnResult (TDqOutput $16 '0) $17)) (let $19 (KqpTxResultBinding $8 '0 '0)) (let $20 (KqpPhysicalTx '($14 $16) '($18) '('($5 $19)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $20) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $13) $9)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 25018, MsgBus: 7978 2025-04-07T13:59:43.579194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570640474970116:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:43.579244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001006/r3tmp/tmplOhQn6/pdisk_1.dat 2025-04-07T13:59:44.064119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:44.093550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:44.093651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:44.097102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25018, node 1 2025-04-07T13:59:44.230940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:44.230975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:44.230985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:44.231063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7978 TClient is connected to server localhost:7978 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:44.954992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:45.027724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:45.244172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.244393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.250129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.250446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.250589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.250710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.250831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.250939Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.251048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.251145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.251263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.251358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649064905383:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.334050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.334117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.334372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.334477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.334593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.334680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.334766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.334876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.334986Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.335111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.335207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.335311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649064905384:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.388255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.388320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.388563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.388703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.388826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.388920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.389008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.389114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.389225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649064905382: ... 
84Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:45.466116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:45.466198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:45.466222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:45.466403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:45.466429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:45.466508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:45.466546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:45.466605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:45.466626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:45.466662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:45.466687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:45.466989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:45.467031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:45.467225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:45.467259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.467402Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:45.467438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:45.467607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:45.467636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:45.467734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:45.467774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:45.575543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T13:59:47.895670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570657654840403:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:47.895669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570657654840415:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:47.895743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:47.898761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T13:59:47.908297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570657654840441:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:59:48.579748Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570640474970116:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:48.579803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:50.627627Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034388000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"7aebd799-96162dfa-c7b2699a-c233ae26") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 
(ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"d9690cb2-b748681d-67401965-e6d92024")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"3e6dd78-a98df461-94174616-7b09f05e") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsageWithTTL [GOOD] Test command err: Trying to start YDB, gRPC: 4594, MsgBus: 18678 2025-04-07T13:59:47.662876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570655759075982:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:47.663022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff2/r3tmp/tmp9Cmsns/pdisk_1.dat 2025-04-07T13:59:48.191603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:48.191751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:48.193384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:48.256836Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4594, node 1 2025-04-07T13:59:48.438581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:48.438610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:48.438630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:48.438718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18678 TClient is connected to server localhost:18678 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:49.065074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:49.089650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:49.107042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:49.212427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.212624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.217857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.217996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.218091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.218204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.218306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.218403Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:49.218509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:49.218618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.218757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:49.218903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570664349011070:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:49.255786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.255860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.256068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.256156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.256234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.256296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.256344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.256405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:49.256476Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:49.256571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.256697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:49.256805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570664349011024:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:49.285133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.285183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.285382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.285506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.285631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.285720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.285824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.285920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570664349011027:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
ine=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:49.331187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:49.331287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:49.331314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:49.331436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:49.331460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:49.331533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:49.331571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:49.331612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:49.331632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:49.331669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:49.331692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:49.332028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:49.332077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:49.332225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:49.332249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.332388Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:49.332413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:49.332552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:49.332576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:49.332732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:49.332756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:49.333232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:49.333259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:49.333332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:49.333361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:49.333564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:49.333591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:49.333678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:49.333713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:49.333792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:49.333815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:49.333844Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:49.333865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:49.334124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:49.334166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:49.334308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:49.334366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.334482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:49.334510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:49.334657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:49.334695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:49.334790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:49.334807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:49.398305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T13:59:51.378102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570672938945928:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:51.378225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:51.588212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:59:51.648388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570672938945982:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:51.648445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:51.692631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:59:51.745617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570672938946037:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:51.745688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore [GOOD] >> KqpOlap::NormalizeAbsentColumn [GOOD] >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream [GOOD] >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::NormalizeAbsentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 19930, MsgBus: 28683 2025-04-07T13:59:43.960166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570640422966940:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:43.960206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001003/r3tmp/tmpG8XZIQ/pdisk_1.dat 2025-04-07T13:59:44.489706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:44.505625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:44.505751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:44.512291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19930, node 1 2025-04-07T13:59:44.729251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:44.729273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:44.729280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:44.729373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28683 TClient is connected to server localhost:28683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:45.431591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:45.447602Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:45.463987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:45.575927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.577839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.578160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.578350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.578463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.578587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.578712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.578837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.578968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.579071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.579193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.579331Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.639664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.639727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.639957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.640047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.640530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.640658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.640768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.640878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.640962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.641074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.641175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.641286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.695786Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.695850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.696040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.696162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.696252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.696332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.696448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.696542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649012902198:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.837209Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:52.837261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:52.844510Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034392000 at tablet 72075186224037890 2025-04-07T13:59:52.844562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T13:59:52.844614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:59:52.844655Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:59:52.844720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034092000;tx_id=18446744073709551615;;current_snapshot_ts=1744034388327; 2025-04-07T13:59:52.844740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:59:52.844767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.844791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.844817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:52.844869Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490570649012902243:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:52.845037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-07T13:59:52.845077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2025-04-07T13:59:52.845098Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:59:52.845119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:59:52.845142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2025-04-07T13:59:52.845174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T13:59:52.845180Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T13:59:52.845208Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T13:59:52.845240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:59:52.845282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:59:52.845313Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034092000;tx_id=18446744073709551615;;current_snapshot_ts=1744034388068; 2025-04-07T13:59:52.845337Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:59:52.845356Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.845369Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.845388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:52.845449Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:52.852792Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034392000 at tablet 72075186224037891 2025-04-07T13:59:52.852838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T13:59:52.852869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7490570649012902199:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2025-04-07T13:59:52.852905Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034392000 at tablet 72075186224037888 2025-04-07T13:59:52.852919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T13:59:52.852956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T13:59:52.852992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T13:59:52.853029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034092000;tx_id=18446744073709551615;;current_snapshot_ts=1744034388068; 2025-04-07T13:59:52.853047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T13:59:52.853070Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.853087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T13:59:52.853109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T13:59:52.853158Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570649012902200:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T13:59:53.308221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570649012902198:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:53.308271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570649012902200:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:53.323325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490570649012902199:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T13:59:53.323375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490570649012902243:2293];fline=actor.cpp:33;event=skip_flush_writing; >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream [GOOD] Test command err: Trying to start YDB, gRPC: 8999, MsgBus: 14256 2025-04-07T13:59:46.543599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570651075648965:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:46.543651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ffb/r3tmp/tmp42YW0s/pdisk_1.dat 2025-04-07T13:59:46.977794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:46.977866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:46.987001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8999, node 1 2025-04-07T13:59:47.059359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:47.078293Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:47.078510Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:47.154731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:47.154759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:47.154767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-07T13:59:47.154881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14256 TClient is connected to server localhost:14256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:47.787580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.823325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.967692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:48.128698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:48.199723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:49.790240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570663960552335:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.790392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.066072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.107075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.158319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.239488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.279128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.365953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.499696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570668255520143:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.499770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570668255520148:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.499816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.503608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:50.516507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570668255520150:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:51.546253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570651075648965:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:51.546317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; CREATE TABLE `/Root/ColumnTableTest` (time Timestamp NOT NULL, class Utf8 NOT NULL, uniq Utf8 NOT NULL, PRIMARY KEY (time, class, uniq)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T13:59:51.594353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:51.666780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.668305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.668595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.668720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:51.668817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:51.668916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:51.669028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:51.669150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:51.669269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:51.669366Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.669460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:51.669547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672550487824:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:51.680707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:51.680779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:51.680907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:51.680948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:51.681139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:51.681174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:51.681294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:51.681329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:51.681400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:51.681425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:51.681472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:51.681497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:51.681994Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:51.682029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:51.682216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:51.682242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.682384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:51.682410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:51.682586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:51.682617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:51.682732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:51.682760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow13TimestampTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=312;columns=3; 2025-04-07T13:59:52.387520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037919;tx_state=complete;fline=interaction.h:353;batch=time: [ 2025-04-07 13:59:51.747105 ] class: [ "test" ] uniq: [ "test" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710672}],"starts":[{"inc":{"count_not_include":1},"id":281474976710672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710672}]},"p":{"include":2147483647}}]}; 2025-04-07T13:59:52.845838Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034392443, txId: 18446744073709551615] shutting down |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy >> 
DataShardVolatile::UpsertNoLocksArbiter [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter >> KqpOlap::BlockChannelScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16739, MsgBus: 28893 2025-04-07T13:57:26.195188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570051675608956:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:26.195235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001051/r3tmp/tmpsqwz0I/pdisk_1.dat 2025-04-07T13:57:26.772897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:26.817411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:26.817513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:26.842223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16739, node 1 2025-04-07T13:57:26.925027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:26.925057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:26.925065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:26.925160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28893 TClient is connected to server localhost:28893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:27.937740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:27.989756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:28.277989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:28.598695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:28.711067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:31.197743Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570051675608956:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:31.197814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:31.953485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570073150446932:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:31.992108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.256827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.287826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.314198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.370359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.395810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.428273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:32.480426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570077445414727:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.480499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.480583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570077445414732:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:32.484017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:32.493603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570077445414734:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:33.554424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.594521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.641243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.699277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.734232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.858871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.909994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:33.977492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.008917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.036959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.087449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.176799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.275920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.825927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:34.865731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.899287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:34.983802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.018046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.059382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:35.115443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... access permissions } 2025-04-07T13:59:24.251116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:24.269255Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570556955983825:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:27.116932Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.180867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.257764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.346112Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.386640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.671587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.750459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.820781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.887175Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:59:27.963532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.021085Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.115509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:59:28.183782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:59:29.860571Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:59:29.942780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.033616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.151009Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.215132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.290095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.372403Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.467132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.528470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.626808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.684391Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.735314Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.816148Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.894705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-07T13:59:30.991485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.058997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.137526Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.273233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.338575Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.428831Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.511628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.637746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.693832Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-07T13:59:31.748964Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.039539Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2025-04-07T13:59:32.095614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.158993Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.229380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.313873Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.445984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.495417Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:32.495460Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:32.521591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.598049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.652815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.813101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.873464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-07T13:59:32.955694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-07T13:59:33.051696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 >> KqpOlapAggregations::Aggregation_Count_NullMix >> KqpOlap::DuplicatesInIncomingBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelScalar [GOOD] Test command err: Trying to start YDB, gRPC: 27228, MsgBus: 23167 2025-04-07T13:59:45.615165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570646695090563:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:45.615201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ffd/r3tmp/tmp9DopEw/pdisk_1.dat 2025-04-07T13:59:46.236545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:46.243270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:46.243347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:46.249635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27228, node 1 2025-04-07T13:59:46.391698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:46.391723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:46.391730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:46.391801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23167 TClient is connected to server localhost:23167 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:47.128194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.160016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.382979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.654008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:47.748043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:49.625988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570663874961464:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.626126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.973023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.052209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.086052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.114890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.151711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.196238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.264063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570668169929263:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.264149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.264555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570668169929268:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:50.269421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:59:50.280527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570668169929270:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:59:50.615240Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570646695090563:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:50.615305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:51.456100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:59:51.526061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.526252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.526497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.526598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:51.526686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:51.526769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490570672464896962:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.526774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:51.526806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490570672464896962:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.526864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:51.527018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490570672464896962:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.527171Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:51.527281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:51.527374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.527474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:51.527568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570672464896960:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:51.528166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490570672464896962:2469];tablet_id=720751862240379 ... 72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:51.571618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:51.571650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:51.571785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:51.571806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.571900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:51.571920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:51.572124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:51.572156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:51.572244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:51.572262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:51.572623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:51.572658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:51.572747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:51.572768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:51.572961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:51.572996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:51.573113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:51.573141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:51.573191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:51.573227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:51.573261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:51.573282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:51.573531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:51.573560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:51.573748Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:51.573778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.573883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:51.573908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:51.574031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:51.574053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:51.574137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:51.574156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:51.575076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:51.575104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:51.575179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:51.575202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:51.575329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:51.575352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:51.575422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:51.575447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:51.575498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:51.575520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:51.575553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:51.575576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:51.575850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:51.575881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:51.576024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:51.576048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.576155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:51.576179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:51.576305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:51.576327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:51.576408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:51.576447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TxUsage::WriteToTopic_Demo_44 [GOOD] >> KqpOlapIndexes::IndexesActualization >> KqpOlap::OlapRead_UsesScanOnJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63023, MsgBus: 23739 2025-04-07T13:57:00.351965Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569940484307419:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:00.352476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f9/r3tmp/tmp0n59PP/pdisk_1.dat 2025-04-07T13:57:00.676360Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63023, node 1 2025-04-07T13:57:00.751320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:00.751475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.753236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:00.766520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.766576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.766586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.766689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23739 TClient is connected to server localhost:23739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:01.299845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.324415Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:01.341538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.480090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:01.660752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.753848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.444332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953369210860:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.444476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.716863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.750878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.828630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.869142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.895719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.936038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.975538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953369211356:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.975611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.975766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953369211361:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.027342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:04.041474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569953369211363:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:05.350659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569940484307419:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:05.350715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:05.712116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.736893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.776558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.811186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.844227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.957233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.000098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.031990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.064782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.110220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.149249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.220511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.260990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.830140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:06.864808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.901101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.971582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.999892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.026417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.023506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.073629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.152952Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.240594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.306595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.407942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.508776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T13:59:34.570639Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.727375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T13:59:35.816495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.866186Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T13:59:35.948374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.012297Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.099943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.179487Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.245860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.314571Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.403607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.447748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.550078Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.605924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.656270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.724213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.788427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.842718Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-07T13:59:36.946711Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.003245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.089852Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.143728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.203764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.292681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.365998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.651998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-07T13:59:37.716827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.777813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.838138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.898704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T13:59:37.925033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:37.925058Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:37.964524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.024058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.086092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.156105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.339581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.401971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.463400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T13:59:38.507553Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-07T13:59:53.940026Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88scy4fvg1egfkqfp259wg", SessionId: ydb://session/3?node_id=5&id=YmI0MjdiZTUtYmFkNjFmMmItZTg1MDZhM2QtZTZhOGVjNzc=, Slow query, duration: 15.246871s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] >> KqpOlapSysView::StatsSysViewBytesPackActualization >> TxUsage::WriteToTopic_Demo_45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoin [GOOD] Test command err: Trying to start YDB, gRPC: 7708, MsgBus: 12400 2025-04-07T13:59:51.649472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570674484120453:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:51.649511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fef/r3tmp/tmp7pimpQ/pdisk_1.dat 2025-04-07T13:59:52.125875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:52.125984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:52.129573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T13:59:52.137078Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7708, node 1 2025-04-07T13:59:52.313084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:52.313114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:52.313122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:52.313216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12400 TClient is connected to server localhost:12400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:53.029164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:53.046819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:53.068206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:53.217486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:53.217647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:53.217894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:53.218051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:53.218377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:53.218483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:53.218566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:53.218683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:53.218794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:53.218929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:53.219043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:53.219144Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570683074055509:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:53.273545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:53.280796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:53.281008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:53.281117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:53.281236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:53.281319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:53.281406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:53.281488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:53.281609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:53.281745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:53.281852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:53.281938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570683074055507:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:53.323584Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:53.323641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:53.323818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:53.323913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:53.324012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:53.324095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:53.324192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:53.324325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570683074055513:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
Schema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:53.725806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:53.725827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:53.725928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:53.725952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:53.725995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:53.726014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:53.726041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:53.726061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:53.726342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:53.726370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:53.726547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:53.726573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:53.726705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:53.726739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:53.726991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:53.727028Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:53.727126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:53.727145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:53.727594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:53.727623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:53.727720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:53.727745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:53.727905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:53.727927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:53.728008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:53.728035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:53.728106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:53.728130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:53.728160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:53.728191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:53.728470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:53.728576Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:53.729701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:53.729759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:53.729914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:53.729940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:53.730106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:53.730135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:53.730256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:53.730281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:53.762582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T13:59:55.757651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570691663990626:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:55.758312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:55.892550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570691663990649:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:55.892641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:55.892711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570691663990654:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:55.896322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:59:55.908373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570691663990656:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T13:59:56.650626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570674484120453:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:56.650700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:57.090213Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034396006, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 6637, MsgBus: 24011 2025-04-07T13:59:48.550789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570662576129947:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:48.553302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff1/r3tmp/tmppyssgD/pdisk_1.dat 2025-04-07T13:59:49.059396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:49.063531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:49.063607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:49.066979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6637, node 1 2025-04-07T13:59:49.182435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:49.182453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:49.182466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:49.182544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24011 TClient is connected to server localhost:24011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:49.710859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:49.746161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:49.905961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.906210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.906544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.906723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.906833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.906950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.907060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.907164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:49.907300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:49.907423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.907567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:49.907703Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570666871097804:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:49.945775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.945856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.946105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.946219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.946337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.946456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.946546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.946691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:49.946800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:49.946940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.947048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:49.947149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570666871097803:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:49.950285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:49.950344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:49.950476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:49.950505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:49.950778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:49.950817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:49.950932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:49.950969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:49.951038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:49.951061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:49.951106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889 ... 
toreV2Chunks; 2025-04-07T13:59:50.023456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:50.023485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:50.031508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:50.031562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:50.031652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:50.031675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:50.031805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:50.031825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:50.031918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:50.031953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:50.032013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:50.032031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:50.032060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:50.032079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:50.032371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:50.032464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:50.032632Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:50.032655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:50.033260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:50.033295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:50.033472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:50.033506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:50.033620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:50.033656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:50.084125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T13:59:52.096955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570679756000129:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:52.097077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:52.097389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570679756000156:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:52.101228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T13:59:52.112843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570679756000158:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:59:56.658923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570662576129947:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:56.659245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:57.040306Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034393010, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 '('"level" (OptionalType (DataType 'Int32)))) (let $3 '('('"_logical_id" '579) '('"_id" '"b561a94d-680ae87b-640ceb3a-7febd4fe") '('"_wide_channels" (StructType '('_yql_agg_0 $1) $2)))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $13 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $14 '('"level")) (let $15 (KqpWideReadOlapTableRanges $13 (Void) $14 '() '() (lambda '($16) (TKqpOlapAgg $16 '('('_yql_agg_0 'count '"level")) $14)))) (return (FromFlow $15)) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('1))) (let $6 (StructType '('"column1" $1) $2)) (let $7 '('('"_logical_id" '995) '('"_id" '"e7a0a445-dfc1653e-7bde8658-ccc228d1") '('"_wide_channels" $6))) (let $8 (DqPhyStage '($5) (lambda '($17) (block '( (let $18 (lambda '($29 
$30) $30 $29)) (let $19 (WideCombiner (ToFlow $17) '"" (lambda '($20 $21) $21) (lambda '($22 $23 $24) $23) (lambda '($25 $26 $27 $28) (AggrAdd $26 $28)) $18)) (return (FromFlow (WideSort $19 '('('1 (Bool 'true)))))) ))) $7)) (let $9 (DqCnMerge (TDqOutput $8 '0) '('('1 '"Asc")))) (let $10 (DqPhyStage '($9) (lambda '($31) (FromFlow (NarrowMap (ToFlow $31) (lambda '($32 $33) (AsStruct '('"column1" $32) '('"level" $33)))))) '('('"_logical_id" '1007) '('"_id" '"a58e74c8-77ead2cc-dd962af1-7a8bf69b")))) (let $11 '($4 $8 $10)) (let $12 (DqCnResult (TDqOutput $10 '0) '('"level" '"column1"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $11 '($12) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $6) '0 '0)) '('('"type" '"scan_query")))) ) >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> KqpOlapWrite::WriteDeleteCleanGC >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] >> KqpOlap::PushdownFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T13:58:41.780759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:41.780859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:41.780897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:41.780931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:41.781006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:41.781034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:41.781088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:41.781456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:41.853142Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-07T13:58:41.853224Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T13:58:41.864675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:41.864793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:41.864952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:41.875139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:41.875336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:41.875922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:41.876327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:41.881410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:41.883252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:41.883319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:41.883483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:41.883528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:41.883568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:41.883693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T13:58:41.890605Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T13:58:42.021901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:42.022123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.022346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:42.022599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:42.022673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T13:58:42.033611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:42.033764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:42.033980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.034076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:42.034129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:42.034161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:42.041603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.041674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:42.041787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:42.049581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.049648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.049689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:42.049747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:42.053199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:42.060506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:42.060788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:42.061700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:42.061838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:42.061882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-07T13:58:42.062113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:42.062157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:42.062374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:42.062466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:42.069779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:42.069840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:42.070083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:42.070133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:42.070487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:42.070539Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:42.070634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:42.070664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:42.070703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:42.070753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:42.070817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:42.070872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:42.070948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:42.070982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:58:42.071008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Pu ... 
04-07T14:00:00.447621Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:00:00.447652Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-07T14:00:00.447710Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T14:00:00.447964Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:00:00.448112Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2025-04-07T14:00:00.448217Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:00:00.448265Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:00:00.451033Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:00.451812Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:00.452115Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.452415Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.454154Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:00:00.454199Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:00:00.454361Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:00:00.454502Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:00:00.454550Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-04-07T14:00:00.454586Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-07T14:00:00.454776Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:00.454821Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:00:00.454904Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:00.454936Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:00:00.454969Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 
240 2025-04-07T14:00:00.455910Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.455987Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.456016Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:00:00.456046Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T14:00:00.456078Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:00:00.456639Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.456929Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.456961Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:00:00.456991Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:00:00.457020Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:00:00.457082Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-07T14:00:00.461061Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:00.461115Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:00:00.461152Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:00:00.461265Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-07T14:00:00.461296Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T14:00:00.461337Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-07T14:00:00.461376Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T14:00:00.461409Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-07T14:00:00.461437Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-07T14:00:00.461569Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:00:00.461609Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:00:00.462753Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:00:00.462793Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:00:00.462850Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:00:00.473391Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.474052Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:00.476804Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-07T14:00:00.477086Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-07T14:00:00.477126Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-07T14:00:00.477492Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-07T14:00:00.477583Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-07T14:00:00.477613Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:469:2444] TestWaitNotification: OK eventTxId 1003 2025-04-07T14:00:00.478025Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:00:00.478220Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 239us result status StatusSuccess 2025-04-07T14:00:00.478626Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { 
ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TTxDataShardUploadRows::TestUploadRows >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 11052, MsgBus: 16451 2025-04-07T13:59:55.332868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570690593369546:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:55.340124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fee/r3tmp/tmprNJp08/pdisk_1.dat 2025-04-07T13:59:55.962630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:55.998638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:55.998733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:56.003347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11052, node 1 2025-04-07T13:59:56.133286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:56.133309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:56.133316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:56.133405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16451 TClient is connected to server localhost:16451 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:56.735427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:56.761154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:56.774990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:56.876321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.876536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.876943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.877101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.877231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.877354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.877502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.877608Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:56.877735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:56.877846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:56.877937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:56.878079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694888337527:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:56.926549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.926622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.926834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.926960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.927046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.927178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.927284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.927388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:56.927503Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:56.927605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:56.927699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:56.927813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694888337528:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:56.950796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.950889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.951097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.951190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.951354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.951472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.951563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.951644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570694888337529:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ion=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:57.281917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:57.281963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:57.282036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:57.282068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:57.282212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:57.282236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:57.282351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:57.282388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:57.282451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:57.282475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:57.282517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:57.282537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:57.282795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:57.282875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:57.283043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:57.283072Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:57.283208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:57.283227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:57.283375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:57.283414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:57.283509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:57.283528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:57.283848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:57.283876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:57.283973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:57.283996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:57.284130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:57.284154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:57.284233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:57.284255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:57.284315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:57.284344Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:57.284380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:57.284401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:57.284768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:57.284802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:57.284958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:57.284984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:57.285151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:57.285173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:57.285355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:57.285390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:57.285510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:57.285529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:57.321104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T13:59:59.169821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570707773239927:2419], DatabaseId: /Root, 
PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.169946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.170514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570707773239954:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.175695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T13:59:59.189959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570707773239956:2423], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:00:00.330547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570690593369546:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:00.330656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlapAggregations::Aggregation_Count_NullMix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PushdownFilter [GOOD] Test command err: Trying to start YDB, gRPC: 32353, MsgBus: 24965 2025-04-07T13:59:46.801536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570651865678598:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:46.814581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff8/r3tmp/tmpf1F8tM/pdisk_1.dat 2025-04-07T13:59:47.545077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:47.549258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:47.549334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:47.552623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32353, node 1 2025-04-07T13:59:47.689029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:47.689054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:47.689063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:47.689130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24965 TClient is connected to server localhost:24965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:48.391732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:48.436567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:48.498442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:59:48.527862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:59:48.528008Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-04-07T13:59:48.536138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:48.536344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:48.536784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:48.536913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:48.537021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:48.537114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:48.537232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:48.537330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:48.537451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:48.537543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:48.537650Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:48.537739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570660455613849:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:48.541192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:59:48.572064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:59:48.572240Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T13:59:48.581258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:48.581337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:48.581607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:48.581727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:48.581857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:48.582005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:48.582127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:48.582235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:48.582343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:48.582455Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:48.582574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:48.582694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570660455613850:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:48.586959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490570660455613851:2292];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:59:48.616287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490570660455613851:2292];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:59:48.616547Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T13:59:48.624100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570660455613851:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:48.624196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570660455613851:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:48.624490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570660455613851:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normal ... 0570700958429936:2292];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 
4BlockCoalesce\000?.?2\000\000\000/" ; 2025-04-07T14:00:01.075453Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037890 2025-04-07T14:00:01.075970Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7490570700958429905:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2025-04-07T14:00:01.144276Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7490570700958429905:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2025-04-07T14:00:01.145273Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[3:7490570700958429905:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:01.145346Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[3:7490570700958429903:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-07T14:00:01.145371Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[3:7490570700958429903:2289];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:01.145390Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:7490570700958429904:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:01.145408Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[3:7490570700958429936:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:01.145476Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1744034401000:max} readable: {1744034401053:max} at tablet 72075186224037888 2025-04-07T14:00:01.145559Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037888 2025-04-07T14:00:01.146042Z node 3 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7490570700958429904:2290];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2025-04-07T14:00:01.146912Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7490570700958429904:2290];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2025-04-07T14:00:01.147656Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7490570700958429904:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:01.156347Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7490570700958429905:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-07T14:00:01.162847Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7490570700958429960:2295];tablet_id=72075186224037890;parent=[3:7490570700958429905:2291];fline=manager.h:99;event=ask_data;request=request_id=7;3={portions_count=1};; 2025-04-07T14:00:01.163016Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7490570705253397272:2301];tablet_id=72075186224037888;parent=[3:7490570700958429904:2290];fline=manager.h:99;event=ask_data;request=request_id=8;3={portions_count=1};; 2025-04-07T14:00:01.171567Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:01.171605Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:01.172377Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 
2025-04-07T14:00:01.172488Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:01.174527Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:01.176029Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:01.176219Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-07T14:00:01.179520Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2025-04-07T14:00:01.189535Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7490570700958429936:2292];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1744034401000:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2025-04-07T14:00:01.191057Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7490570700958429936:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-07T14:00:01.192015Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7490570705253397279:2304];tablet_id=72075186224037889;parent=[3:7490570700958429936:2292];fline=manager.h:99;event=ask_data;request=request_id=9;3={portions_count=1};; 2025-04-07T14:00:01.192685Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:01.193765Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:01.195122Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2025-04-07T14:00:01.250792Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034401000, txId: 18446744073709551615] shutting down >> KqpOlap::DuplicatesInIncomingBatch [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 63191, MsgBus: 17308 2025-04-07T13:59:57.423477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570700744844893:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:57.423940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe7/r3tmp/tmpdFMU2Z/pdisk_1.dat 
2025-04-07T13:59:57.809698Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63191, node 1 2025-04-07T13:59:57.894395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:57.894489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:57.896531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:57.918641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:57.918666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:57.918674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:57.918791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17308 TClient is connected to server localhost:17308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:58.515552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:58.532509Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:58.540414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:58.615934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:58.616146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:58.616453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:58.616574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:58.616722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:58.616815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:58.616915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:58.617033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:58.617181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:58.617299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:58.617392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:58.617498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570705039812866:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:58.652066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:58.652108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:58.652288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:58.652354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:58.652431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:58.652509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:58.652585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:58.652649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:58.652820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:58.652912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:58.653037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:58.653103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570705039812865:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:58.694108Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:58.694171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:58.694364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:58.694440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:58.694576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:58.694685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:58.694763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:58.694851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570705039812867:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
t=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:58.738313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:58.738425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:58.738446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:58.739011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:58.739041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:58.739124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:58.739145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:58.739294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:58.739328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:58.739403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:58.739429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:58.739529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:58.739549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:58.739582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:58.739600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:58.739858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:58.739886Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:58.740083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:58.740107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:58.740204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:58.740223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:58.740418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:58.740443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:58.740548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:58.740568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls`; 2025-04-07T14:00:00.883427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570713629747745:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:00.883685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:00.884138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570713629747772:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:00.888459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:00.907349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570713629747774:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:02.147754Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034401000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls`; 2025-04-07T14:00:02.424789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570700744844893:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:02.424862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '527) '('"_id" '"d3076a2a-59c2bb33-33dfb245-7b7a73e") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let 
$5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1075) '('"_id" '"85d27918-9883c8d0-2db41440-2294b31")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1178) '('"_id" '"84d51099-bed81edc-bba390dc-9c695a1b") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::DuplicatesInIncomingBatch [GOOD] Test command err: Trying to start YDB, gRPC: 7244, MsgBus: 2770 2025-04-07T13:59:57.809344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570698579400022:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:57.809388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe5/r3tmp/tmpvO6Gex/pdisk_1.dat 2025-04-07T13:59:58.358182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:58.358258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:58.360877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:58.380412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7244, node 1 2025-04-07T13:59:58.602739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:58.602762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:58.602768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:58.602854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2770 TClient is connected to server localhost:2770 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:59.370386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:59.397716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Utf8 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:00:01.982934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570715759269842:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:02.000951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:02.012915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:02.085838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:02.086027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:02.086244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:02.086344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:02.086439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:02.086523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:02.086614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:02.086738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:02.086833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:02.086917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:02.087031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:02.087126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570720054237218:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:00:02.096575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:02.096629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:02.096735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:02.096770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:02.096935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:02.096955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:02.097024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:02.097047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:02.097107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:02.097127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:02.097164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:02.097183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:02.097580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:02.097618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:02.101480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:02.101561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:02.101816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:02.101843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:02.102088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:02.102132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:02.102688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:02.102732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=472;columns=4; 2025-04-07T14:00:02.219538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570720054237353:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:02.219645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:02.222125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570720054237358:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:02.226069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:02.260864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570720054237360:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:02.812909Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570698579400022:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:02.812994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:02.882000Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034402285, txId: 18446744073709551615] shutting down >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestReadSessions >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> KqpOlap::PKDescScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 12469, MsgBus: 29003 2025-04-07T13:59:55.916786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570689693527059:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:55.916829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe8/r3tmp/tmpf7xauT/pdisk_1.dat 2025-04-07T13:59:56.418795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:56.423724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:56.423792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:56.428338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12469, node 1 2025-04-07T13:59:56.538932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:56.538955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:56.538973Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:56.540687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29003 TClient is connected to server localhost:29003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:57.167364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:57.189171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:57.197115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:57.286351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:57.286544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:57.286833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:57.287138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:57.287284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:57.287381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:57.287514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:57.287624Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:57.287745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:57.287820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:57.287932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:57.288068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570698283462306:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:57.334446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:57.335631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:57.335859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:57.335968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:57.336103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:57.336203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:57.336293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:57.336385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:57.336550Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:57.336652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:57.336779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:57.336877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570698283462308:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:57.368235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:57.368294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:57.368497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:57.368653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:57.368781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:57.368873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:57.368958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:57.369055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570698283462309:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ription=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:57.405654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:57.405722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:57.405739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:57.405859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:57.405880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:57.405952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:57.405986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:57.406031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:57.406063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:57.406093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:57.406109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:57.406394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:57.406424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:57.406628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:57.406649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:57.406751Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:57.406770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:57.406951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:57.406972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:57.407049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:57.407064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2025-04-07T13:59:59.098971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570706873397199:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.099045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.099290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570706873397211:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:59.102588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:59.114019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570706873397213:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:03.424969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570689693527059:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:03.425057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:03.711302Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034400000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '870) '('"_id" '"1abc47f4-35beae2c-8118951d-72abc248") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '0)) (let $24 '((Nothing $2) $23)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $23) $24))) (RangeCreate (AsList 
'($24 '((Just (Int32 '"7")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '929) '('"_id" '"ba5344d1-e5da5117-38587953-32a7838e") '('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1352) '('"_id" '"c9e2406d-a6aeea1c-ce89e1e1-e988ea5c") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1364) '('"_id" '"c1eba517-29ec3cbb-7dd2b464-ee7a5edd")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> KqpOlapBlobsSharing::MultipleSplits >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait >> KqpOlapAggregations::Aggregation_Avg_GroupByNull [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> KqpOlapIndexes::CountMinSketchIndex [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 9359, MsgBus: 23505 2025-04-07T13:59:46.573212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570652070230836:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:46.573296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/000ff6/r3tmp/tmpKtISIz/pdisk_1.dat 2025-04-07T13:59:47.049770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:47.051051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:47.051114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:47.063643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9359, node 1 2025-04-07T13:59:47.192489Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:47.192525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:47.192534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:47.192631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23505 TClient is connected to server localhost:23505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:47.817575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:47.850430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:47.864488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:47.981732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:47.981967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:47.982219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:47.982317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:47.982413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:47.982509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:47.982594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:47.982705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:47.982820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:47.982973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:47.983080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:47.983243Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570656365198809:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:48.026416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:48.026482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:48.026752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:48.026891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:48.026987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:48.027115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:48.027216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:48.027321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:48.027440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:48.027544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:48.027638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:48.027752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570656365198806:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:48.060724Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:48.060804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:48.061066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:48.061184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:48.061324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:48.061421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:48.061522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:48.061632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570656365198808:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
7891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:48.104428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:48.104481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:48.104650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:48.104703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:48.104855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:48.104893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:48.105031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:48.105050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:48.105173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:48.105208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; 2025-04-07T13:59:49.966933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570664955133693:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.967006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.967346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570664955133701:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.970398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T13:59:49.978948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570664955133707:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:02.040029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:02.040063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:05.957383Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570652070230836:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:05.964109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:06.528110Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88sp9qb7845a42mc6n648j", SessionId: ydb://session/3?node_id=1&id=ZDc0MDNmYWMtYzI5OGI4ZWUtNmMwNDBiZmYtZjJjZGU1ZGY=, Slow query, duration: 16.562612s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n SELECT\n level, AVG(id), AVG(level)\n FROM `/Root/tableWithNulls`\n WHERE id > 5\n GROUP BY level\n ORDER BY level;\n \n", parameters: 0b 2025-04-07T14:00:06.530163Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034391001, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2753) '('"_id" '"7901810-46c75a0e-d5906da5-5300610d") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $29 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $29) '((Nothing $2) $29))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2812) '('"_id" '"24d78267-d9f8865b-6080b963-fac92bb") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $30 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $31 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $32 (KqpWideReadOlapTableRanges $30 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $31 (lambda '($34) (block '( (let $35 '('"_yql_agg_0_sum" '"sum" '"id")) (let $36 '('"_yql_agg_0_cnt" '"count" '"id")) (let $37 '('"_yql_agg_1_sum" '"sum" '"level")) (let $38 '('"_yql_agg_1_cnt" '"count" '"level")) (let $39 '($35 $36 $37 $38)) (return (TKqpOlapAgg $34 $39 '('"level"))) ))))) (let $33 (lambda '($40 $41 $42 $43 $44) (block '( (let $45 (IfPresent $43 (lambda '($46) (Just '((Convert $46 'Double) $42))) (Nothing $12))) (return '((Convert $41 'Double) $40) $45 $44) )))) (return (FromFlow (WideMap $32 $33))) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2))) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '5542) '('"_id" '"3ef63563-e127fbe-fe6d211e-6842af59") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($47) (block '( (let $48 (lambda '($55 $56 $57 $58) (Nth $56 '0) (Nth $56 '1) $57)) (let $49 (lambda '($59 $60 $61 $62 $63 $64 $65) (block '( (let $66 (IfPresent $61 (lambda '($67) (IfPresent $65 (lambda '($68) (Just '((AggrAdd (Nth $67 '0) (Nth $68 '0)) (AggrAdd (Nth $67 '1) (Nth $68 '1))))) $61)) $65)) (return (AggrAdd (Nth $60 '0) $63) (AggrAdd (Nth $60 '1) $64) $66) )))) (let $50 (lambda '($69 $70 $71 $72) (block '( (let $73 (IfPresent $72 (lambda '($74) 
(Just (Div (Nth $74 '0) (Nth $74 '1)))) (Nothing $18))) (return (Div $70 $71) $73 $69) )))) (let $51 (WideCombiner (ToFlow $47) '"" (lambda '($52 $53 $54) $54) $48 $49 $50)) (return (FromFlow (WideSort $51 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($75) (FromFlow (NarrowMap (ToFlow $75) (lambda '($76 $77 $78) (AsStruct '('"column1" $76) '('"column2" $77) '('"level" $78)))))) '('('"_logical_id" '5554) '('"_id" '"bd0134ae-bb8455c6-bb3e2b10-cbf54446")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::CountMinSketchIndex [GOOD] Test command err: Trying to start YDB, gRPC: 30252, MsgBus: 24749 2025-04-07T13:59:42.457194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570636182855992:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:42.457270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001010/r3tmp/tmpQG1tLI/pdisk_1.dat 2025-04-07T13:59:43.014376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:43.028911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:43.028990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:43.031182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30252, node 1 2025-04-07T13:59:43.149346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:43.149376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:43.149384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:43.149494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24749 TClient is connected to server localhost:24749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:43.783217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:43.795546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:43.813134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:43.970454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:43.970692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:43.970954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:43.971072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:43.971205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:43.971335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:43.971488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:43.971604Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:43.971694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:43.971774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:43.971861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:43.971971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:44.017926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:44.018000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:44.018205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:44.018300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:44.018387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:44.018482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:44.018566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:44.018749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:44.018859Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:44.018952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:44.019052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:44.019161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570640477823949:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:44.049319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:44.049387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:44.049598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:44.049701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:44.049806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:44.049925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:44.050025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:44.050125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
92];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-07T14:00:07.314124Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2025-04-07T14:00:07.314140Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:00:07.314207Z node 1 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-04-07T14:00:07.314230Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034407003 at tablet 72075186224037888 2025-04-07T14:00:07.314251Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:07.314267Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:00:07.314275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:07.314294Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T14:00:07.314327Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:07.314343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034107003;tx_id=18446744073709551615;;current_snapshot_ts=1744034387004; 2025-04-07T14:00:07.314361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:07.314377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314404Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T14:00:07.314412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:07.314454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034107003;tx_id=18446744073709551615;;current_snapshot_ts=1744034388000; 2025-04-07T14:00:07.314484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570640477823948:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:07.314487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:07.314521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:07.314626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:07.314752Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034407003 at tablet 72075186224037890 2025-04-07T14:00:07.314790Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:07.314825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:07.314851Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T14:00:07.314884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034107003;tx_id=18446744073709551615;;current_snapshot_ts=1744034388000; 2025-04-07T14:00:07.314912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:07.314933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:07.314975Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:07.315016Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570640477823972:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:07.332717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570640477823948:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:07.332780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570640477823949:2291];fline=actor.cpp:33;event=skip_flush_writing; TClient::Ls request: /Root/olapTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "olapTable" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034384148 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 19 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 19 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 ColumnTableVersion: 6 ColumnTableSchemaVersion: 6 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 272153240 RowCount: 230000 IndexSize: 0 LastAccessTime: 1744034394562 LastUp... 
(TRUNCATED) >>> cms_res_id of class name COUNT_MIN_SKETCH >>> cms_uid of class name COUNT_MIN_SKETCH >>> cms_level of class name COUNT_MIN_SKETCH >>> cms_message of class name COUNT_MIN_SKETCH >>> cms_ts of class name COUNT_MIN_SKETCH >>> shard actual id: 72075186224037888 >>> path id: 2 >>> shard actual id: 72075186224037889 >>> path id: 2 >>> shard actual id: 72075186224037890 >>> path id: 2 2025-04-07T14:00:07.650243Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570644772791303:2301];tablet_id=72075186224037890;parent=[1:7490570640477823972:2292];fline=manager.h:99;event=ask_data;request=request_id=158;2={portions_count=6};; >>> sketch.GetElementCount() = 76569 2025-04-07T14:00:07.651923Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570640477823992:2295];tablet_id=72075186224037888;parent=[1:7490570640477823948:2290];fline=manager.h:99;event=ask_data;request=request_id=160;2={portions_count=7};; 2025-04-07T14:00:07.653562Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7490570644772791296:2298];tablet_id=72075186224037889;parent=[1:7490570640477823949:2291];fline=manager.h:99;event=ask_data;request=request_id=162;2={portions_count=7};; >>> sketch.GetElementCount() = 153350 >>> sketch.GetElementCount() = 230000 2025-04-07T14:00:07.814476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490570640477823972:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:07.834091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570640477823948:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:07.834150Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570640477823949:2291];fline=actor.cpp:33;event=skip_flush_writing; >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> KqpOlapAggregations::Aggregation_MaxL [GOOD] >> KqpOlap::PKDescScan [GOOD] >> TxUsage::WriteToTopic_Demo_45 [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] Test command err: Trying to start YDB, gRPC: 19624, MsgBus: 25548 2025-04-07T14:00:00.218894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570714783681384:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:00.218953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe1/r3tmp/tmp9Ck0bd/pdisk_1.dat 2025-04-07T14:00:00.740950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:00.750501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:00.750594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:00.760539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19624, node 1 2025-04-07T14:00:01.013124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:01.013151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-04-07T14:00:01.013158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:01.013257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25548 TClient is connected to server localhost:25548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:01.810632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:01.853230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:01.984322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:01.984608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:01.984917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:01.985049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:01.985166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:01.985304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:01.985399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:01.985526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:01.985662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:01.985803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:01.985908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:01.986011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570719078649239:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:02.040118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:02.040179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:02.040445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:02.040585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:02.040731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:02.040848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:02.040941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:02.041032Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:02.041163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:02.041309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:02.041452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:02.041543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:02.074102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:02.074278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:02.074509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:02.074679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:02.074775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:02.074882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:02.075009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:02.075116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:02.075266Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:74905707190786492 ... 75186224037888; 2025-04-07T14:00:09.175568Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:00:09.175606Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:00:09.175671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:09.175727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:09.175762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:09.175784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.175813Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.175836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:09.175886Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:09.175930Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;storage_id=__DEFAULT;tablet_id=72075186224037888;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=0;reason=empty; 2025-04-07T14:00:09.175969Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034409000 at tablet 72075186224037888 2025-04-07T14:00:09.175987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:09.176007Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:09.176029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:09.176055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:09.176069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.176081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.176093Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:09.176098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-07T14:00:09.176125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:09.176149Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037888;self_id=[1:7490570719078649280:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;storage_id=__DEFAULT;tablet_id=72075186224037888;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=0;reason=empty; 2025-04-07T14:00:09.176157Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2025-04-07T14:00:09.176173Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:00:09.176186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570719078649240:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-07T14:00:09.176197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:00:09.176270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:09.176341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:09.176395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:09.176419Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.176433Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.176451Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:09.176489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:09.176538Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; 2025-04-07T14:00:09.177632Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034409000 at tablet 72075186224037890 2025-04-07T14:00:09.177683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:09.177721Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:09.177772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:09.177833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:09.177860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.177874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:09.177895Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:09.177936Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:09.177980Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7490570719078649238:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |89.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpOlap::SimpleQueryOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PKDescScan [GOOD] Test command err: Trying to start YDB, gRPC: 15874, MsgBus: 22142 2025-04-07T14:00:05.585620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570736589014864:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:05.585668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fdf/r3tmp/tmpmjzMoV/pdisk_1.dat 2025-04-07T14:00:06.021816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15874, node 1 2025-04-07T14:00:06.040606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:06.040718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:06.045012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:06.134496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:06.134519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:06.134526Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:06.134607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22142 TClient is connected to server localhost:22142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:06.692732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:06.732932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:06.851836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:06.852065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:06.852342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:06.852445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:06.852557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:06.852654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:06.856065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:06.856182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:06.856286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:06.856383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:06.856470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:06.856563Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570740883982758:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:06.892556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:06.892611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:06.892821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:06.892933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:06.893030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:06.893121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:06.893255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:06.893355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:06.893457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:06.893569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:06.893653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:06.893758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570740883982757:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:06.922312Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:06.922381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:06.922571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:06.922670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:06.922761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:06.922844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:06.922928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:06.923036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570740883982775:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:06.923129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905707408839827 ... :00:07.027708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=145904;columns=5; 2025-04-07T14:00:08.725518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570749473917686:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.725607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.726008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570749473917698:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.729969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:08.746409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570749473917700:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No 
estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ( (let $1 '('"timestamp")) (let $2 (Uint64 '4)) (let $3 (StructType '('"timestamp" (DataType 'Timestamp)))) (let $4 '('('"_logical_id" '378) '('"_id" '"bb7ae8f9-64487354-3aa00895-3664e796") '('"_wide_channels" $3))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $10 (KqpWideReadOlapTableRanges $9 (Void) $1 '('('"ItemsLimit" $2)) '() (lambda '($11) $11))) (return (FromFlow (WideTopSort $10 $2 '('('0 (Bool 'true)))))) ))) $4)) (let $6 (DqCnMerge (TDqOutput $5 '0) '('('0 '"Asc")))) (let $7 (DqPhyStage '($6) (lambda '($12) (FromFlow (NarrowMap (Take (ToFlow $12) $2) (lambda '($13) (AsStruct '('"timestamp" $13)))))) '('('"_logical_id" '391) '('"_id" '"7619e159-89e0f841-cf1486c2-230b0d7c")))) (let $8 (DqCnResult (TDqOutput $7 '0) $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($5 $7) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $3) '0 '0)) '('('"type" '"scan_query")))) ) ============================== {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Desc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"reverse":true,"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Desc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"reverse":true,"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ( (let $1 '('"timestamp")) (let $2 (Uint64 '4)) (let $3 (StructType '('"timestamp" (DataType 'Timestamp)))) (let $4 '('('"_logical_id" '378) '('"_id" '"1624926a-7bceec67-3c1d1e6b-55ff72c5") '('"_wide_channels" $3))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $10 (KqpWideReadOlapTableRanges $9 (Void) $1 '('('"ItemsLimit" $2) '('"Reverse")) '() (lambda '($11) $11))) (return (FromFlow (WideTopSort $10 $2 '('('0 (Bool 'false)))))) ))) $4)) (let $6 (DqCnMerge (TDqOutput $5 '0) '('('0 '"Desc")))) (let $7 (DqPhyStage '($6) (lambda '($12) (FromFlow (NarrowMap (Take (ToFlow $12) $2) (lambda '($13) (AsStruct '('"timestamp" $13)))))) '('('"_logical_id" '391) '('"_id" '"26b0111a-3cbed310-e11657f4-6e9ce821")))) (let $8 (DqCnResult (TDqOutput $7 '0) $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($5 $7) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $3) '0 '0)) '('('"type" '"scan_query")))) ) 2025-04-07T14:00:09.949815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034409005, txId: 18446744073709551615] shutting down >> KqpOlap::CheckEarlyFilterOnEmptySelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxL [GOOD] Test command err: Trying to start YDB, gRPC: 61754, MsgBus: 9550 2025-04-07T13:59:42.509658Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570634741312117:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:42.509708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001013/r3tmp/tmpYFcpTF/pdisk_1.dat 2025-04-07T13:59:43.051263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:43.051364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:43.053687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:43.082879Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61754, node 1 2025-04-07T13:59:43.165413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:43.165438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:43.165449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:43.165571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9550 TClient is connected to server localhost:9550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:43.765477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:43.780588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:43.788206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:43.879013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:43.879218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:43.879504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:43.879646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:43.879726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:43.879786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:43.879850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:43.879917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:43.879989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:43.880070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:43.880123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:43.880201Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570639036280082:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:43.915877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:43.915946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:43.916172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:43.916294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:43.916411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:43.916513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:43.916643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:43.916790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:43.916912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:43.917027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:43.917165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:43.917387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570639036280083:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:43.949250Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:43.949312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:43.949498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:43.949608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:43.949720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:43.949806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:43.949908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:43.949998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570639036280084:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:07.832053Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:07.999051Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:07.999115Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.092182Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. 
Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.092282Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.189049Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.189118Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.281720Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.281770Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.359658Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:08.371135Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:08.428382Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.428456Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.514362Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:00:08.514428Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.605428Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.605480Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.693615Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.693681Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.803411Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.803489Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:08.838353Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:08.988868Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:08.988950Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:00:09.106782Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:09.106840Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:09.200937Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:09.201015Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:09.298142Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:09.298233Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:09.393108Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:09.393201Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODM1ODkwN2YtYjM1N2Q1NmYtZmYzZDljYjQtMjBkYTNiNTc=. TraceId : 01jr88t03t5wxn6wmay899py1x. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:00:09.461424Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:09.472714Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3517, MsgBus: 25946 2025-04-07T13:57:01.590031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569944349379359:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.590076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107f/r3tmp/tmptRqZIz/pdisk_1.dat 2025-04-07T13:57:02.060882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:02.080525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:02.080633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:02.084938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3517, node 1 2025-04-07T13:57:02.183926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:02.183948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:02.183962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:02.184054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25946 TClient is connected to server localhost:25946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.759180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:02.801287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.963511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.129089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.207107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:04.973630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569957234282825:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:04.973758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.200847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.287727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.322974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.349792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.378363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.414498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.492348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569961529250624:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.492482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.493858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569961529250630:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.497967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.511509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569961529250632:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.593093Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569944349379359:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.593137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.756134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.827136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.907673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.948487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.017429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.128509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.163308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.194821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.226295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.254715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.281363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.351373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.385146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.001559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:08.065404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.105114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.139234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.175085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.205949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:08.236157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... 38695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:52.563016Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:52.563023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:52.563113Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:52.563145Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:52.563297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:52.563328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:52.563410Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:52.563442Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:52.563505Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:52.563531Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:52.563568Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:52.563594Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:52.563925Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:52.563977Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:52.564186Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:52.564221Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:52.564367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:52.564400Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:52.564573Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:52.564603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:52.564742Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:52.564782Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:52.565389Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:52.565478Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:52.565712Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:52.565752Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:52.565923Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:52.565967Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:52.566162Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:52.566192Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:52.566300Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:52.566326Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:52.566947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:52.566989Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:52.567086Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:52.567115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:52.567275Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:52.567304Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:52.567391Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:52.567422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:52.567488Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:52.567517Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:52.567558Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:52.567586Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:52.567883Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:52.567942Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:52.568157Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:52.568190Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:52.569208Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:52.569259Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:52.569452Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:52.569485Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:52.569591Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:52.569619Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-04-07T14:00:05.181988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:05.182302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:05.182568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001367/r3tmp/tmpsangHq/pdisk_1.dat 2025-04-07T14:00:05.514729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.559078Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:05.608439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:05.608588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:05.620236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:05.760371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.847523Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:00:05.857309Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:00:05.857997Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:00:05.858271Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.906943Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:00:05.907809Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.907946Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.909671Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:05.909755Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:05.909819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:05.910206Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.946678Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:05.946956Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.947121Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:00:05.947162Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:05.947226Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:00:05.947273Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:05.947712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:05.947792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:05.948019Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:05.948123Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:00:05.948638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:05.948702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:05.948770Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:00:05.948852Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:05.948910Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:05.948983Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:00:05.949044Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:00:05.949089Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:00:05.949124Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:05.949177Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:05.949364Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:00:05.949407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:00:05.949519Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:05.949811Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:00:05.949889Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:05.950004Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:05.950060Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:00:05.950104Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:00:05.950140Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:00:05.950176Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:00:05.950462Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:00:05.950503Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:00:05.950569Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:00:05.950630Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:05.950691Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:00:05.950725Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:00:05.950766Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:00:05.950798Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:00:05.950824Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:00:05.952402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:00:05.952457Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:05.963165Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:05.963239Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:00:05.963271Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:05.963307Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:00:05.963373Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:06.176277Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:06.176343Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:06.176394Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:00:06.176907Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:00:06.176949Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:00:06.177075Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:00:06.177126Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:00:06.177169Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:00:06.177223Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:00:06.183951Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:06.184053Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:06.185222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:06.185280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:06.185336Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:06.185384Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:06.185421Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:00:06.185481Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... execution plan for [3000:281474976715667] at 72075186224037890 executing on unit ReadTableScan 2025-04-07T14:00:08.131941Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompleteOperation 2025-04-07T14:00:08.131963Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:00:08.132102Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2025-04-07T14:00:08.132122Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2025-04-07T14:00:08.132143Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:00:08.132162Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:00:08.132182Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2025-04-07T14:00:08.132225Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:00:08.132240Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2025-04-07T14:00:08.132261Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:08.132283Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-07T14:00:08.132303Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:00:08.132323Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:00:08.143929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:08.144002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:08.144042Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:00:08.144100Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 
72075186224037890 at tablet 72075186224037890 send result to client [1:1076:2874], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:00:08.144165Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:00:11.505944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:11.506178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:11.506232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001367/r3tmp/tmpHX4cbY/pdisk_1.dat 2025-04-07T14:00:11.758759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:11.785932Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:11.835034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:11.835159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:11.849736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:11.980398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:12.007507Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:631:2537] 2025-04-07T14:00:12.007751Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:12.047815Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:12.047954Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:12.049469Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:12.049554Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:12.049608Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:12.049907Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:12.049959Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:12.050046Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:12.050130Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:648:2546] 2025-04-07T14:00:12.050167Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:12.050202Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:00:12.050236Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:12.050966Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:12.051077Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:00:12.051149Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# 
[0:0:0] 2025-04-07T14:00:12.051217Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:12.051254Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:12.051291Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:12.051329Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:12.051466Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:12.051670Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:12.051756Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:12.053448Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:12.064388Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:12.064501Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:12.269321Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T14:00:12.269718Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:12.269768Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:12.270145Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:12.270203Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:12.270252Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:00:12.270498Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:00:12.270631Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:12.270796Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:12.270853Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:00:12.271253Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:12.271621Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:12.274194Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:00:12.274252Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:12.274711Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:00:12.274764Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:00:12.274828Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:12.275884Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:12.275932Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:12.275979Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:00:12.276037Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:00:12.276087Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:00:12.276170Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:12.281505Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:12.283526Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:00:12.284074Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:00:12.284132Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:00:12.297524Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2587], serverId# [2:704:2588], sessionId# [0:0:0] 2025-04-07T14:00:12.297654Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |89.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_45 [GOOD] Test command err: 2025-04-07T13:56:29.198258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569808089537354:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:29.198305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:29.367981Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00163d/r3tmp/tmp9Mpn5b/pdisk_1.dat 
2025-04-07T13:56:29.548521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:29.595432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 28697, node 1 2025-04-07T13:56:29.595549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:29.601381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:29.655246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00163d/r3tmp/yandexi2tXRg.tmp 2025-04-07T13:56:29.655279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00163d/r3tmp/yandexi2tXRg.tmp 2025-04-07T13:56:29.655431Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00163d/r3tmp/yandexi2tXRg.tmp 2025-04-07T13:56:29.655545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:29.701290Z INFO: TTestServer started on Port 20865 GrpcPort 28697 TClient is connected to server localhost:20865 PQClient connected to localhost:28697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:29.976024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:30.003676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:56:30.136240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:30.145996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T13:56:32.090863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569820974439801:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:32.090973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:32.091425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569820974439834:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:32.095032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:32.104893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569820974439836:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:32.328497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:32.337532Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569820974439917:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:32.338737Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODdiNThlYjAtNjFmYzYzZmQtM2ZiNTVmMDAtODExZmQyZWE=, ActorId: [1:7490569820974439798:2307], ActorState: ExecuteState, TraceId: 01jr88kpp95fwbp6wps3hce6g1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:32.341329Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:32.360444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:32.437282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569820974440184:2616] 2025-04-07T13:56:34.206286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569808089537354:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:34.206349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:38.584867Z :WriteToTopic_Demo_4 INFO: TTopicSdkTestSetup started 2025-04-07T13:56:38.597277Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:38.656708Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569846744244258:2784] connected; active server actors: 1 2025-04-07T13:56:38.657035Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:38.657660Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:38.657772Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:38.658236Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:38.668057Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:38.668307Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:38.668832Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:38.669021Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:38.669041Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:38.669056Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:38.669080Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:38.669109Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:38.669138Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:38.669151Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:38.669296Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:38.669311Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.669348Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569846744244290:2433], now have 1 active actors on pipe 2025-04-07T13:56:38.761948Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:38.761990Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569846744244257:2783], now have 1 active actors on pipe 2025-04-07T13:56:38.786498Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569808089537566 RawX2: 4294969496 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test ... 
7894] TxId 281474976710675, State EXECUTED 2025-04-07T14:00:10.872649Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TPersQueue::SendEvReadSetAckToSenders 2025-04-07T14:00:10.872702Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 1744034410888 txid# 281474976710675 TabletSource# 72075186224037895 TabletDest# 72075186224037894 SetTabletConsumer# 72075186224037894 Flags# 0 Seqno# 0} 2025-04-07T14:00:10.872722Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, NewState WAIT_RS_ACKS 2025-04-07T14:00:10.872750Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-07T14:00:10.872782Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {10, 281474976710674} Partitions.size=0 2025-04-07T14:00:10.872790Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T14:00:10.872804Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-07T14:00:10.872820Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {10, 281474976710674} Partitions.size=0 2025-04-07T14:00:10.872836Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] add an TxId 281474976710675 to the list for deletion 2025-04-07T14:00:10.872855Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, NewState DELETING 2025-04-07T14:00:10.872879Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete key for TxId 281474976710675 2025-04-07T14:00:10.872953Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T14:00:10.873257Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTxProcessing::TEvReadSetAck Step: 1744034410888 TxId: 281474976710675 TabletSource: 72075186224037895 TabletDest: 72075186224037894 TabletConsumer: 72075186224037894 Flags: 0 Seqno: 0 2025-04-07T14:00:10.873273Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] Handle TEvReadSetAck 2025-04-07T14:00:10.873288Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] Predicate acks 1/1 2025-04-07T14:00:10.873317Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, State WAIT_RS_ACKS 2025-04-07T14:00:10.873353Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-07T14:00:10.873373Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] WriteId {10, 281474976710674} Partitions.size=0 2025-04-07T14:00:10.873378Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T14:00:10.873393Z node 10 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-07T14:00:10.873409Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] WriteId {10, 281474976710674} Partitions.size=0 2025-04-07T14:00:10.873422Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] add an TxId 281474976710675 to the list for deletion 2025-04-07T14:00:10.873438Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, NewState DELETING 2025-04-07T14:00:10.873458Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete key for TxId 281474976710675 2025-04-07T14:00:10.873516Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-07T14:00:10.875256Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:00:10.875283Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, State 
DELETING 2025-04-07T14:00:10.875303Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] empty tx queue 2025-04-07T14:00:10.875318Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete WriteId {10, 281474976710674} 2025-04-07T14:00:10.875337Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete TxId 281474976710675 2025-04-07T14:00:10.875472Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:00:10.875484Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, State DELETING 2025-04-07T14:00:10.875493Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] empty tx queue 2025-04-07T14:00:10.875503Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete WriteId {10, 281474976710674} 2025-04-07T14:00:10.875514Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete TxId 281474976710675 2025-04-07T14:00:10.889318Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:10.889382Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server connected, pipe [10:7490570755213211028:2995], now have 1 active actors on pipe 2025-04-07T14:00:10.889407Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvPersQueue::TEvStatus 2025-04-07T14:00:10.889664Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-07T14:00:10.891052Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:10.891089Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [10:7490570755213211033:2999], now have 1 active actors on pipe 2025-04-07T14:00:10.891105Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvStatus 2025-04-07T14:00:10.891295Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 
2025-04-07T14:00:10.893420Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0] PartitionId [1] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T14:00:10.893450Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0] PartitionId [1] Generation [1] Write session will now close 2025-04-07T14:00:10.893497Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0] PartitionId [1] Generation [1] Write session: aborting 2025-04-07T14:00:10.893941Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0] PartitionId [1] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T14:00:10.893978Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0] PartitionId [1] Generation [1] Write session: destroy 2025-04-07T14:00:10.896645Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0 grpc closed 2025-04-07T14:00:10.896689Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_2|eff19c66-191af51e-23f21d91-1518b2d8_0 is DEAD 2025-04-07T14:00:10.902208Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T14:00:10.902241Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T14:00:10.902271Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T14:00:10.901625Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:00:10.901678Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:00:10.902684Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:10.902720Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570755213210928:2493] destroyed 2025-04-07T14:00:10.902950Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:10.902973Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7490570755213210931:2493] destroyed 2025-04-07T14:00:10.903011Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:00:10.906086Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T14:00:10.906122Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T14:00:10.911344Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0 grpc read done: success: 0 data: 2025-04-07T14:00:10.911375Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0 grpc read failed 2025-04-07T14:00:10.911400Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0 grpc closed 2025-04-07T14:00:10.911418Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|c781eae1-d5ab2a06-f0941d6b-40c546af_0 is DEAD 2025-04-07T14:00:10.912260Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:00:10.912299Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:00:10.912903Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:10.912940Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570755213210894:2485] destroyed 2025-04-07T14:00:10.912956Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:10.912992Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [10:7490570755213210897:2485] destroyed 2025-04-07T14:00:10.913028Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD]
>> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption
>> KqpOlap::ExtractRangesReverse
>> KqpOlapStats::DescibeTableStore
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
|89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|89.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
>> KqpOlap::SimpleQueryOlapStats
>> TTxDataShardUploadRows::TestUploadShadowRows [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowData
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL [GOOD]
Test command err: Trying to start YDB, gRPC: 17978, MsgBus: 24432
2025-04-07T13:59:44.353220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570644008705729:2053];send_to=[0:7307199536658146131:7762515];
2025-04-07T13:59:44.353267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001000/r3tmp/tmp0uJwy2/pdisk_1.dat
2025-04-07T13:59:44.871750Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-07T13:59:44.886633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-07T13:59:44.886726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-07T13:59:44.888279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17978, node 1
2025-04-07T13:59:44.991153Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-07T13:59:44.991195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-07T13:59:44.991210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-07T13:59:44.991299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24432 TClient is connected to server localhost:24432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-07T13:59:45.639697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:45.657787Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:45.678909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:45.796058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.796299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.796583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.796709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.796807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.796894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.797064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.797184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.797302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.797413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.797513Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.797609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570648303673690:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.861555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.861622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.861868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.861963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.862055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.862162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.862271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.862397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:45.862498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:45.862583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:45.862681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:45.862772Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570648303673691:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:45.904500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.904607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.904828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.904950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:45.905068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:45.905194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:45.905298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:45.905441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570648303673694:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:10.836282Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.023424Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.023490Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. 
SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.107526Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.107584Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.192385Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.192440Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.297097Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.297196Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.395044Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:11.409200Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:11.465638Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.465706Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:00:11.552916Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.552987Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.684463Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.684537Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.778379Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.778443Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.881120Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:11.881241Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:11.941125Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:12.102716Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:00:12.102822Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:12.226674Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:12.226768Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:12.317473Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:12.317546Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:12.403339Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:12.403411Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:12.502683Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:12.502767Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1883:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88t2r33mwk2xgzpw8r829j. SessionId : ydb://session/3?node_id=2&id=M2U0ZDJmYjItYjYyNjkwOWEtY2FhOGQwNzQtYWRhYjkwYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2
2025-04-07T14:00:12.588295Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-07T14:00:12.599771Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
Test command err:
2025-04-07T14:00:05.117246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:05.117541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:05.117788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00136f/r3tmp/tmpjBjNs8/pdisk_1.dat 2025-04-07T14:00:05.474610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.516171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:05.572135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:05.572258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:05.585676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:05.712017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.755795Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:00:05.757133Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:00:05.757775Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:00:05.758056Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.800814Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:00:05.801679Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.801807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.803626Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:05.803722Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:05.803778Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:05.804189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.853662Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:05.853925Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.854100Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:00:05.854166Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:05.854208Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:00:05.854253Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:05.854716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:05.854792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:05.855025Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:05.855147Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:00:05.855631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:05.855676Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:05.855734Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:00:05.855826Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:05.855879Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:05.855935Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:00:05.856000Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:00:05.856045Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:00:05.856082Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:05.856131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:05.856291Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:00:05.856348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:00:05.856477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:05.856776Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:00:05.856855Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:05.856979Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:05.857033Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:00:05.857077Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:00:05.857113Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:00:05.857151Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:00:05.857466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:00:05.857532Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:00:05.857605Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:00:05.857650Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:05.857717Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:00:05.857746Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:00:05.857786Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:00:05.857869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:00:05.857898Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:00:05.859487Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:00:05.859563Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:05.870402Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:05.870490Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:00:05.870524Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:05.870590Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:00:05.870666Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:06.057310Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:06.057389Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:06.057430Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:00:06.057817Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:00:06.057855Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:00:06.057980Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:00:06.058023Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:00:06.058062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:00:06.058097Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:00:06.062827Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:06.062918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:06.063869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:06.063923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:06.063976Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:06.064018Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:06.064061Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:00:06.064111Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 8Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T14:00:13.845519Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2025-04-07T14:00:13.845794Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:923:2746], Recipient [2:923:2746]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.845839Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.845906Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:00:13.845960Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:13.846000Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2025-04-07T14:00:13.846032Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2025-04-07T14:00:13.846075Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2025-04-07T14:00:13.846135Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-07T14:00:13.846172Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2025-04-07T14:00:13.846204Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2025-04-07T14:00:13.846235Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:00:13.846445Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-04-07T14:00:13.846490Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-04-07T14:00:13.846537Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:00:13.846583Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit 
CompletedOperations 2025-04-07T14:00:13.846617Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-07T14:00:13.846642Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:00:13.846667Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-04-07T14:00:13.846707Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:13.846735Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:00:13.846770Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:00:13.846807Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:00:13.858139Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:00:13.858264Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:00:13.858326Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:00:13.858421Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1110:2907], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:00:13.858534Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:00:13.859058Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1110:2907], Recipient [2:927:2748]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-04-07T14:00:13.859116Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-07T14:00:13.859252Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 3500 txid# 281474976715668} 2025-04-07T14:00:13.859303Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2025-04-07T14:00:13.859357Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:13.859394Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:13.859507Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:927:2748], Recipient [2:927:2748]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.859543Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.859589Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:00:13.859626Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:13.859666Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-04-07T14:00:13.859696Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-04-07T14:00:13.859729Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-04-07T14:00:13.859769Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-07T14:00:13.859815Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-07T14:00:13.859848Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-04-07T14:00:13.859877Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:00:13.860118Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-04-07T14:00:13.860146Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:13.860174Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-07T14:00:13.860203Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-04-07T14:00:13.860231Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:00:13.861147Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1131:2926], Recipient [2:927:2748]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:00:13.861218Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:00:13.861534Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-04-07T14:00:13.862183Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:00:13.863698Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-04-07T14:00:13.863747Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-04-07T14:00:13.928299Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T14:00:13.928374Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-04-07T14:00:13.930656Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:927:2748], Recipient [2:927:2748]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.930720Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:13.930809Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:00:13.930848Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:13.930890Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-04-07T14:00:13.930923Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:00:13.930961Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-04-07T14:00:13.931005Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-07T14:00:13.931040Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2025-04-07T14:00:13.931073Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-04-07T14:00:13.931103Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:00:13.931296Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-04-07T14:00:13.931336Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-04-07T14:00:13.931365Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:00:13.931394Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:00:13.931426Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-07T14:00:13.931452Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:00:13.931477Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-04-07T14:00:13.931509Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:13.931539Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-07T14:00:13.931567Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:00:13.931598Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:00:13.944339Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:13.944415Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:13.944454Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:00:13.944515Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1110:2907], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:00:13.944560Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> ExternalIndex::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 31597, MsgBus: 11415 2025-04-07T13:57:00.088586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569940854401083:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:00.088694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001108/r3tmp/tmpeKoOrP/pdisk_1.dat 2025-04-07T13:57:00.570404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31597, node 1 2025-04-07T13:57:00.595321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-07T13:57:00.595539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:00.606633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:00.624911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:00.624940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:00.624949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:00.625079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11415 TClient is connected to server localhost:11415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:01.222108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.252422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.435811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.605655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:01.685220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.280516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953739304685:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.280644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.543419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.589103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.671583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.743944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.769659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.836307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:03.960300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953739305193:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.960357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569953739305198:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.960392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:03.964540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:03.983166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569953739305200:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:05.122376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569940854401083:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:05.122429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:05.319600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.350830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.385652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.426139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.454776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.589592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.616049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.644002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.682440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.717254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.748585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.773194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.798634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.341099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:06.386773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.427970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.467329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.515278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.544789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.623031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... ::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:54.276955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:54.277129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:54.277157Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:54.277266Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:54.277289Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:54.278818Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:54.278899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:54.279103Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:54.279134Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:54.279206Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:54.279234Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:54.279289Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:54.279312Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:54.279347Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:54.279374Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:54.279686Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:54.279722Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:54.279894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:54.279921Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.280050Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:54.280080Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:54.280292Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:54.280323Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:54.280419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:54.280441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:54.282994Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:54.283055Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:54.283179Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:54.283217Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:54.283396Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:54.283437Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:54.283527Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:54.283556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:54.283620Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:54.283645Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:54.283683Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:54.283710Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:54.284007Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:54.284068Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:54.329185Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:54.329271Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.329431Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:54.329467Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:54.329662Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:54.329694Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:54.329815Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:54.329845Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:06.593910Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88swenak7s428mwkzvx771", SessionId: ydb://session/3?node_id=5&id=ZTQzOWY3YzItNDlkOWFhNDItYjA0ODMxMzctYTI4ZDVkMjk=, Slow query, duration: 12.011977s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/\";\n\n-- TPC-H/TPC-R National Market Share Query (Q8)\n-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)\n-- using 1680793381 as a seed to the RNG\n\nselect\n o_year,\n sum(case\n when nation = 'MOZAMBIQUE' then volume\n else 0\n end) / sum(volume) as mkt_share\nfrom\n (\n select\n DateTime::GetYear(o_orderdate) as o_year,\n l_extendedprice * (1 - l_discount) as volume,\n n2.n_name as nation\n from\n part\n cross join supplier\n cross join lineitem\n cross join orders\n cross join customer\n cross join nation n1\n cross join nation n2\n cross join region\n where\n p_partkey = l_partkey\n and s_suppkey = l_suppkey\n and l_orderkey = o_orderkey\n and o_custkey = c_custkey\n and c_nationkey = n1.n_nationkey\n and n1.n_regionkey = r_regionkey\n and r_name = 'AFRICA'\n and s_nationkey = n2.n_nationkey\n and o_orderdate between date('1995-01-01') and date('1996-12-31')\n and p_type = 'ECONOMY PLATED COPPER'\n ) as all_nations\ngroup by\n o_year\norder by\n o_year;", parameters: 0b >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |89.3%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit Test command err: 2025-04-07T13:56:27.395790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569799040265933:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:27.395924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:27.614910Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00165a/r3tmp/tmpKjJX1W/pdisk_1.dat 2025-04-07T13:56:27.836463Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-07T13:56:27.866833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:27.866936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13791, node 1 2025-04-07T13:56:27.873524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:56:27.959995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00165a/r3tmp/yandex9JXK9P.tmp 2025-04-07T13:56:27.960037Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00165a/r3tmp/yandex9JXK9P.tmp 2025-04-07T13:56:27.960223Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00165a/r3tmp/yandex9JXK9P.tmp 2025-04-07T13:56:27.960326Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:28.005483Z INFO: TTestServer started on Port 31383 GrpcPort 13791 TClient is connected to server localhost:31383 PQClient connected to localhost:13791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:28.297936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:28.309967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:28.329565Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:56:28.335292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:56:30.408929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569811925168456:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.409085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.409184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569811925168485:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:30.412287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:30.428570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569811925168487:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:30.646948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:30.681912Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569811925168559:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:30.685568Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2YzMWU1N2ItZGYwY2Q5N2MtMmEzNGM0YTItNDU5OTE2OTE=, ActorId: [1:7490569811925168446:2307], ActorState: ExecuteState, TraceId: 01jr88kn23cphwmppr0wtapvv0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:30.687837Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:30.717146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:30.816395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569816220136128:2621] 2025-04-07T13:56:32.382773Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569799040265933:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:32.382853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:37.021203Z :WriteToTopic_Demo_11 INFO: TTopicSdkTestSetup started 2025-04-07T13:56:37.036304Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:37.050198Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569841989940211:2791] connected; active server actors: 1 2025-04-07T13:56:37.050763Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:37.051730Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:37.051910Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:37.052336Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:37.086634Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:37.086921Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:37.087453Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:37.087632Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:37.087655Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:37.087674Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:37.087703Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:37.087725Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:37.087791Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:37.087813Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:37.087962Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:37.087981Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:37.088031Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569841989940232:2436], now have 1 active actors on pipe 2025-04-07T13:56:37.095458Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:37.095489Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569841989940210:2790], now have 1 active actors on pipe 2025-04-07T13:56:37.122862Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7490569799040266215 RawX2: 4294969496 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "tes ... 
partition {0, {10, 281474976710674}, 100000} 2025-04-07T14:00:12.583785Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:00:12.583828Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] HandleOnInit TEvPQ::TEvDeletePartition 2025-04-07T14:00:12.583992Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:00:12.584308Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 0, State: StateInit] bootstrapping 0 [10:7490570763675195203:7151] 2025-04-07T14:00:12.585776Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:00:12.586255Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:00:12.586505Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] bootstrapping {0, {10, 281474976710674}, 100000} [10:7490570763675195204:7152] 2025-04-07T14:00:12.587708Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:00:12.588048Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:00:12.588239Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:00:12.589063Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:00:12.589440Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:00:12.589572Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 0 count 10 size 1001235 so 0 eo 10 d0000000000_00000000000000000000_00000_0000000010_00000 2025-04-07T14:00:12.589706Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 10 count 1 size 7001240 so 0 eo 11 d0000000000_00000000000000000010_00000_0000000001_00013 2025-04-07T14:00:12.589811Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 11 count 5 size 7401727 so 0 eo 16 d0000000000_00000000000000000011_00000_0000000005_00013 2025-04-07T14:00:12.589920Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 16 count 6 size 600613 so 0 eo 22 d0000000000_00000000000000000016_00000_0000000006_00000| 2025-04-07T14:00:12.590038Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDataStep 2025-04-07T14:00:12.590288Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:00:12.590538Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:00:12.590741Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:TInitDataRangeStep] Got data offset 6 count 5 size 500529 so 6 eo 11 D0000100000_00000000000000000006_00000_0000000005_00000| 2025-04-07T14:00:12.590863Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:TInitDataRangeStep] Got data offset 11 count 1 size 100152 so 6 eo 12 D0000100000_00000000000000000011_00000_0000000001_00000| 2025-04-07T14:00:12.590998Z node 10 :PERSQUEUE ERROR: [topic_A:{0, {10, 281474976710674}, 
100000}:TInitDataRangeStep] StartOffset from meta and blobs are different: 0 != 6 2025-04-07T14:00:12.631612Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:00:12.632092Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-07T14:00:12.632397Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:00:12.632546Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] has a tx info 2025-04-07T14:00:12.632585Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 1744034366256, PlanTxId 281474976710673, ExecStep 1744034366256, ExecTxId 281474976710673 2025-04-07T14:00:12.632899Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:00:12.633371Z node 10 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:12.633386Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-07T14:00:12.633965Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvSubscribeLock for WriteId {10, 281474976710674} 2025-04-07T14:00:12.634112Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976710674 LockNode: 10 Status: STATUS_NOT_FOUND 2025-04-07T14:00:12.634132Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {10, 281474976710674} 2025-04-07T14:00:12.634156Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {10, 281474976710674}, 100000} 2025-04-07T14:00:12.634190Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:00:12.634404Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:00:12.634442Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] HandleOnInit TEvPQ::TEvDeletePartition 2025-04-07T14:00:12.634596Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:00:12.634913Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 0, State: StateInit] bootstrapping 0 [10:7490570763675195240:7158] 2025-04-07T14:00:12.635977Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:00:12.636950Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:00:12.637241Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] bootstrapping {0, {10, 281474976710674}, 100000} [10:7490570763675195241:7159] 2025-04-07T14:00:12.638168Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:00:12.638835Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:00:12.638917Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:00:12.639183Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:00:12.639328Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 
281474976710674}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:00:12.640018Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:00:12.640111Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:00:12.640439Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 0 count 10 size 1001235 so 0 eo 10 d0000000000_00000000000000000000_00000_0000000010_00000 2025-04-07T14:00:12.640585Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 10 count 1 size 7001240 so 0 eo 11 d0000000000_00000000000000000010_00000_0000000001_00013 2025-04-07T14:00:12.640913Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 11 count 5 size 7401727 so 0 eo 16 d0000000000_00000000000000000011_00000_0000000005_00013 2025-04-07T14:00:12.641034Z node 10 :PERSQUEUE DEBUG: [topic_A:0:TInitDataRangeStep] Got data offset 16 count 6 size 600613 so 0 eo 22 d0000000000_00000000000000000016_00000_0000000006_00000| 2025-04-07T14:00:12.641154Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitDataStep 2025-04-07T14:00:12.641264Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:TInitDataRangeStep] Got data offset 6 count 5 size 500529 so 6 eo 11 D0000100000_00000000000000000006_00000_0000000005_00000| 2025-04-07T14:00:12.641386Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:TInitDataRangeStep] Got data offset 11 count 1 size 100152 so 6 eo 12 D0000100000_00000000000000000011_00000_0000000001_00000| 2025-04-07T14:00:12.641520Z node 10 :PERSQUEUE ERROR: [topic_A:{0, {10, 281474976710674}, 100000}:TInitDataRangeStep] StartOffset from meta and blobs are different: 0 != 6 2025-04-07T14:00:12.669460Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:00:12.669961Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-07T14:00:12.670290Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:00:12.670448Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] has a tx info 2025-04-07T14:00:12.670486Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 1744034366256, PlanTxId 281474976710673, ExecStep 1744034366256, ExecTxId 281474976710673 2025-04-07T14:00:12.670666Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:00:12.671160Z node 10 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:12.671177Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-07T14:00:12.671850Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvSubscribeLock for WriteId {10, 281474976710674} 2025-04-07T14:00:12.671914Z node 10 :PERSQUEUE DEBUG: [topic_A:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:00:12.671958Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitConfigStep SUMMARY: AddressSanitizer: heap-use-after-free /-S/ydb/core/tablet/tablet_counters.h:385:26 in NKikimr::TCountersArray::Reset(NKikimr::TCountersArray const&) Shadow bytes around the buggy address: 0x50c000361b00: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000361b80: 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa fa 0x50c000361c00: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0x50c000361c80: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50c000361d00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa =>0x50c000361d80: fd[fd]fd fd fd fd fd fd fd fd fd fd fd fd fd fd 0x50c000361e00: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000361e80: 00 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa 0x50c000361f00: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 fa 0x50c000361f80: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000362000: 00 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==220009==ABORTING >> KqpOlap::ManyColumnShardsWithRestarts >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 22888, MsgBus: 15879 2025-04-07T14:00:07.135333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570743596873707:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fdc/r3tmp/tmpaEDmxy/pdisk_1.dat 2025-04-07T14:00:07.330479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:00:07.493718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:07.516068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:00:07.516169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:07.518396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22888, node 1 2025-04-07T14:00:07.616195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:07.616240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:07.616260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:07.616352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15879 TClient is connected to server localhost:15879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:08.180607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:08.213906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:08.277937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:08.280716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:08.280978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:08.281098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:08.281199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:08.281305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:08.281409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:08.281528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:08.281644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:08.281740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:08.281893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:08.282005Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570747891841509:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:08.314856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:08.314917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:08.315079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:08.315180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:08.315259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:08.315331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:08.315396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:08.315485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:08.315566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:08.315643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:08.315716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:08.315778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570747891841524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:08.353202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:08.353262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:08.353487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:08.353592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:08.353688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:08.353795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:08.353913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:08.354038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570747891841530:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:08.354151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905707478918415 ... 
7T14:00:08.405976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:08.406067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:08.406089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:08.406249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:08.406264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:08.406314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:08.406358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:08.406390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:08.406401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:08.406418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:08.406429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:08.406676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:08.406696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:08.406790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:08.406807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:08.406873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-07T14:00:08.406898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:08.407006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:08.407019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:08.407128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:08.407148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2025-04-07T14:00:10.173725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570756481776405:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:10.173861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570756481776393:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:10.174321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:10.178668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:10.191666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570756481776431:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:14.853980Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570743596873707:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:14.879498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:15.067201Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034411000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1013) '('"_id" '"82539aef-207ed9ba-ba6afffc-187c84af") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '0)) (let $25 '((Nothing $2) $24)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList 
'($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1072) '('"_id" '"c214b3b4-5c41b289-fcf3a9df-8d0e1e2b") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1639) '('"_id" '"86a2170b-729ebbd7-158ba390-288e78e3") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1651) '('"_id" '"e2a0e360-5a14fe54-2a85fb01-bb1d8aed")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::JsonDoc_GetValue_ToInt >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition 2025-04-07 14:00:11,236 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:00:11,412 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 71981 75.5M 75.2M 21.2M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/0018f1/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args 72458 1.1G 1.1G 1018M └─ ydb-services-ydb-ut --trace-path-append /home/runner/.ya/build/build_root/u6pr/0018f1/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/ytest.report.tra Test command err: 2025-04-07T13:50:13.941201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490568194228575100:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:50:13.941393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018f1/r3tmp/tmp08eDHZ/pdisk_1.dat 2025-04-07T13:50:14.684147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:50:14.751768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:50:14.751845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:50:14.757753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12993, node 1 2025-04-07T13:50:14.925255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:50:14.925281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:50:14.925288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:50:14.925374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:50:15.219110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.224643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:15.227008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:50:15.229773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:50:15.230010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:50:15.230027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T13:50:15.232245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:50:15.232275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:50:15.234069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:50:15.239106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033815279, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:15.239142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:50:15.239395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:50:15.241024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:50:15.241316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:15.241476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:15.241514Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:50:15.241592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:50:15.241625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:50:15.241676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:50:15.252539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:50:15.252614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:50:15.252631Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:50:15.252734Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:50:17.515553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568211408445072:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.515664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.724411Z node 1 :TX_PROXY ERROR: Access denied for badguy@builtin with access CreateTable to path Root 2025-04-07T13:50:17.811574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490568211408445108:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.811637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:50:17.819025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.819364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 1 -> 2 2025-04-07T13:50:17.819936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:50:17.819960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T13:50:17.825897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T13:50:17.826085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:17.826277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:50:17.826369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T13:50:17.828275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T13:50:17.828406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:17.828436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:17.828454Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T13:50:17.829057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T13:50:17.829094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T13:50:17.829107Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T13:50:17.845223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T13:50:17.845333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 2 -> 3 2025-04-07T13:50:17.846738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710659:0 ProgressState at tabletId# 72057594046644480 2025-04-07T13:50:18.037480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710659:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T13:50:18.037505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T13:50:18.037588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976710659:0 3 -> 128 2025-04-07T13:50:18.040028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710659:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T13:50:18.053767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744033818093, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:50:18.053822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710659:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744033818093 2025-04-07T13:50:18.053963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 129 2025-04-07T13:50:18.060119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:50:18.060393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 720575 ... ot 2025-04-07T14:00:03.549697Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719638. Ctx: { TraceId: 01jr88t52xe62y5fvn41xy94kh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:03.682438Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719639. Ctx: { TraceId: 01jr88t58423e0hfbds3b9a531, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:03.804823Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719640. Ctx: { TraceId: 01jr88t5c1cwadgnrfx1rt8xhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:03.933422Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719641. Ctx: { TraceId: 01jr88t5fvdkjxg9nhpf14yvkp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.075015Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719642. Ctx: { TraceId: 01jr88t5kx8wnnzbzkpcbw678f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.206320Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719643. Ctx: { TraceId: 01jr88t5r32w5ncv5xtn522jcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.362375Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719644. Ctx: { TraceId: 01jr88t5w7bwx9a414v4qpk4zd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.477125Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719645. 
Ctx: { TraceId: 01jr88t61ebmk080b191xf19r0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.585536Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01jr88t64n2y020zw02tcwq460, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.734381Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01jr88t686am0hw5nt01kg1pv8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.870156Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01jr88t6cg2e8qmm5jhgyb3mht, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:04.998610Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719649. Ctx: { TraceId: 01jr88t6h55nvvzhytny1mk10r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.133012Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01jr88t6n48xrdzwcs7dhj2p7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.290958Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01jr88t6s5amgqjv74x7yjw8q0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.450547Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01jr88t6y87we67j29kzyft22d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.594856Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01jr88t73447hg5ce255vccg5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.710674Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01jr88t77j2t56bcv3sfr7f4mr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.824208Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719655. 
Ctx: { TraceId: 01jr88t7b95yrc5827n4tqkfjf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:05.950275Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01jr88t7f90aa6w662aerz7fvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.080490Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719657. Ctx: { TraceId: 01jr88t7jq4ngwe54day9eq981, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.241257Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01jr88t7q9e5kbfgp616hd5amk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.353570Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719659. Ctx: { TraceId: 01jr88t7vp2dyndcgxc3r85e7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.482668Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01jr88t7zqecvcbpzsd585hwj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.617461Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01jr88t83af94qtps5ybg8crvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.749386Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01jr88t8794aeksfjc59eq6me3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:06.873054Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01jr88t8br30hwxtakpejmhc8w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.036700Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01jr88t8fe9m2qkvghtyp35k0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.181675Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719665. 
Ctx: { TraceId: 01jr88t8mvdemkmt9qz4qm9p83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.374504Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01jr88t8tw4v53w1rc88a8rv0e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.501683Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719667. Ctx: { TraceId: 01jr88t8zhesphpmbamcemp5va, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.655929Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01jr88t936e19sszdj4y2nxzk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzFjMDE0NTYtMjk0ZGI0MC0xMzE3ZTA4Ny02OWJkMDk4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:07.759339Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-07T14:00:07.775548Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/0018f1/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/0018f1/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean >> KqpOlap::SimpleQueryOlap [GOOD] >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL >> KqpOlapIndexes::IndexesActualization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] Test command err: 2025-04-07T14:00:05.149451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:05.149720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:05.149933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00137c/r3tmp/tmph7TdIk/pdisk_1.dat 2025-04-07T14:00:05.487341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.533596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:05.589020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:05.589179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:05.602366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:05.722919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:05.768286Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:646:2546] 2025-04-07T14:00:05.768516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.817702Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:652:2548] 2025-04-07T14:00:05.817938Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.826192Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.826486Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.827978Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:05.828057Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:05.828105Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:05.828404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.862865Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:05.863017Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.863113Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2568] 2025-04-07T14:00:05.863138Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:05.863172Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:00:05.863203Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:05.863946Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:05.864059Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:00:05.864219Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:637:2540], serverId# [1:680:2564], sessionId# [0:0:0] 2025-04-07T14:00:05.864800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:05.864859Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:05.864904Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:05.864949Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:05.865029Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.865226Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:05.865469Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:05.865567Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:05.866044Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.867534Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:00:05.867608Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:00:05.867653Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:00:05.867882Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.867920Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:00:05.867983Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.868066Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:689:2570] 2025-04-07T14:00:05.868094Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:00:05.868140Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:00:05.868176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:00:05.869062Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:00:05.869127Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:00:05.869441Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:638:2541], serverId# [1:688:2569], sessionId# [0:0:0] 2025-04-07T14:00:05.869807Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:00:05.869843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:05.869873Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:00:05.869904Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:00:05.870126Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:00:05.870293Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:05.870359Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:00:05.872344Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:656:2550] 2025-04-07T14:00:05.872532Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.882042Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:659:2552] 2025-04-07T14:00:05.882264Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:05.891084Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.891260Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.892454Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-07T14:00:05.892516Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-07T14:00:05.892561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-07T14:00:05.892862Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.892918Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-07T14:00:05.893003Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.893081Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:725:2587] 2025-04-07T14:00:05.893107Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-07T14:00:05.893131Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-07T14:00:05.893154Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T14:00:05.893820Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-07T14:00:05.893907Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-07T14:00:05.894152Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:05.894221Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:00:05.894294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:00:05.894326Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:05.894354Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:00:05.894385Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:00:05.894836Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:05.894953Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:05.896199Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T14:00:05.896254Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T14:00:05.896298Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 
2025-04-07T14:00:05.896539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:05.896590Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T14:00:05.896652Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:05.896749Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:726:2588] 2025-04-07T14:00:05.896784Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T14:00:05.896807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T14:00:05.896833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:00:05.897076Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T14:00:05.897174Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T14:00:05.897302Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 7 ... TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:15.817964Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:15.818082Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:15.818210Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:648:2546] 2025-04-07T14:00:15.818246Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:15.818281Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:00:15.818318Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:15.818760Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:15.818840Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:00:15.818915Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:628:2534], serverId# [3:639:2541], sessionId# [0:0:0] 2025-04-07T14:00:15.819301Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:15.819349Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:15.819386Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:15.819432Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:15.819572Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:15.819792Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:15.819876Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:15.821469Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:15.833250Z node 3 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:15.833397Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:16.033495Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T14:00:16.034060Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:16.034123Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.035289Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:16.035368Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:16.035420Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:00:16.035659Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:00:16.035791Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:16.035984Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:16.036053Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:00:16.036499Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:16.036829Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:16.070280Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:00:16.070353Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.071139Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:00:16.071202Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:00:16.071272Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:16.072377Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:16.072427Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:16.072474Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:00:16.072535Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:00:16.072584Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:00:16.072703Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.073209Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:16.075154Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:00:16.075221Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:00:16.076052Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:00:16.084321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.084416Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:711:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.084482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.096727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:00:16.102234Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:16.307391Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:16.311729Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:00:16.669681Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88thejcv3q2ayk151q5hbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjRiMjViZDItZDczMTlmZjYtNTBmZmJhNzgtZmVlYmI3ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:16.675531Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:816:2654], serverId# [3:817:2655], sessionId# [0:0:0] 2025-04-07T14:00:16.675797Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:16.688359Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:16.688513Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.810383Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88tj1z71a0g3w6s8fyx1vt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTViNWZhZGUtZDI4MjgwYzMtNDFlNDMzNWItYmQ4NjZmZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:16.812237Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-04-07T14:00:16.821282Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-07T14:00:16.833417Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-07T14:00:16.833500Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.833598Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-07T14:00:16.834275Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-07T14:00:16.834340Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.899879Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88tj6304rjzhxz9zexz2h1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTViNWZhZGUtZDI4MjgwYzMtNDFlNDMzNWItYmQ4NjZmZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:16.900331Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:16.912414Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:16.912514Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:16.927205Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTViNWZhZGUtZDI4MjgwYzMtNDFlNDMzNWItYmQ4NjZmZTA=, ActorId: [3:823:2660], ActorState: ExecuteState, TraceId: 01jr88tj6304rjzhxz9zexz2h1, Create QueryResponse for error on request, msg: 2025-04-07T14:00:16.928164Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88tj6304rjzhxz9zexz2h1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTViNWZhZGUtZDI4MjgwYzMtNDFlNDMzNWItYmQ4NjZmZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:00:16.928446Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:16.928867Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:16.928924Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlap [GOOD] Test command err: Trying to start YDB, gRPC: 22927, MsgBus: 23547 2025-04-07T14:00:12.239643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570764187966695:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:12.239691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd8/r3tmp/tmpejvexb/pdisk_1.dat 2025-04-07T14:00:12.761092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:12.761162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:12.769729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:12.832826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22927, node 1 2025-04-07T14:00:13.021143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:13.021163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:13.021192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:13.021263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23547 TClient is connected to server localhost:23547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:13.967782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:14.016406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:14.197470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.197707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.197995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.198137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.198238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.198366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.198462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.198579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:14.198697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:14.198806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.198888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:14.199011Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570772777901751:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:14.287650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.292760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.293047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.293153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.293267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.293369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.293460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.293555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:14.293656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:14.293763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.293860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:14.293970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570772777901765:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:14.376013Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.376079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.376284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.376376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.376488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.376645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.382428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.382595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570772777901749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:14.382700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74905707727779017 ... 
COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:14.444859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:14.444931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:14.444961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:14.445126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:14.445153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:14.445284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:14.445320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:14.445370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:14.445390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:14.445421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:14.445439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:14.445693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:14.445723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:14.445880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:14.445901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.446012Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:14.446033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:14.446188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:14.446208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:14.446303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:14.446323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:14.471753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:14.471805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:14.471882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:14.471903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:14.472096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:14.472137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:14.472216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:14.472256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:14.472309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:14.472329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:14.472369Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:14.472398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:14.478312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:14.481095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:14.481820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:14.481864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.481989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:14.482013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:14.482203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:14.482228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:14.482314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:14.482333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:14.520783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:00:16.656290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570781367836667:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.656401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.656716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570781367836679:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:16.662004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:16.675290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570781367836681:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:17.240564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570764187966695:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:17.240645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:17.545514Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034417000, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_Some_Null ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesActualization [GOOD] Test command err: Trying to start YDB, gRPC: 25494, MsgBus: 3210 2025-04-07T13:59:58.068738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570704283494650:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:58.068826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe4/r3tmp/tmpVa7kMv/pdisk_1.dat 2025-04-07T13:59:58.503507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25494, node 1 2025-04-07T13:59:58.546631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:58.546696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:58.551293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:58.621251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:58.621276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:58.621284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:58.621441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3210 TClient is connected to server localhost:3210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:59.236018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:59.249326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:59.266549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:59.360901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:59.361090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:59.361368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:59.361523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:59.361617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:59.361726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:59.361850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:59.361951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:59.362085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:59.362178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:59.362277Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:59.362387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570708578462616:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:59.395507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:59.395560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:59.395779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:59.395893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:59.396028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:59.396100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:59.396153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:59.396226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:59.396312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:59.396374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:59.396448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:59.396510Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570708578462618:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:59.433885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:59.433977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:59.434206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:59.437397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:59.437518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:59.437625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:59.437734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:59.437828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:18.093242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=5; 2025-04-07T14:00:18.093283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034118000;tx_id=18446744073709551615;;current_snapshot_ts=1744034401000; 2025-04-07T14:00:18.093341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:18.093375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.093398Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.093427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:18.093489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490570708578462650:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:18.093662Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.093740Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.093951Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.093989Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.094029Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.094062Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.097767Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.098765Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.098909Z node 1 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.099097Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.099286Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.099395Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.100815Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-07T14:00:18.102650Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2025-04-07T14:00:18.103520Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2025-04-07T14:00:18.113577Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:00:18.166386Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-07T14:00:18.174343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:18.174425Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T14:00:18.174451Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:00:18.174488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:00:18.174548Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:18.174604Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T14:00:18.174672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034118000;tx_id=18446744073709551615;;current_snapshot_ts=1744034401000; 2025-04-07T14:00:18.174706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:18.174738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.174756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.174783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:18.174853Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:18.175447Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034418000 at tablet 72075186224037888 2025-04-07T14:00:18.175482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:18.175529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:18.175582Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=4; 2025-04-07T14:00:18.205774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=1744034118000;tx_id=18446744073709551615;;current_snapshot_ts=1744034401000; 2025-04-07T14:00:18.205837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:18.205878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.205905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:18.205938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:18.206009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570708578462622:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:18.226607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570708578462622:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:18.226663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490570708578462616:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:18.226690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570708578462618:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:18.226807Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490570708578462650:2293];fline=actor.cpp:33;event=skip_flush_writing; [[0u]] 5 / 1 2025-04-07T14:00:18.361305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034417000, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_ProjectionOrder |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> KqpOlap::ExtractRangesReverse [GOOD] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |89.3%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] Test command err: Trying to start YDB, gRPC: 18103, MsgBus: 25573 2025-04-07T13:59:44.196983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570644903042802:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:44.197061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001001/r3tmp/tmpzPL3FZ/pdisk_1.dat 2025-04-07T13:59:44.712218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:44.712324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:44.721353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18103, node 1 2025-04-07T13:59:44.830081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:44.887828Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:44.896844Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:44.970528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:44.970546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:44.970552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:44.970633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25573 TClient is connected to server localhost:25573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:45.830817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:45.845549Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:45.868051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:45.999367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:45.999538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:45.999792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:45.999955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.000120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.000542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.000636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.000749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:46.000828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:46.000900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:46.001004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:46.001105Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570649198010774:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:46.046609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:46.046866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:46.047098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:46.047201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.047305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.047407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.047485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.047563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:46.047646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:46.047725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:46.047822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:46.047906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570649198010771:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:46.100228Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:46.100290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:46.100500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:46.100584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:46.100657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:46.101230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:46.101326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570649198010787:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:46.101425Z node 1 :TX_COLUMNSHARD WARN: ... 
04-07T13:59:46.166978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:46.167137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:46.167157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:46.167262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:46.167281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T13:59:46.257758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T13:59:49.192249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570644903042802:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:49.192336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2025-04-07T13:59:57.983003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570700737618796:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:57.983097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:57.983420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570700737618808:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:57.987666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T13:59:58.007954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570700737618810:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T13:59:58.956968Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034398695, txId: 281474976710662] shutting down Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:2824656:0] EntityType: COL BlobRangeSize: 1751872 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Wait changes: 5244720/44800000 2025-04-07T13:59:59.764680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:59.764718Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2025-04-07T14:00:04.367643Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034404257, txId: 281474976710664] shutting down Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:2824656:0] EntityType: COL BlobRangeSize: 1751872 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 5244720/44800000 2025-04-07T14:00:04.409292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:04.462023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000012s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 1070008 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1214208:0] EntityType: COL BlobRangeSize: 144200 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 1072784 TierName: __DEFAULT Activity:  TabletId: 
72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1217248:0] EntityType: COL BlobRangeSize: 144464 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 1067792 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1211576:0] EntityType: COL BlobRangeSize: 143784 PathId: 3 2025-04-07T14:00:06.039236Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034405913, txId: 281474976710668] shutting down Wait changes: 432448/44800000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 1070008 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1214208:0] EntityType: COL BlobRangeSize: 144200 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 1067792 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1211576:0] EntityType: COL BlobRangeSize: 143784 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 1072784 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1217248:0] EntityType: COL BlobRangeSize: 144464 PathId: 3 2025-04-07T14:00:11.591120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034411400, txId: 281474976710670] shutting down 432448/44800000 2025-04-07T14:00:11.645925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:00:11.714262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710673:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000014s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:2824648:0] EntityType: COL BlobRangeSize: 1751864 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 2025-04-07T14:00:13.342484Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744034413131, txId: 281474976710674] shutting down Wait changes: 5244712/44800000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2025-04-07T14:00:18.889247Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034418685, txId: 281474976710676] shutting down Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:2824648:0] EntityType: COL BlobRangeSize: 1751864 PathId: 3 5244712/44800000 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2025-04-07T13:54:53.362918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:54:53.363333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:54:53.363386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2025-04-07T13:54:53.363448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001300/r3tmp/tmpQs8VnD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2976, node 1 TClient is connected to server localhost:32669 2025-04-07T13:54:54.022696Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2025-04-07T13:54:54.023139Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2025-04-07T13:54:54.027512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:54:54.087153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:54:54.096062Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:54:54.096128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:54:54.096161Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:54:54.096480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:54:54.141439Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:54:54.142681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:54:54.142831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:54:54.142980Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:54:54.156357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:54:54.325729Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvProposeTransaction 2025-04-07T13:54:54.325811Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:54:54.326008Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:644:2538] 2025-04-07T13:54:54.430794Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" 
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:54:54.431361Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:54:54.431433Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:54:54.431676Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:54:54.431870Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:54:54.431949Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T13:54:54.432203Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:54:54.434641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:54:54.436053Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:54:54.436119Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2538] txid# 281474976715657 SEND to# [1:643:2537] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:54:54.533206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:716:2598];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T13:54:54.554773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:716:2598];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T13:54:54.555098Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T13:54:54.563628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:54:54.563877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:54:54.564205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:54:54.564368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-07T13:54:54.564498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:54:54.564632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:54:54.565463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:54:54.565636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:54:54.565765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:54:54.565909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:54:54.566043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:54:54.566175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:716:2598];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:54:54.587092Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-07T13:54:54.587410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T13:54:54.587470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T13:54:54.587690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:54.587838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:54:54.588013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:54:54.588064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T13:54:54.588171Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T13:54:54.588238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:54:54.588282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:54:54.588322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T13:54:54.588524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T13:54:54.588587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:54:54.588624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888 ... oFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"b0c3802b-104798e7-c4108201-ab297b7b")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2025-04-07T14:00:12.698226Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.697 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '322) '('"_id" '"dfe3676b-3a4a2712-bb0207f7-90dfc364") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"b0c3802b-104798e7-c4108201-ab297b7b")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2025-04-07T14:00:12.711192Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.711 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:466: Register async execution for node #260 2025-04-07T14:00:12.711348Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 
2025-04-07 14:00:12.711 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {3}, callable #269 2025-04-07T14:00:12.711445Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.711 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #269 finished execution 2025-04-07T14:00:12.711505Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.711 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:594: Node #269 created 0 trackable nodes: 2025-04-07T14:00:12.711583Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.711 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:87: Finish, output #272, status: Async 2025-04-07T14:00:12.712485Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:133: Completed async execution for node #260 2025-04-07T14:00:12.712586Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #260 2025-04-07T14:00:12.712690Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:59: Begin, root #272 2025-04-07T14:00:12.712748Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #272, status: Ok 2025-04-07T14:00:12.712821Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {0}, callable #272 2025-04-07T14:00:12.712881Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {1}, callable #271 2025-04-07T14:00:12.712941Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.712 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {2}, callable #270 2025-04-07T14:00:12.713035Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {3}, callable #267 2025-04-07T14:00:12.713088Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {4}, callable #260 2025-04-07T14:00:12.713280Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, 
tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #260 finished execution 2025-04-07T14:00:12.713342Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:594: Node #260 created 0 trackable nodes: 2025-04-07T14:00:12.713403Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {3}, callable #267 2025-04-07T14:00:12.713487Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #267 finished execution 2025-04-07T14:00:12.713564Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {2}, callable #270 2025-04-07T14:00:12.713792Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #270 finished execution 2025-04-07T14:00:12.713854Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:594: Node #270 created 0 trackable nodes: 2025-04-07T14:00:12.713922Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {1}, callable #271 2025-04-07T14:00:12.714000Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.713 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #271 finished execution 2025-04-07T14:00:12.714081Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:594: Node #271 created 0 trackable nodes: 2025-04-07T14:00:12.714158Z node 1 :KQP_YQL TRACE: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 TRACE ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:387: {0}, callable #272 2025-04-07T14:00:12.714224Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:577: Node #272 finished execution 2025-04-07T14:00:12.714284Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:594: Node #272 created 0 trackable nodes: 2025-04-07T14:00:12.714337Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:87: Finish, output #272, status: Ok 
2025-04-07T14:00:12.714388Z node 1 :KQP_YQL INFO: TraceId: 01jr88tdyb7ggy504eazzffmh2, SessionId: CompileActor 2025-04-07 14:00:12.714 INFO ydb-services-ext_index-ut(pid=173637, tid=0x00007F4F21B57C80) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #272 2025-04-07T14:00:12.734381Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2025-04-07T14:00:12.734461Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716246 ProcessProposeKqpTransaction 2025-04-07T14:00:12.745086Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2025-04-07T14:00:12.745159Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716247 ProcessProposeKqpTransaction 2025-04-07T14:00:12.993407Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:716:2598];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:12.993724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:720:2600];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:12.993786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:723:2602];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:12.993841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:728:2604];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:13.007290Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:716:2598];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:13.007461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:720:2600];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-07T14:00:13.007552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:723:2602];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-07T14:00:13.007633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:728:2604];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> KqpOlap::SimpleQueryOlapStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ExtractRangesReverse [GOOD] Test command err: Trying to start YDB, gRPC: 30699, MsgBus: 4016 2025-04-07T14:00:14.878231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570774902132961:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:14.878591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd3/r3tmp/tmpkNJmVp/pdisk_1.dat 2025-04-07T14:00:15.437971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30699, node 1 2025-04-07T14:00:15.441874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:00:15.441973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:15.445743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:15.536403Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:15.536421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:15.536432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:15.536520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4016 TClient is connected to server localhost:4016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:16.161573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:16.178488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:16.196275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:16.292416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.292805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.293062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.293323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.293459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.293584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.293664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.293729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:16.293799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:16.293877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.293959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:16.294028Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570783492068083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:16.335039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.335097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.335246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.335340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.335395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.335458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.335562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.335638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:16.335709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:16.335777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.335846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:16.335901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783492068084:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:16.364205Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.364261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.364406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.364470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.364521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.364589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.364650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.365077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783492068087:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:16.413315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:16.413356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:16.414158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:16.414192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.414315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:16.414337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:16.414481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:16.414502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:16.414600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:16.414618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:16.415009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:16.415047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:16.415130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:16.415152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:16.415269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:16.415287Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:16.415366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:16.415436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:16.415516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:16.415541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:16.415570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:16.415594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:16.415848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:16.415876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:16.416026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:16.416046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.416141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:16.416159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:16.416295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:16.416319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:16.416421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2025-04-07T14:00:16.416440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:16.478945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; ==================================== QUERY: SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp` < CAST(1000100 AS Timestamp) AND `timestamp` > CAST(1000095 AS Timestamp)) OR (`timestamp` < CAST(1000300 AS Timestamp) AND `timestamp` >= CAST(1000295 AS Timestamp)) OR (`timestamp` <= CAST(1000400 AS Timestamp) AND `timestamp` > CAST(1000395 AS Timestamp)) OR (`timestamp` <= CAST(1000500 AS Timestamp) AND `timestamp` >= CAST(1000495 AS Timestamp)) OR (`timestamp` <= CAST(1000505 AS Timestamp) AND `timestamp` >= CAST(1000499 AS Timestamp)) OR (`timestamp` < CAST(1000510 AS Timestamp) AND `timestamp` >= CAST(1000505 AS Timestamp)) OR (`timestamp` <= CAST(1001000 AS Timestamp) AND `timestamp` >= CAST(1000999 AS Timestamp)) OR (`timestamp` > CAST(1002000 AS Timestamp)) ORDER BY `timestamp` DESC LIMIT 1000; RESULT: 2025-04-07T14:00:18.525209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570792082002991:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:18.525315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:18.525519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570792082003023:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:18.537307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:18.552347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:00:18.552646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570792082003025:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:19.866492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570774902132961:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:19.866596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:20.404468Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034420002, txId: 18446744073709551615] shutting down timestamp: 1970-01-01T00:00:01.001000Z timestamp: 1970-01-01T00:00:01.000999Z timestamp: 1970-01-01T00:00:01.000509Z timestamp: 1970-01-01T00:00:01.000508Z timestamp: 1970-01-01T00:00:01.000507Z timestamp: 1970-01-01T00:00:01.000506Z timestamp: 1970-01-01T00:00:01.000505Z timestamp: 1970-01-01T00:00:01.000504Z timestamp: 1970-01-01T00:00:01.000503Z timestamp: 1970-01-01T00:00:01.000502Z timestamp: 1970-01-01T00:00:01.000501Z timestamp: 1970-01-01T00:00:01.000500Z timestamp: 1970-01-01T00:00:01.000499Z timestamp: 1970-01-01T00:00:01.000498Z timestamp: 1970-01-01T00:00:01.000497Z timestamp: 1970-01-01T00:00:01.000496Z timestamp: 1970-01-01T00:00:01.000495Z timestamp: 1970-01-01T00:00:01.000400Z timestamp: 1970-01-01T00:00:01.000399Z timestamp: 1970-01-01T00:00:01.000398Z timestamp: 1970-01-01T00:00:01.000397Z timestamp: 1970-01-01T00:00:01.000396Z timestamp: 1970-01-01T00:00:01.000299Z timestamp: 1970-01-01T00:00:01.000298Z timestamp: 1970-01-01T00:00:01.000297Z timestamp: 1970-01-01T00:00:01.000296Z timestamp: 1970-01-01T00:00:01.000295Z timestamp: 1970-01-01T00:00:01.000099Z timestamp: 1970-01-01T00:00:01.000098Z timestamp: 1970-01-01T00:00:01.000097Z timestamp: 1970-01-01T00:00:01.000096Z |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2025-04-07T14:00:07.315786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:07.316147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:07.316389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00134f/r3tmp/tmpjV9wkj/pdisk_1.dat 2025-04-07T14:00:07.672587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:07.718255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:07.766878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:07.767070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:07.778587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:07.897047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:07.939687Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:00:07.939980Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:07.980949Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:07.981084Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:07.982768Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:07.982863Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:07.982911Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:07.983242Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:08.017239Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:08.017481Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:08.017637Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:00:08.017675Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:08.017728Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:00:08.017773Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:08.018304Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:08.018429Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:00:08.018863Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T14:00:08.018937Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:08.018985Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:08.019031Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:08.019074Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:08.019258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:08.019538Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:08.019642Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:08.021320Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:08.032169Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:08.032300Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:08.227453Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:00:08.235169Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:08.235279Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:08.236337Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:08.236390Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:08.236455Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:00:08.236714Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:00:08.236879Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:08.237519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:08.237647Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:00:08.239762Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:08.240291Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:08.241982Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:00:08.242031Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:08.242629Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:00:08.242703Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:00:08.242777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:08.243952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:08.243995Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:08.244055Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:00:08.244134Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:00:08.244190Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:00:08.244289Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:08.247862Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:08.249901Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:00:08.250021Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:00:08.250109Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:00:08.261196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.261315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.261396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:08.268294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:00:08.277623Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:08.505247Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:08.508082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:00:08.863342Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr88t9t36v2v4xztz61vwfe2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3OGM3YTMtYWY2ZTk3OTMtMzgxMDgwN2UtMWY0YzgzZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:08.867605Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T14:00:08.867809Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:08.880696Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:08.880874Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:09.063679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88tae2170g13f8z2ef6t7n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjExY2YyMDItYzkzOTEzYzMtNjJlOTkwYjYtOGViYTI5MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:09.065734Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lo ... ode 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T14:00:20.978312Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-04-07T14:00:20.978387Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:00:20.980400Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:905:2733], Recipient [3:632:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 905 RawX2: 12884904621 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\211\003\000\000\000\000\000\000\021\255\n\000\000\003\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-07T14:00:20.980463Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:00:20.980559Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:20.980792Z node 3 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-07T14:00:20.980880Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:00:20.980940Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.980986Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:00:20.981028Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:00:20.981067Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:00:20.981112Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} 
ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-07T14:00:20.981178Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-07T14:00:20.981225Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.981250Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:00:20.981275Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-07T14:00:20.981299Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2025-04-07T14:00:20.981328Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.981352Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-07T14:00:20.981373Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-07T14:00:20.981395Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:00:20.981443Z node 3 :TX_DATASHARD TRACE: Requested stream clearance from [3:905:2733] for [0:281474976715665] at 72075186224037888 2025-04-07T14:00:20.981487Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-04-07T14:00:20.981685Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [3:905:2733], Recipient [3:632:2537]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2025-04-07T14:00:20.981723Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-07T14:00:20.981822Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [3:905:2733], Recipient [3:632:2537]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2025-04-07T14:00:20.981855Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-07T14:00:20.981928Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:632:2537], Recipient [3:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:20.981963Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:20.982017Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:20.982063Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:00:20.982118Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-04-07T14:00:20.982165Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:00:20.982217Z node 3 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2025-04-07T14:00:20.982265Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.982310Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 
72075186224037888 executing on unit WaitForStreamClearance 2025-04-07T14:00:20.982349Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2025-04-07T14:00:20.982387Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-04-07T14:00:20.982652Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-04-07T14:00:20.982683Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:00:20.982728Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:00:20.982770Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:00:20.982810Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:00:20.983403Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [3:912:2738], Recipient [3:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:00:20.983451Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:00:20.983668Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-04-07T14:00:20.984035Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:20.984082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:20.984466Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:00:20.984638Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:898:2726], Recipient [3:632:2537]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:20.984739Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:20.984838Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-04-07T14:00:20.984888Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-04-07T14:00:20.987124Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T14:00:20.987176Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-04-07T14:00:20.987344Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:632:2537], Recipient [3:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:20.987384Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:20.987438Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:20.987478Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:00:20.987523Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-04-07T14:00:20.987555Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-04-07T14:00:20.987596Z node 3 :TX_DATASHARD TRACE: ReadTable scan complete 
for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2025-04-07T14:00:20.987652Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.987685Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2025-04-07T14:00:20.987733Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:00:20.987764Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:20.987811Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:00:20.987888Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-07T14:00:20.987936Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:00:20.987977Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:00:20.988019Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:00:20.988072Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:00:20.988099Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:00:20.988131Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-07T14:00:20.988177Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:20.988208Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:00:20.988239Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:00:20.988271Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:00:20.988340Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:20.988384Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:00:20.988430Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapStats [GOOD] Test command err: Trying to start YDB, gRPC: 63227, MsgBus: 21101 2025-04-07T14:00:15.102041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570779050043952:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:15.102082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd2/r3tmp/tmpmFDXkk/pdisk_1.dat 2025-04-07T14:00:15.654945Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63227, node 1 
2025-04-07T14:00:15.829023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:15.829046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:15.829052Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:15.829145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:15.854040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:15.854112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:15.857696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21101 TClient is connected to server localhost:21101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:16.469512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:16.484865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:16.501546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:16.700732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.700942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.701203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.701308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.701397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.701481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.701631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.701735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:16.701842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:16.702015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.702142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:16.702244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570783345011699:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:16.765031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.765095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.765326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.765471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.765575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.765658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.765758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.765848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:16.765942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:16.766048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:16.766157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:16.766267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570783345011710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:16.805868Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:16.805941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:16.806185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:16.806329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:16.806413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:16.806519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:16.806608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:16.806701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570783345011715:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
eId":1,"OutputChannels":[{"ChannelId":48,"DstStageId":1}],"WaitInputTimeUs":46476,"TaskId":48}],"CpuTimeUs":665,"DurationUs":62000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420700,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420648,"ComputeTimeUs":40,"NodeId":1,"OutputChannels":[{"ChannelId":64,"DstStageId":1}],"WaitInputTimeUs":37526,"TaskId":64}],"CpuTimeUs":584,"DurationUs":52000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420701,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420642,"ComputeTimeUs":38,"NodeId":1,"OutputChannels":[{"ChannelId":37,"DstStageId":1}],"WaitInputTimeUs":42366,"TaskId":37}],"CpuTimeUs":583,"DurationUs":59000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420701,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420650,"ComputeTimeUs":39,"NodeId":1,"OutputChannels":[{"ChannelId":53,"DstStageId":1}],"WaitInputTimeUs":33780,"TaskId":53}],"CpuTimeUs":580,"DurationUs":51000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420701,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420642,"ComputeTimeUs":44,"NodeId":1,"OutputChannels":[{"ChannelId":54,"DstStageId":1}],"WaitInputTimeUs":41981,"TaskId":54}],"CpuTimeUs":645,"DurationUs":59000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420701,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420637,"ComputeTimeUs":189,"NodeId":1,"OutputChannels":[{"ChannelId":38,"DstStageId":1}],"WaitInputTimeUs":47511,"TaskId":38}],"CpuTimeUs":797,"DurationUs":64000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420702,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420637,"ComputeTimeUs":57,"NodeId":1,"OutputChannels":[{"ChannelId":39,"DstStageId":1}],"WaitInputTimeUs":43981,"TaskId":39}],"CpuTimeUs":3838,"DurationUs":65000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420702,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420643,"ComputeTimeUs":36,"NodeId":1,"OutputChannels":[{"ChannelId":55,"DstStageId":1}],"WaitInputTimeUs":41705,"TaskId":55}],"CpuTimeUs":599,"DurationUs":59000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420702,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420638,"ComputeTimeUs":42,"NodeId":1,"OutputChannels":[{"ChannelId":47,"DstStageId":1}],"WaitInputTimeUs":45335,"TaskId":47}],"CpuTimeUs":622,"DurationUs":64000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420703,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420635,"ComputeTimeUs":34,"NodeId":1,"OutputChannels":[{"ChannelId":32,"DstStageId":1}],"WaitInputTimeUs":30746,"TaskId":32}],"CpuTimeUs":592,"DurationUs":68000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420703,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420639,"ComputeTimeUs":44,"NodeId":1,"OutputChannels":[{"ChannelId":33,"DstStageId":1}],"WaitInputTimeUs":44562,"TaskId":33}],"CpuTimeUs":656,"DurationUs":64000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420703,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420648,"ComputeTimeUs":65,"NodeId":1,"OutputChannels":[{"ChannelId":49,"DstStageId":1}],"WaitInputTimeUs":35201,"TaskId":49}],"CpuTimeUs":1089,"DurationUs":55000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420703,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420639,"ComputeTimeUs":42,"NodeId":1,"OutputChannels":[{"ChannelId":34,"DstStageId":1}],"WaitInputTimeUs":44226,"TaskId":34}],"CpuTimeUs":568,"DurationUs":64000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420711,"H
ost":"ghrun-lqtfcktism","StartTimeMs":1744034420643,"ComputeTimeUs":58,"NodeId":1,"OutputChannels":[{"ChannelId":56,"DstStageId":1}],"WaitInputTimeUs":41364,"TaskId":56}],"CpuTimeUs":678,"DurationUs":68000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420712,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420645,"ComputeTimeUs":47,"NodeId":1,"OutputChannels":[{"ChannelId":57,"DstStageId":1}],"WaitInputTimeUs":32491,"TaskId":57}],"CpuTimeUs":7943,"DurationUs":67000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420712,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420644,"ComputeTimeUs":49,"NodeId":1,"OutputChannels":[{"ChannelId":58,"DstStageId":1}],"WaitInputTimeUs":39830,"TaskId":58}],"CpuTimeUs":1359,"DurationUs":68000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420713,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420645,"ComputeTimeUs":41,"NodeId":1,"OutputChannels":[{"ChannelId":59,"DstStageId":1}],"WaitInputTimeUs":39389,"TaskId":59}],"CpuTimeUs":616,"DurationUs":68000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1744034420713,"Host":"ghrun-lqtfcktism","StartTimeMs":1744034420646,"ComputeTimeUs":43,"NodeId":1,"OutputChannels":[{"ChannelId":60,"DstStageId":1}],"WaitInputTimeUs":39026,"TaskId":60}],"CpuTimeUs":618,"DurationUs":67000}],"UseLlvm":false,"DurationUs":{"Count":64,"Max":69000,"Sum":3889000,"Min":46000},"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57}},"Name":"3","Push":{"LastMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57},"FirstMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"PauseMessageMs":{"Count":1,"Max":57,"Sum":57,"Min":57},"WaitTimeUs":{"Count":64,"Max":47439,"Sum":2036663,"Min":6607},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"MaxMemoryUsage":{"Count":64,"Max":1048576,"Sum":67108864,"Min":1048576},"Tasks":64,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":89000,"BaseTimeMs":1744034420624,"NodesScanShards":[{"node_id":1,"shards_count":3}],"WaitInputTimeUs":{"Count":64,"Max":47511,"Sum":2064606,"Min":12748},"CpuTimeUs":{"Count":64,"Max":4045,"Sum":37552,"Min":160},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16}}}],"Node Type":"Merge","SortColumns":["resource_id (Asc)","timestamp (Asc)"],"PlanNodeType":"Connection"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"PeakMemoryUsageBytes":65536,"Tasks":[{"InputBytes":16,"FinishTimeMs":1744034420710,"Host":"ghrun-lqtfcktism","ResultRows":2,"ResultBytes":16,"OutputRows":2,"StartTimeMs":1744034420686,"InputRows":2,"InputChannels":[{"ChannelId":1,"WaitTimeUs":32167,"Rows":2,"Bytes":16,"SrcStageId":0},{"ChannelId":2,"SrcStageId":0},{"ChannelId":3,"SrcStageId":0},{"ChannelId":4,"SrcStageId":0},{"ChannelId":5,"SrcStageId":0},{"ChannelId":6,"SrcStageId":0},{"ChannelId":7,"SrcStageId":0},{"ChannelId":8,"SrcStageId":0},{"ChannelId":9,"SrcStageId":0},{"ChannelId":10,"SrcStageId":0},{"ChannelId":11,"SrcStageId":0},{"ChannelId":12,"SrcStageId":0},{"ChannelId":13,"SrcStageId":0},{"ChannelId":14,"SrcStageId":0},{"ChannelId":15,"SrcStageId":0},{"ChannelId":16,"SrcStageId":0},{"ChannelId":17,"SrcStageId":0},{"ChannelId":18,"SrcStageId":0},{"ChannelId":19,"SrcStageId":0},{"ChannelId":20,"SrcStageId":0},{"ChannelId":21,"SrcStageId":0},{"ChannelId":22,"SrcStageId":0},{"ChannelId":23,"SrcStageId":0},{"ChannelId":24,"SrcStageId":0},{"ChannelId":25,"SrcStageId":0},{"ChannelId":26,"SrcStageId":0},{"ChannelId":27,"SrcStageId":0},{"ChannelId":28,"SrcStageId":0},{"ChannelId":29,"SrcStageId":0},{"ChannelId":30,"SrcStageId":0},{"ChannelId":31,"SrcStageId":0},{"ChannelId":32,"SrcStageId":0},{"ChannelId":33,"SrcStageId":0},{"ChannelId":34,"SrcStageId":0},{"ChannelId":35,"SrcStageId":0},{"ChannelId":36,"SrcStageId":0},{"ChannelId":37,"SrcStageId":0},{"ChannelId":38,"SrcStageId":0},{"ChannelId":39,"SrcStageId":0},{"ChannelId":40,"SrcStageId":0},{"ChannelId":41,"SrcStageId":0},{"ChannelId":42,"SrcStageId":0},{"ChannelId":43,"SrcStageId":0},{"ChannelId":44,"SrcStageId":0},{"ChannelId":45,"SrcStageId":0},{"ChannelId":46,"SrcStageId":0},{"ChannelId":47,"SrcStageId":0},{"ChannelId":48,"SrcStageId":0},{"ChannelId":49,"SrcStageId":0},{"ChannelId":50,"SrcStageId":0},{"ChannelId":51,"SrcStageId":0},{"ChannelId":52,"SrcStageId":0},{"ChannelId":53,"SrcStageId":0},{"ChannelId":54,"SrcStageId":0},{"ChannelId":55,"SrcStageId":0},{"ChannelId":56,"SrcStageId":0},{"ChannelId":57,"SrcStageId":0},{"ChannelId":58,"SrcStageId":0},{"ChannelId":59,"SrcStageId":0},{"ChannelId":60,"SrcStageId":0},{"ChannelId":61,"SrcStageId":0},{"ChannelId":62,"SrcStageId":0},{"ChannelId":63,"SrcStageId":0},{"ChannelId":64,"SrcStageId":0}],"ComputeTimeUs":204,"NodeId":1,"OutputChannels":[{"ChannelId":65,"DstStageId":0,"Rows":2,"Bytes":16}],"WaitInputTimeUs":32086,"TaskId":65,"OutputBytes":16}],"CpuTimeUs":2128,"DurationUs":24000}],"UseLlvm":false,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":1,"InputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"DurationUs":{"Count":1,"Max":24000,"Sum":24000,"Min":24000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"Min":1048576},"BaseTimeMs":1744034420624,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"FirstMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"PauseMessageMs":{"Count":1,"Max":30,"Sum":30,"Min":30},"WaitTimeUs":{"Count":1,"Max":32216,"Sum":32216,"Min":32216},"WaitPeriods":{"Coun
t":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":62,"Min":30}}}],"CpuTimeUs":{"Count":1,"Max":394,"Sum":394,"Min":394},"StageDurationUs":24000,"WaitInputTimeUs":{"Count":1,"Max":32086,"Sum":32086,"Min":32086},"ResultRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResultBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62}},"Name":"1","Push":{"LastMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"FirstMessageMs":{"Count":1,"Max":62,"Sum":62,"Min":62},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"PauseMessageMs":{"Count":1,"Max":30,"Sum":30,"Min":30},"WaitTimeUs":{"Count":1,"Max":32167,"Sum":32167,"Min":32167},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":62,"Min":30}}}],"Tasks":1,"InputRows":{"Count":1,"Max":2,"Sum":2,"Min":2}}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":343644,"ProcessCpuTimeUs":252,"Compilation":{"FromCache":false,"CpuTimeUs":204708,"DurationUs":207056}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":2,"A-Cpu":4.045,"SortBy":"","Name":"Sort"}],"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["resource_id","timestamp"],"E-Cost":"No estimate","SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":2},{"Id":1}]}}]}}],"Node Type":"TableFullScan"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> TPQTestSlow::TestOnDiskStoredSourceIds >> DataShardVolatile::UpsertBrokenLockArbiterRestart [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |89.3%| [LD] {RESULT} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |89.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] Test command err: Trying to start YDB, gRPC: 5070, MsgBus: 64010 2025-04-07T14:00:18.536436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570789609093590:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:18.536481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fca/r3tmp/tmpRfNneC/pdisk_1.dat 2025-04-07T14:00:19.017279Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:19.018801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:19.018890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:19.023596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5070, node 1 2025-04-07T14:00:19.257486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:19.257518Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:19.257529Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:19.257617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64010 TClient is connected to server localhost:64010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:19.910636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:19.923672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:19.937562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:20.055128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.055409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.055693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.055840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.055947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.056060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.056149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.056256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.056378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.056512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.056635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.056785Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570798199028657:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.091746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.091810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.092019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.092151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.092253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.092335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.092443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.092569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.092699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.092825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.092955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.093064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570798199028660:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.096256Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:20.096331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:20.096477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:20.096517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:20.096795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:20.096823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:20.096900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:20.096919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:20.096957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:20.096970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_sw ... 
d=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:20.179497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:20.179520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:20.180289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:20.180329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:20.180406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:20.180440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:20.180636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:20.180657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:20.180745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:20.180809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:20.180872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:20.180895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:20.180928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:20.180948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:20.181300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:20.181361Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:20.181525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:20.181563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.181747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:20.181776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:20.182011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:20.182033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:20.182118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:20.182138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:20.182800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:20.182833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:20.182913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:20.182969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:20.183169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:20.183194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:20.183283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:20.183314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:20.183363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:20.183384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:20.183433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:20.183465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:20.183792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:20.183892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:20.184089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:20.184114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.184245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:20.184274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:20.184476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:20.184536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:20.184705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:20.184734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:20.250152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 
SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2025-04-07T14:00:21.992825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570802493996262:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:21.994135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:21.995731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570802493996297:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:22.001005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:22.021406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570802493996299:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] >> TTopicApiDescribes::GetPartitionDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-04-07T14:00:08.085595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:08.085882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:08.086105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00133b/r3tmp/tmpBdhYcR/pdisk_1.dat 2025-04-07T14:00:08.464656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:08.506542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:08.565509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:08.565661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:08.577332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:08.705374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:00:08.752628Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:00:08.752966Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:08.797738Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:08.797985Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:08.799633Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:00:08.799739Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:00:08.799805Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:00:08.800176Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:08.839443Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:00:08.839695Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:08.839877Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:00:08.839993Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:00:08.840037Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:00:08.840076Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:08.840693Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:00:08.840920Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:00:08.850972Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:00:08.851099Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:00:08.851472Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:08.851533Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:08.851589Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:00:08.851642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:00:08.852144Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:00:08.852337Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:00:08.852687Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:00:08.852807Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:00:08.853511Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:00:08.853618Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:00:08.854887Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:00:08.854937Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:00:08.854990Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:00:08.855249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:00:08.855290Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:00:08.855357Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:00:08.855436Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:00:08.855466Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:00:08.855495Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:00:08.855533Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:00:08.856297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:00:08.856370Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:00:08.856790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:00:08.856968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:00:08.857004Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:08.857044Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:00:08.857090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:00:08.857360Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:00:08.857593Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:00:08.857665Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:00:08.858525Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:00:08.858617Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:00:08.869604Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:00:08.869763Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:08.870308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:00:08.870382Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:00:09.068393Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:00:09.068548Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:00:09.079794Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:00:09.079890Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:00:09.080549Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:00:09.080586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:00:09.080704Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:00:09.080750Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:09.080803Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:00:09.081063Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:00:09.081212Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:09.081402Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:09.081428Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:00:09.081469Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:00:09.081615Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:00:09.081708Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:00:09.081800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:00:09.081852Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:00:09.083917Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:09.084325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:09.085733Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:00:09.085794Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:00:09.086147Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:09.086453Z node ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:00:22.550024Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-04-07T14:00:22.550065Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-04-07T14:00:22.550112Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-07T14:00:22.550290Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:00:22.550329Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-04-07T14:00:22.550376Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:00:22.550427Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:00:22.550494Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T14:00:22.550515Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:00:22.550590Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-07T14:00:22.561870Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-07T14:00:22.562058Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-04-07T14:00:22.562120Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-04-07T14:00:22.837072Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:00:22.837182Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-04-07T14:00:22.838109Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jr88tqsbfex9y13xhm5rq0p6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA5NzA1NTMtN2UyNjYxYmMtMWU0MDUyZjItZGNjNTZmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:22.845537Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1071:2872], Recipient [3:636:2541]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:00:22.845700Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:00:22.845767Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-04-07T14:00:22.845814Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-07T14:00:22.845883Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-04-07T14:00:22.846016Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:00:22.846073Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:00:22.846115Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:00:22.846153Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:00:22.846216Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-07T14:00:22.846256Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:00:22.846281Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:00:22.846301Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:00:22.846323Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:00:22.846455Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:00:22.846758Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1071:2872], 0} after executionsCount# 1 2025-04-07T14:00:22.846823Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1071:2872], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:00:22.847010Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1071:2872], 0} finished in read 2025-04-07T14:00:22.847093Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:00:22.847117Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:00:22.847142Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-04-07T14:00:22.847169Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:00:22.847216Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:00:22.847238Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:00:22.847263Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-07T14:00:22.847313Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:00:22.847434Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:00:22.853052Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1071:2872], Recipient [3:636:2541]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:00:22.853141Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-04-07T14:00:23.113762Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:00:23.113854Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-04-07T14:00:23.114740Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88tr2f5bh40r13f62c3ztc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2VjMjU3MjMtNmM5ZTE1YTAtZjcwNWJkNjItMzYwMjIwYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:00:23.120483Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1102:2897], Recipient [3:869:2703]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-04-07T14:00:23.120637Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-07T14:00:23.120753Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-04-07T14:00:23.120811Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-07T14:00:23.120878Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-07T14:00:23.120986Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:00:23.121035Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-07T14:00:23.121083Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:00:23.121116Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T14:00:23.121190Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-07T14:00:23.121265Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2025-04-07T14:00:23.121296Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:00:23.121319Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-07T14:00:23.121342Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-07T14:00:23.121453Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-07T14:00:23.121792Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1102:2897], 0} after executionsCount# 1 2025-04-07T14:00:23.121852Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1102:2897], 0} sends rowCount# 2, bytes# 48, quota rows left# 32765, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:00:23.121996Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1102:2897], 0} finished in read 2025-04-07T14:00:23.122069Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:00:23.122095Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T14:00:23.122120Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:00:23.122144Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:00:23.122215Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:00:23.122244Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:00:23.122272Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-07T14:00:23.122321Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T14:00:23.122441Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T14:00:23.129929Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1102:2897], Recipient [3:869:2703]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:00:23.130036Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] >> TTopicApiDescribes::DescribeTopic |89.4%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 5143, MsgBus: 18848 2025-04-07T14:00:16.672760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570782699680655:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:16.672834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd0/r3tmp/tmp5EnBce/pdisk_1.dat 2025-04-07T14:00:17.353272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:17.353346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:17.355158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:17.360995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5143, node 1 2025-04-07T14:00:17.603675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:17.603713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:17.603720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:17.605598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18848 TClient is connected to server localhost:18848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:18.522392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:18.542523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:18.562977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:18.711928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:18.712089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:18.712336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:18.712425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:18.712496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:18.712596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:18.712707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:18.712807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:18.712912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:18.713052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:18.713137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:18.713244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570791289615779:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:18.779081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:18.779143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:18.779341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:18.779448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:18.779529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:18.779628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:18.779703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:18.779792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:18.779875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:18.779980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:18.780071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:18.780172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570791289615780:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:18.834135Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:18.834192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:18.834368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:18.834463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:18.834552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:18.834667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:18.834774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:18.834885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570791289615781:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:18.903563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:18.903812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:18.903841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:18.904015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:18.904042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:18.904145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:18.904164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:18.904327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:18.904347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:18.904422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:18.904438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; 2025-04-07T14:00:21.047989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570804174517960:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:21.048084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570804174517985:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:21.048158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:21.054421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:21.069277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570804174517989:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:21.640772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570782699680655:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:21.640844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:22.550345Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034422004, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No 
estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1328) '('"_id" '"8d82476a-c84a60c4-971c05cb-169da914") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '"6")) (let $20 (Just $19)) (let $21 (Int32 '1)) (let $22 '($20 $21)) (let $23 (If (== $19 (Int32 '2147483647)) $22 '((+ $20 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($22 $23)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $24 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $25 '('"id" '"jsondoc" '"jsonval")) (let $26 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $27 (OptionalType (DataType 'JsonDocument))) (let $28 (DataType 'Json)) (let $29 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $30 '((ResourceType '"JsonPath"))) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $10 $31)) (let $33 '($32)) (let $34 (CallableType '() $29 '($27) $30 $33)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $30 '($10))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (lambda '($51) (Nothing $11))) (let $42 (lambda '($52) $52)) (let $43 (KqpWideReadOlapTableRanges $24 %kqp%tx_result_binding_0_0 $25 '() $26 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $27) '('"jsonval" (OptionalType $28)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($49) (block '( (let $50 (Apply $36 $49 $39 $40)) (return (Visit $50 '0 $41 '1 $42)) ))))) (let $47 '('eq $46 (String '"val1"))) (let $48 '('?? 
$47 (Bool 'false))) (return (KqpOlapFilter $44 $48)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($53 $54 $55) (block '( (let $56 (OptionalType $31)) (let $57 (CallableType '() $29 '($56) $30 $33)) (let $58 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $57 (VoidType) '"" $35)) (let $59 (IfPresent $55 (lambda '($64) (block '( (let $65 '($28 '"" '1)) (let $66 (CallableType '() '($31) $65)) (let $67 (Udf '"Json2.Parse" (Void) (VoidType) '"" $66 (VoidType) '"" '())) (return (Just (Apply $67 $64))) ))) (Nothing $56))) (let $60 (Apply $58 $59 $39 $40)) (let $61 (Visit $60 '0 $41 '1 $42)) (let $62 (Apply $36 $54 $39 $40)) (let $63 (Visit $62 '0 $41 '1 $42)) (return (AsStruct '('"column1" $61) '('"column2" $63) '('"id" $53))) )))))) ))) '('('"_logical_id" '1399) '('"_id" '"48429951-aab9219d-857c4b6a-1bcc89db")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($68) $68) '('('"_logical_id" '2216) '('"_id" '"c3886414-a7a59769-a384933f-f7c85654")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] >> TTopicApiDescribes::GetLocalDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 61125, MsgBus: 21844 2025-04-07T13:59:55.147635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570689803997311:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:55.148639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000feb/r3tmp/tmpd28SCc/pdisk_1.dat 2025-04-07T13:59:55.645005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:55.645090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:55.648119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:55.669320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61125, node 1 2025-04-07T13:59:55.813238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:55.813276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:55.813284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:55.813382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21844 TClient is connected to server localhost:21844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:56.539010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:56.554403Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:56.569882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:56.660871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.661069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.661338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.661454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.661567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.661663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.661780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.661911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:56.662028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:56.662124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:56.662233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:56.662330Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570694098965149:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:56.696313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.696383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.696565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.696842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.696968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.697055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.697146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.697255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:56.697346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:56.697429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:56.697535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:56.697624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570694098965150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:56.741878Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:56.741940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:56.742164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:56.742256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:56.742624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:56.742735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:56.742825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:56.742923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570694098965151:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.086357Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.205487Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.205565Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.271498Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.271614Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.339333Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.339410Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.422777Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.422853Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.502955Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:22.514326Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:22.563083Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.563158Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.631022Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:00:22.631120Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.699252Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.699322Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.777090Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.777188Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.873065Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:22.873140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:22.897216Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:23.036249Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:23.036351Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:00:23.137511Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:23.137591Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:23.245180Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:23.245258Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:23.339916Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:23.340005Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:23.446247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:23.446345Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yjc2ZmQzNGItMzExZmIxZjQtMzZjM2NiNGQtOGI3ZTk2MGE=. TraceId : 01jr88te086gghv14b8nz8bmhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:00:23.524087Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:23.537308Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a8d/r3tmp/tmpGFgp6m/pdisk_1.dat 2025-04-07T13:59:15.571828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:15.970723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:16.015203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:16.015307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:16.058293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1861, node 1 2025-04-07T13:59:16.300592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:16.300615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:16.302565Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:16.302691Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:59:16.816625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.824256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:16.825657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.828627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:16.828847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:16.828864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T13:59:16.832124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:16.832150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T13:59:16.833852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T13:59:16.834208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:16.849745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034356883, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:16.849797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T13:59:16.850120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T13:59:16.853863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:16.854053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:16.854122Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T13:59:16.854223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T13:59:16.854291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T13:59:16.854360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T13:59:16.858062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T13:59:16.858112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T13:59:16.858132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T13:59:16.858225Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T13:59:30.965297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:30.965331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:16.013460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-07T14:00:16.013522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-07T14:00:16.013575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 2424, MsgBus: 29439 2025-04-07T14:00:18.105836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570791325305089:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:18.106021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fcd/r3tmp/tmpICwcx3/pdisk_1.dat 2025-04-07T14:00:18.769041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:18.770037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:18.785236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:18.818965Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2424, node 1 2025-04-07T14:00:19.009248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:19.009306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:19.009313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:19.009409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29439 TClient is connected to server localhost:29439 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:20.194576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:20.229435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:20.243608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:20.397529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.397778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.398068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.398199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.398312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.398398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.398484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.398587Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.404787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.405088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.405230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.405366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570799915240356:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.455616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.455667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.455861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.455922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.455998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.456052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.456116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.456189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.456249Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.456316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.456374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.456461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570799915240391:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.489013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.489084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.489331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.489428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.489543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.489637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.489745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.489856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570799915240337:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
FromChunks; 2025-04-07T14:00:20.557293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:20.557388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:20.557594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:20.557624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.557746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:20.557794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:20.557970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:20.557993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:20.558099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:20.558118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; 2025-04-07T14:00:22.739858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570808505175232:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:22.739979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:22.740655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570808505175257:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:22.747498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:22.759677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570808505175259:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:23.136186Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570791325305089:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:23.136260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; 2025-04-07T14:00:24.287577Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034423000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node 
Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1332) '('"_id" '"130c8c04-9597d0ea-51d69bf3-b6c6751d") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $20 (Int32 '"6")) (let $21 (Just $20)) (let $22 (Int32 '1)) (let $23 '($21 $22)) (let $24 (If (== $20 (Int32 '2147483647)) $23 '((+ $21 $22) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($23 $24)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $6)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id" '"jsondoc" '"jsonval")) (let $27 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $28 (OptionalType (DataType 'JsonDocument))) (let $29 (DataType 'Json)) (let $30 (TupleType (DataType 'Uint8) (DataType 'String))) (let $31 '((ResourceType '"JsonPath"))) (let $32 (ResourceType '"JsonNode")) (let $33 (DictType $11 $32)) (let $34 '($33)) (let $35 (CallableType '() '((VariantType (TupleType $30 (OptionalType (DataType 'Double))))) '($28) $31 $34)) (let $36 '('('"strict"))) (let $37 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $35 (VoidType) '"" $36)) (let $38 (CallableType '() $31 '($11))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.obj.obj_col2_int"))) (let $41 (Dict $33)) (let $42 (lambda '($53) (block '( (let $54 (Nothing $7)) (return $54) )))) (let $43 (lambda '($55) (If (Exists $55) (SafeCast $55 $7) (Nothing $7)))) (let $44 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 $26 '() $27 (lambda '($45) (block '( (let $46 (StructType $10 '('"jsondoc" $28) '('"jsonval" (OptionalType $29)))) (let $47 (KqpOlapApply $46 '('"jsondoc") (lambda '($50) (block '( (let $51 (Apply $37 $50 $40 $41)) (let $52 (Nothing $7)) (return (Visit $51 '0 $42 '1 $43)) ))))) (let $48 '('eq $47 (Int32 '"16"))) (let $49 '('?? 
$48 (Bool 'false))) (return (KqpOlapFilter $45 $49)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($56 $57 $58) (block '( (let $59 (OptionalType $32)) (let $60 (CallableType '() '((VariantType (TupleType $30 $12))) '($59) $31 $34)) (let $61 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $60 (VoidType) '"" $36)) (let $62 (IfPresent $58 (lambda '($67) (block '( (let $68 '($29 '"" '1)) (let $69 (CallableType '() '($32) $68)) (let $70 (Udf '"Json2.Parse" (Void) (VoidType) '"" $69 (VoidType) '"" '())) (return (Just (Apply $70 $67))) ))) (Nothing $59))) (let $63 (Apply $61 $62 $40 $41)) (let $64 (Visit $63 '0 (lambda '($71) (Nothing $12)) '1 (lambda '($72) $72))) (let $65 (Apply $37 $57 $40 $41)) (let $66 (Visit $65 '0 $42 '1 $43)) (return (AsStruct '('"column1" $64) '('"column2" $66) '('"id" $56))) )))))) ))) '('('"_logical_id" '1403) '('"_id" '"7c78288c-8c24e856-ae4efa0d-50cd5cea")))) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($73) $73) '('('"_logical_id" '2376) '('"_id" '"c4641a0d-d1932e1a-14245ec-50dbb92d")))) (let $16 '('"id" '"column1" '"column2")) (let $17 (DqCnResult (TDqOutput $15 '0) $16)) (let $18 (KqpTxResultBinding $9 '0 '0)) (let $19 (KqpPhysicalTx '($13 $15) '($17) '('($5 $18)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $19) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $7) $10)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] Test command err: Trying to start YDB, gRPC: 3893, MsgBus: 20866 2025-04-07T14:00:20.405114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570798609210076:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:20.405150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc7/r3tmp/tmpYyywtF/pdisk_1.dat 2025-04-07T14:00:20.904455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:20.904536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:20.906805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:20.956101Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3893, node 1 2025-04-07T14:00:21.045243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:21.045267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:21.045275Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:21.045361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20866 TClient is connected to server localhost:20866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:21.619726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:21.634505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:21.640309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:21.756425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:21.756633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:21.757082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:21.757275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:21.757406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:21.757555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:21.757794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:21.757926Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:21.758112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:21.758247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:21.758382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:21.758537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570802904178058:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:21.794594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:21.794671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:21.794903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:21.795045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:21.795174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:21.795283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:21.795381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:21.795498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:21.795609Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:21.795706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:21.795821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:21.795919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570802904178060:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:21.830501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:21.830558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:21.830765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:21.830883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:21.831012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:21.831124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:21.831250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:21.831419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570802904178056:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
vent=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:21.880593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:21.880616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:21.880796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:21.880820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:21.880902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:21.880928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:21.880977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:21.880998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:21.881025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:21.881041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:21.881352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:21.881382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:21.881534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:21.881565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:21.881668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:21.881699Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:21.881899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:21.881922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:21.882028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:21.882052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 2025-04-07T14:00:23.864340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570811494112944:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:23.864436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:23.864709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570811494112956:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:23.868635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:23.906682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570811494112958:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:25.419728Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570798609210076:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:25.421428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:25.620188Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034425000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '721) '('"_id" '"b073e19a-4980a487-21a00634-15b37b09") $3)) 
(let $5 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '779) '('"_id" '"224dc9de-330c0ce9-f4c3d29d-f77eeec5") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"level") '() $29 (lambda '($31) (TKqpOlapAgg $31 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $30)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($32) (block '( (let $33 (Bool 'false)) (let $34 (WideCondense1 (ToFlow $32) (lambda '($36) $36) (lambda '($37 $38) $33) (lambda '($39 $40) (Coalesce $40 $39)))) (let $35 (Condense (NarrowMap (Take $34 (Uint64 '1)) (lambda '($41) (AsStruct '('Some0 $41)))) (Nothing (OptionalType (StructType '('Some0 $2)))) (lambda '($42 $43) $33) (lambda '($44 $45) (Just $44)))) (return (FromFlow (Map $35 (lambda '($46) (AsList (AsStruct '('"column0" (Member $46 'Some0)))))))) ))) '('('"_logical_id" '1313) '('"_id" '"81e032c-ef7ff85b-136e8ec-b678fc22")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($8 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $2)))) (let $21 '('('"_logical_id" '1409) '('"_id" '"5906586e-78ca5e3-d44107e8-31312dec") $3)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($7 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) |89.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer >> TIcNodeCache::GetNodesInfoTest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] |89.4%| [LD] {default-linux-x86_64, release, 
asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> KqpOlapBlobsSharing::HugeSchemeHistory >> KqpOlapStats::DescibeTableStore [GOOD] >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet >> SlowTopicAutopartitioning::CDC_Write >> KqpOlapIndexes::IndexesModificationError |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL >> KqpOlapSysView::StatsSysViewAggregation >> KqpOlapStats::AddRowsTableStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2025-04-07T13:58:51.755866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570416432139742:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:51.755905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:58:51.818434Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570418886125481:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:52.104960Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T13:58:52.136609Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b4a/r3tmp/tmp4zrHJU/pdisk_1.dat 2025-04-07T13:58:52.182836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:58:52.580172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:52.580268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:52.583217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:52.583313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:52.616984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:52.624405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:52.626548Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:58:52.627434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9566, node 1 2025-04-07T13:58:52.852346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b4a/r3tmp/yandexxLwxcI.tmp 2025-04-07T13:58:52.852369Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: /home/runner/.ya/build/build_root/u6pr/001b4a/r3tmp/yandexxLwxcI.tmp 2025-04-07T13:58:52.852505Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b4a/r3tmp/yandexxLwxcI.tmp 2025-04-07T13:58:52.852586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:58:52.905362Z INFO: TTestServer started on Port 29318 GrpcPort 9566 TClient is connected to server localhost:29318 PQClient connected to localhost:9566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:53.297378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:58:53.431417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:58:56.227377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570440360962187:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:56.227460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570440360962171:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:56.227529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:58:56.234851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T13:58:56.268752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570440360962210:2290], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T13:58:56.809202Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570418886125481:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:56.809299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:56.807222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570416432139742:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:56.866609Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570437906977375:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:58:56.868413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:56.868633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWRlYjY3ODgtZDY2YzMyYWQtNmY3ZTkwNGUtYTQ4MmVlMDA=, ActorId: [1:7490570437906977326:2305], ActorState: ExecuteState, TraceId: 01jr88r3nb3pp29n6cqgss6bhf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:58:56.896853Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570440360962251:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:58:56.898518Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2I5NGM1YzAtNTlkMzA2YzctYjQxZWViY2MtZDAyMDgyNmU=, ActorId: [2:7490570440360962168:2283], ActorState: ExecuteState, TraceId: 01jr88r3ep7gy4xtkqstbn7d9d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:58:56.911119Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:58:56.920928Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:58:57.045921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:58:57.210347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:58:57.437336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T13:58:57.860703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr88r4t795xxaqxk1rjmz4wd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE3MTIwMTAtMWJhMTljZDQtYWRhNWQwOTEtNjc5ZjVmZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570442201945122:3081] === CheckClustersList. 
Ok 2025-04-07T13:59:02.941121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:1, at schemeshard: 72057594046644480 2025-04-07T13:59:05.669069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:59:06.365395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:59:07.603213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:07.603261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:07.749353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-07T13:59:09.01944 ... :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NmY0ODkwMjgtZWMzMzkzMDYtMjhhNmMzNTAtNDJiMjMwM2M=, ActorId: [10:7490570755833210872:2282], ActorState: ExecuteState, TraceId: 01jr88tbwz0grjsqbbwddst7bp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:10.523064Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:10.625390Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490570755459198172:2326], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:10.626513Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YmZkNmQ3ZGItNTU1ZDYzNmYtZmM3MDkzMDgtZDIyODhjMmU=, ActorId: [9:7490570755459197964:2308], ActorState: ExecuteState, TraceId: 01jr88tbv7avmz9sztjbbjr8c9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:10.627003Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:10.627182Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:10.990714Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:11.565001Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88tcne4rdkr97b0ejs1bfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MzU0ZjFmM2UtNTIyNzVkNGYtOGZkNDNjN2MtNmNhNGRmZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7490570759754165783:3088] === CheckClustersList. 
Ok 2025-04-07T14:00:17.466560Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 2025-04-07T14:00:18.495412Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:00:20.105548Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T14:00:20.591512Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:20.591536Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:21.364412Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T14:00:22.670416Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T14:00:24.195973Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715706:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1744034425731, 1744034425731, 0, 13); 2025-04-07T14:00:25.911184Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715712. Ctx: { TraceId: 01jr88ttxnbm8qsg0jq4v93gx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NTBkNGRlNTQtOWNhYmI1YzItNjZjNTJkZjMtMTU3NWM5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:00:25.944787Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:00:25.944818Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:00:25.944835Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:00:25.944869Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-04-07T14:00:25.944995Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7490570819883709630:4081], Recipient [9:7490570785523969939:3293]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:00:25.945100Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7490570819883709629:4081], Recipient [9:7490570785523969939:3293]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-04-07T14:00:25.945203Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7490570785523969939:3293], Recipient [9:7490570819883709629:4081]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-07T14:00:25.945238Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-07T14:00:25.945317Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7490570819883709629:4081], Recipient [9:7490570785523969939:3293]: NActors::TEvents::TEvPoison 2025-04-07T14:00:25.945851Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7490570733984360350:2049], Recipient [9:7490570819883709629:4081]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-07T14:00:25.945882Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-04-07T14:00:25.949247Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7490570733984360568:2256], Recipient [9:7490570819883709629:4081]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=ZWE3YTcwZGYtOTU0MzQ4NWEtOWE3MWJmYjEtYzBkNmJjNjA=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-07T14:00:25.949281Z node 9 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) Select from the table Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. 2025-04-07T14:00:26.287880Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7490570733984360568:2256], Recipient [9:7490570819883709629:4081]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZWE3YTcwZGYtOTU0MzQ4NWEtOWE3MWJmYjEtYzBkNmJjNjA=" PreparedQuery: "bb35654e-82022a01-3695f9b9-df90b85f" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr88tvbaa4rna356r726phfx" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744034425731 } items { uint64_value: 1744034425731 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 152 Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-04-07T14:00:26.288134Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-04-07T14:00:26.288163Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen 2025-04-07T14:00:26.288221Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7490570819883709629:4081] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. 2025-04-07T14:00:26.757801Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715716. Ctx: { TraceId: 01jr88tvj3d0g82drn3jqtdpn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGNjNzU2NTgtNzY4ZjRhMDQtM2NkZmI4NTUtY2IzYjZhZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::DescibeTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 1787, MsgBus: 27146 2025-04-07T14:00:14.767493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570773234305590:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:14.767541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd4/r3tmp/tmpTiLDfV/pdisk_1.dat 2025-04-07T14:00:15.278826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:15.278919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:15.280383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1787, node 1 2025-04-07T14:00:15.368067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:15.449965Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:15.492932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:15.521541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:15.521569Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:15.521578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:15.521656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27146 TClient is connected to server localhost:27146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:16.379247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
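For readability only, the three TTableHelper statements quoted in the PQ_PARTITION_CHOOSER trace above (SelectQuery, UpdateQuery, UpdateAccessTimeQuery) are restated below with line breaks added; the text is otherwise verbatim from the log, and the table path `//Root/.metadata/TopicPartitionsMapping` is the path used by this test environment, not a general recommendation.

    --!syntax_v1
    DECLARE $Hash AS Uint64;
    DECLARE $Topic AS Utf8;
    DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    --!syntax_v1
    DECLARE $SourceId AS Utf8;
    DECLARE $Topic AS Utf8;
    DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32;
    DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    DECLARE $SeqNo AS Uint64;
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    DECLARE $SourceId AS Utf8;
    DECLARE $Topic AS Utf8;
    DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32;
    DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;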
2025-04-07T14:00:16.401993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:00:18.921377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570790414175215:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:18.935672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:18.947048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:19.042304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:19.042472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:19.042698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:19.042809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:19.042895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:19.042979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:19.043062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:19.043159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:19.043250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:19.043339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:19.043433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:19.043546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570790414175286:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
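As a reference point for the column-store setup exercised by the DescibeTableStore test, the DDL statement logged above is restated below with line breaks added; the content is otherwise verbatim from the log, and the table path and partitioning setting are specific to this test.

    CREATE TABLESTORE `/Root/TableStoreTest` (
        id Int32 NOT NULL,
        resource_id Utf8,
        level Int32,
        PRIMARY KEY (id)
    )
    WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1
    );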
2025-04-07T14:00:19.054767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:19.054818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:19.054905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:19.054931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:19.055078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:19.055104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:19.055177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:19.055204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:19.055253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:19.055275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:19.055306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:19.055326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:19.055815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:19.055860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:19.056010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:19.056036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:19.056146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:19.056165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:19.056301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:19.056322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:19.056416Z node 1 :TX_COLUMNSHARD WARN: tabl ... umnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=25392;columns=3; 2025-04-07T14:00:19.769338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570773234305590:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:19.769540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> BasicUsage::TWriteSession_WriteEncoded [GOOD] Test command err: 2025-04-07T13:56:23.917735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569783306135156:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:23.917900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:56:24.157332Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001661/r3tmp/tmpLSXqnI/pdisk_1.dat 2025-04-07T13:56:24.535946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:56:24.536046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:56:24.536346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:56:24.542801Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19747, node 1 2025-04-07T13:56:24.677395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001661/r3tmp/yandexUogIx4.tmp 2025-04-07T13:56:24.677418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001661/r3tmp/yandexUogIx4.tmp 2025-04-07T13:56:24.677589Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001661/r3tmp/yandexUogIx4.tmp 2025-04-07T13:56:24.677693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:56:24.725786Z INFO: TTestServer started on Port 12803 GrpcPort 19747 TClient is connected to server localhost:12803 PQClient connected to localhost:19747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:56:25.047854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.064502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:56:25.076466Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T13:56:25.082754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T13:56:27.159195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569800486004957:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:27.159411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:27.159622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569800486004993:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:56:27.163293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T13:56:27.175830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569800486004995:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T13:56:27.416003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.432156Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490569800486005067:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T13:56:27.433795Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGY1ZjBjYmEtYzJlOGI1NTktYjIxOGE3NTAtZTQwYTFjNWE=, ActorId: [1:7490569800486004954:2307], ActorState: ExecuteState, TraceId: 01jr88khwffzjjbxwc4jcm7tnv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T13:56:27.435961Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T13:56:27.449060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:56:27.539882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490569800486005344:2620] 2025-04-07T13:56:28.911962Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569783306135156:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:56:28.912067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T13:56:33.879287Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2025-04-07T13:56:33.895955Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-07T13:56:33.913059Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7490569826255809449:2811] connected; active server actors: 1 2025-04-07T13:56:33.913505Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-07T13:56:33.914065Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-07T13:56:33.914209Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-07T13:56:33.914686Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-07T13:56:33.917942Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:56:33.918174Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-07T13:56:33.918893Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:56:33.919076Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-07T13:56:33.919109Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T13:56:33.919126Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-07T13:56:33.919151Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:56:33.919193Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:56:33.919237Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2025-04-07T13:56:33.919252Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T13:56:33.926509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-07T13:56:33.926581Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:33.926622Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569826255809470:2436], now have 1 active actors on pipe 2025-04-07T13:56:33.954903Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:56:33.954955Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7490569826255809448:2810], now have 1 active actors on pipe 2025-04-07T13:56:33.954981Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T13:56:33.969408Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7490569787601102741:2205] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: ... 
pic in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-04-07T14:00:24.241605Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 partition ready for read: partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1744034419116, sizeLag# 548 2025-04-07T14:00:24.241635Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1TEvPartitionReady. Aval parts: 1 2025-04-07T14:00:24.241704Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 performing read request: guid# a50dad5d-f029692f-dbd456bb-1899b493, from# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), count# 4, size# 657, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T14:00:24.241839Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 READ FROM TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1)maxCount 4 maxSize 657 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid a50dad5d-f029692f-dbd456bb-1899b493 2025-04-07T14:00:24.248920Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-07T14:00:24.248991Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-07T14:00:24.249102Z node 13 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:00:24.249207Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 Topic 'test-topic' partition 0 user test-consumer offset 0 count 4 size 657 endOffset 4 max time lag 0ms effective offset 0 2025-04-07T14:00:24.249254Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T14:00:24.249355Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-07T14:00:24.249387Z node 13 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:00:24.249573Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:00:24.256863Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1744034419116 CreateTimestampMS: 1744034419080 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1744034419151 CreateTimestampMS: 1744034419080 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1744034419151 CreateTimestampMS: 1744034419080 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "" SeqNo: 4 WriteTimestampMS: 1744034419155 CreateTimestampMS: 1744034419087 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 58 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-07T14:00:24.257193Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-07T14:00:24.257268Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid a50dad5d-f029692f-dbd456bb-1899b493 has messages 1 2025-04-07T14:00:24.261225Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] Got ReadResponse, serverBytesSize = 496, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428304 2025-04-07T14:00:24.261410Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428304 2025-04-07T14:00:24.257424Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 read done: guid# a50dad5d-f029692f-dbd456bb-1899b493, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 496 2025-04-07T14:00:24.257465Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 response to read: guid# a50dad5d-f029692f-dbd456bb-1899b493 2025-04-07T14:00:24.257795Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 Process answer. Aval parts: 0 2025-04-07T14:00:24.265067Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2025-04-07T14:00:24.265183Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] Returning serverBytesSize = 496 to budget 2025-04-07T14:00:24.265248Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] In ContinueReadingDataImpl, ReadSizeBudget = 496, ReadSizeServerDelta = 52428304 2025-04-07T14:00:24.265665Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T14:00:24.268818Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T14:00:24.268904Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T14:00:24.268955Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-04-07T14:00:24.268987Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-04-07T14:00:24.269062Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] The application data is transferred to the client. Number of messages 4, size 14 bytes 2025-04-07T14:00:24.269132Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] Returning serverBytesSize = 0 to budget 2025-04-07T14:00:24.269370Z :INFO: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] Closing read session. 
Close timeout: 0.000000s 2025-04-07T14:00:24.269428Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:0 2025-04-07T14:00:24.269496Z :INFO: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 88 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:00:24.269654Z :NOTICE: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:00:24.269720Z :DEBUG: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] [] Abort session to cluster 2025-04-07T14:00:24.268994Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-04-07T14:00:24.269230Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 got read request: guid# 3c146e5c-2a6f041d-56f1ac52-1e9ed5c2 2025-04-07T14:00:24.270372Z :NOTICE: [/Root] [/Root] [9d31cfc2-cbd197e9-841f0e51-5ac892bf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:00:24.270648Z :INFO: [/Root] TraceId [] SessionId [ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T14:00:24.270691Z :INFO: [/Root] TraceId [] SessionId [ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T14:00:24.270742Z :DEBUG: [/Root] TraceId [] SessionId [ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T14:00:24.271264Z :INFO: [/Root] TraceId [] SessionId [ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T14:00:24.271324Z :DEBUG: [/Root] TraceId [] SessionId [ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T14:00:24.287113Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 grpc read done: success# 0, data# { } 2025-04-07T14:00:24.287150Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 grpc read failed 2025-04-07T14:00:24.287188Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 grpc closed 2025-04-07T14:00:24.287226Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_18415891683887684919_v1 is DEAD 2025-04-07T14:00:24.288224Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0 grpc read done: success: 0 data: 2025-04-07T14:00:24.288251Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0 grpc read failed 2025-04-07T14:00:24.288278Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0 grpc closed 2025-04-07T14:00:24.288301Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: ca2a1690-78ea6c98-df935359-aec70526|beea2ffb-7c06df42-f5dbcbdd-5c97a304_0 is DEAD 2025-04-07T14:00:24.289322Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:00:24.289748Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [13:7490570817299804999:2506] disconnected; active server actors: 1 2025-04-07T14:00:24.289781Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [13:7490570817299804999:2506] client test-consumer disconnected session test-consumer_13_1_18415891683887684919_v1 2025-04-07T14:00:24.289914Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:24.289958Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7490570791530000986:2448] destroyed 2025-04-07T14:00:24.289979Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:24.290005Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_13_1_18415891683887684919_v1 
2025-04-07T14:00:24.290029Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7490570817299805002:2509] destroyed 2025-04-07T14:00:24.290083Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:00:24.290187Z node 13 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_13_1_18415891683887684919_v1 >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] >> KqpOlapAggregations::Aggregation_Avg_Null >> KqpOlapSysView::StatsSysViewBytesPackActualization [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesPackActualization [GOOD] Test command err: Trying to start YDB, gRPC: 2238, MsgBus: 28556 2025-04-07T13:59:58.965166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570706761518952:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:58.965413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe2/r3tmp/tmpC7WjfM/pdisk_1.dat 2025-04-07T13:59:59.339285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2238, node 1 2025-04-07T13:59:59.397557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:59.399731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:59.405723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:59.438658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:59.438679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:59.438686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:59.438785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28556 TClient is connected to server localhost:28556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:00.045222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:00.061743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:00.071966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:00.180117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:00.180284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:00.180532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:00.180638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:00.180911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:00.181040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:00.181156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:00.181282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:00.181412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:00.181529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:00.181632Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:00.181768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570715351454003:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:00.274886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:00.274953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:00.275162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:00.275328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:00.275414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:00.275534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:00.275632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:00.275722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:00.275858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:00.275944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:00.276029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:00.276130Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570715351454004:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:00.311082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:00.311140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:00.311353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:00.311467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:00.311600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:00.311704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:00.311778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:00.311867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570715351454005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
e=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:00.353665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:00.353686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:00.353812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:00.353836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:00.408453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6400144;columns=1; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:00:03.964231Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570706761518952:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:03.964296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: 2025-04-07T14:00:11.263474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570762596094725:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:11.263588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:11.264304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570762596094737:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:11.268415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:11.292946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:00:11.293241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570762596094739:2453], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:12.616053Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034412232, txId: 281474976710662] shutting down Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Wait changes: 3210584/6400000 2025-04-07T14:00:14.333146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:14.333197Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2025-04-07T14:00:18.046853Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034417900, txId: 281474976710664] shutting down 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 3210584/6400000 2025-04-07T14:00:18.099104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:18.164699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000013s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:276352:0] EntityType: COL BlobRangeSize: 276352 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 
Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:276904:0] EntityType: COL BlobRangeSize: 276904 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:277656:0] EntityType: COL BlobRangeSize: 277656 PathId: 3 2025-04-07T14:00:19.785794Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034419603, txId: 281474976710668] shutting down Wait changes: 830912/6400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:276904:0] EntityType: COL BlobRangeSize: 276904 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:277656:0] EntityType: COL BlobRangeSize: 277656 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:276352:0] EntityType: COL BlobRangeSize: 276352 PathId: 3 830912/6400000 2025-04-07T14:00:25.501337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034425262, txId: 281474976710670] shutting down 2025-04-07T14:00:25.546053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:00:25.608832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710673:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000015s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: 2025-04-07T14:00:27.262215Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034427127, txId: 281474976710674] shutting down [72075186224037888:1:3:5:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 
Wait changes: 3210584/6400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 2025-04-07T14:00:33.107653Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034432614, txId: 281474976710676] shutting down 3210584/6400000 >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] >> KqpOlapIndexes::IndexesModificationError [GOOD] |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29471, MsgBus: 3728 2025-04-07T13:57:15.145635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570006589497460:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:15.145693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001066/r3tmp/tmpr3MMrN/pdisk_1.dat 2025-04-07T13:57:15.880192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:15.880286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:15.887940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:15.919192Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29471, node 1 2025-04-07T13:57:16.091592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:16.091615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T13:57:16.091635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:16.091726Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3728 TClient is connected to server localhost:3728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:17.085434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.142320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:17.159091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.333489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.617388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.714601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:19.842912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570023769368355:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:19.843053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.098912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.150217Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570006589497460:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:20.150556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:20.156087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.235552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.311010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.412196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.468263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.562230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570028064336157:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.562304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.562674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570028064336162:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.566755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:20.591851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570028064336164:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:23.313428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.352364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.404603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.453907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.583124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.872603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.915351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.967711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.011203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.065171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.126942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.181227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.241865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.375600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:25.451718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.518521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.595896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.648785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.715134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... te;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:14.375331Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:14.375365Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:14.375484Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:14.375507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:14.379898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:14.379964Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:14.380101Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:14.380136Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:14.380331Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:14.380366Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:14.380465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:14.380504Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:14.380576Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:14.380606Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:14.380652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:14.380709Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:14.381113Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:14.381160Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:14.381379Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:14.381413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.381575Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:14.381608Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:14.381802Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:14.381835Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:14.381945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:14.381975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:14.389448Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:14.389523Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:14.389633Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:14.389670Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:14.389875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:14.389910Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:14.390040Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:14.390080Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:14.390155Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:14.390189Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:14.390234Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:14.390270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:14.390621Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:14.390664Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:14.390882Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:14.390922Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.391084Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:14.391119Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:14.391341Z 
node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:14.391380Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:14.391504Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:14.391533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:27.113025Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88tg423whqn2tnpzvt1f18", SessionId: ydb://session/3?node_id=5&id=ZjNkNWE2YzAtODA0ZDVjMTQtMTczYTljMTgtODM0NWNkOTg=, Slow query, duration: 12.390087s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n$bla = (\n SELECT\n web_sales.ws_item_sk bla_item_sk,\n avg(ws_ext_discount_amt) bla_ext_discount_amt\n FROM\n web_sales\n cross join date_dim\n WHERE\n cast(d_date as date) between cast('2001-03-12' as date) and\n (cast('2001-03-12' as date) + DateTime::IntervalFromDays(90))\n and d_date_sk = ws_sold_date_sk\n group by web_sales.ws_item_sk\n );\n\n-- start query 1 in stream 0 using template query92.tpl and seed 2031708268\nselect\n sum(ws_ext_discount_amt) as `Excess Discount Amount`\nfrom\n web_sales\n cross join item\n cross join date_dim\n join $bla bla on (item.i_item_sk = bla.bla_item_sk)\nwhere\ni_manufact_id = 356\nand i_item_sk = ws_item_sk\nand cast(d_date as date) between cast('2001-03-12' as date) and\n (cast('2001-03-12' as date) + DateTime::IntervalFromDays(90))\nand d_date_sk = ws_sold_date_sk\nand ws_ext_discount_amt\n > 1.3 * bla.bla_ext_discount_amt\norder by `Excess Discount Amount`\nlimit 100;\n", parameters: 0b >> KqpOlapStats::AddRowsTableStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesModificationError [GOOD] Test command err: Trying to start YDB, gRPC: 18673, MsgBus: 21873 2025-04-07T14:00:31.163738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570847475887816:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:31.163806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc3/r3tmp/tmpIKRhob/pdisk_1.dat 2025-04-07T14:00:32.204344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:32.263288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:32.263375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:32.264228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:32.274003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 18673, node 1 2025-04-07T14:00:32.425407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:32.425439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:32.425448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:32.425569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21873 TClient is connected to server localhost:21873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:33.797577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:33.845550Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:33.866956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:34.055373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.055584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.055876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.055987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.056131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-07T14:00:34.056284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.056403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:34.056506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:34.066596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:34.066740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.066845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:34.066940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570856065822879:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:34.179664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.179733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.179971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.180068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.180152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:34.180268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.180369Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:34.180459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:34.180552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:34.181029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.181212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:34.181330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570856065822880:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:34.248107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.248192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.248439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.253706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.253906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:34.254005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.254116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570856065822878:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04- ... 
iption=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:34.310720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:34.310749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:34.310775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:34.311057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:34.311101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:34.311280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:34.311302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.311497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:34.311522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:34.311763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:34.311787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:34.311905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:34.311926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:34.313445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:34.313478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:34.313575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T14:00:34.313636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:34.313826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:34.313847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:34.313910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:34.313930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:34.313967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:34.313980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:34.313998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:34.314010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:34.314233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:34.314266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:34.314448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:34.314462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.314568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:34.314602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:34.314705Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:34.314717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:34.314781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:34.314793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:34.382514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:00:36.168805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570847475887816:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:36.168899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:36.769534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570868950725074:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.779769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.789230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:00:36.921923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570868950725129:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.922015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.975971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570868950725145:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.976071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.016930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570873245692450:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.016995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.024409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:37.097095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570873245692497:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.097179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.099917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710663:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] Test command err: 2025-04-07T13:58:01.600111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:58:01.605156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:01.605469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7f/r3tmp/tmpybuRMP/pdisk_1.dat 2025-04-07T13:58:03.638035Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.225002s 2025-04-07T13:58:03.648831Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.235780s 2025-04-07T13:58:03.766103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:58:04.030892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:04.152140Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T13:58:04.153023Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T13:58:04.162974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:04.163120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:04.193740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:04.579293Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T13:58:04.579380Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T13:58:04.588252Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:611:2520] 2025-04-07T13:58:04.759332Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T13:58:04.796430Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T13:58:04.796543Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T13:58:04.796919Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T13:58:04.797119Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, 
LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T13:58:04.797224Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T13:58:04.841251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T13:58:04.841770Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T13:58:04.842312Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T13:58:04.842385Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T13:58:04.989884Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvBoot 2025-04-07T13:58:04.990802Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvRestored 2025-04-07T13:58:05.001066Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:636:2541] 2025-04-07T13:58:05.001393Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:05.081803Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T13:58:05.120971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:05.121111Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T13:58:05.143513Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T13:58:05.152838Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T13:58:05.164787Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T13:58:05.196870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T13:58:05.247337Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T13:58:05.280838Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T13:58:05.281056Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:652:2550] 2025-04-07T13:58:05.281103Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T13:58:05.281135Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T13:58:05.281169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T13:58:05.281620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:636:2541], Recipient [1:636:2541]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T13:58:05.281679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-07T13:58:05.281895Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T13:58:05.281991Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T13:58:05.282063Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:641:2543], Recipient [1:636:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:05.282098Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T13:58:05.282139Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:641:2543], sessionId# [0:0:0] 2025-04-07T13:58:05.282495Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T13:58:05.282531Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T13:58:05.282568Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T13:58:05.282598Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T13:58:05.282627Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T13:58:05.282658Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T13:58:05.282700Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T13:58:05.282775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:641:2543] 2025-04-07T13:58:05.282832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T13:58:05.287569Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T13:58:05.296879Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T13:58:05.304894Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T13:58:05.308963Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T13:58:05.309059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T13:58:05.309100Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T13:58:05.309135Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T13:58:05.309172Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T13:58:05.311192Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T13:58:05.311266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T13:58:05.311374Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T13:58:05.311414Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T13:58:05.311469Z node 1 :TX_DATASHARD TRACE: 
Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T13:58:05.311497Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T13:58:05.311538Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T13:58:05.311574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T13:58:05.311639Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T13:58:05.314933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:653:2551], Recipient [1:636:2541]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T13:58:05.314990Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T13:58:05.329369Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T13:58:05.329461Z node 1 :TX_DATASHARD TRACE: Complete execution for [ ... count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:34.135568Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-07T14:00:34.135729Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [17:1094:2868] 2025-04-07T14:00:34.135810Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T14:00:34.135922Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-07T14:00:34.135998Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:00:34.136516Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1004:2805], Recipient [17:980:2793]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037890 OperationCookie: 281474976715664 2025-04-07T14:00:34.136603Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037890 for split OpId 281474976715664 2025-04-07T14:00:34.137132Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1004:2805], Recipient [17:1004:2805]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:34.137192Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:34.137363Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] received poison pill [17:1088:2862] 2025-04-07T14:00:34.137477Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] notify reset [17:1088:2862] 2025-04-07T14:00:34.137774Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1088:2862], Recipient [17:980:2793]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:34.137829Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:34.138888Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:726:2071] 2025-04-07T14:00:34.138940Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:726:2071] 2025-04-07T14:00:34.139082Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1004:2805]: {TEvRegisterTabletResult TabletId# 
72075186224037890 Entry# 3000} 2025-04-07T14:00:34.139119Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:00:34.139165Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 3000 2025-04-07T14:00:34.139205Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:00:34.139255Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:731:2071] 2025-04-07T14:00:34.139294Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:731:2071] 2025-04-07T14:00:34.139662Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:00:34.139723Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:34.139771Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-07T14:00:34.139809Z node 17 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:00:34.139844Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:00:34.139872Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-07T14:00:34.139915Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:00:34.139991Z node 17 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 2025-04-07T14:00:34.140047Z node 17 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-07T14:00:34.140140Z node 17 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:00:34.140233Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-07T14:00:34.140314Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [17:1095:2869] 2025-04-07T14:00:34.140346Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-07T14:00:34.140388Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-07T14:00:34.140422Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T14:00:34.140859Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1010:2808], Recipient [17:980:2793]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037891 OperationCookie: 281474976715664 2025-04-07T14:00:34.140918Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2025-04-07T14:00:34.141265Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1090:2864], Recipient [17:1004:2805]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:34.141332Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:34.141445Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [17:1088:2862], serverId# [17:1090:2864], sessionId# [0:0:0] 2025-04-07T14:00:34.141565Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1010:2808], Recipient [17:1010:2808]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:34.141599Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:00:34.141760Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:726:2071] 2025-04-07T14:00:34.141794Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:726:2071] 2025-04-07T14:00:34.141846Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] received poison pill [17:1089:2863] 2025-04-07T14:00:34.141893Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] notify reset [17:1089:2863] 2025-04-07T14:00:34.142172Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1089:2863], Recipient [17:980:2793]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:34.142211Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:00:34.142276Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1010:2808]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 3000} 2025-04-07T14:00:34.142310Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:00:34.142341Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2025-04-07T14:00:34.142376Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T14:00:34.142637Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:00:34.142681Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:34.142717Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2025-04-07T14:00:34.142774Z node 17 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-04-07T14:00:34.142805Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2025-04-07T14:00:34.142836Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:00:34.142879Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:00:34.142930Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:731:2071] 2025-04-07T14:00:34.142961Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:731:2071] 2025-04-07T14:00:34.143240Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1091:2865], Recipient [17:1010:2808]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:34.143278Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:00:34.143319Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [17:1089:2863], serverId# [17:1091:2865], sessionId# [0:0:0] 2025-04-07T14:00:34.143642Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1004:2805]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2025-04-07T14:00:34.143681Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T14:00:34.143715Z node 17 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 3000 2025-04-07T14:00:34.143756Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037890: waitStep# 3000 readStep# 3000 observedStep# 3000 2025-04-07T14:00:34.143812Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037890 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-07T14:00:34.144065Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1010:2808]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2025-04-07T14:00:34.144099Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T14:00:34.144130Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 3000 2025-04-07T14:00:34.144162Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2025-04-07T14:00:34.144217Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-07T14:00:34.157523Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2025-04-07T14:00:34.171653Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [17:1062:2842] 2025-04-07T14:00:34.171724Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [17:1062:2842] 2025-04-07T14:00:34.172941Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [17:381:2376], Recipient [17:1064:2843] 2025-04-07T14:00:34.173040Z node 17 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-07T14:00:34.175951Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2025-04-07T14:00:34.176119Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 2 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:00:34.177319Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [17:967:2785], Recipient [17:980:2793]: NKikimr::TEvTablet::TEvFollowerGcApplied ... 
split finished >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] Test command err: Trying to start YDB, gRPC: 18144, MsgBus: 61361 2025-04-07T14:00:30.807741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570840493013084:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:30.807876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc0/r3tmp/tmpiaFePB/pdisk_1.dat 2025-04-07T14:00:31.481393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:31.481486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.502554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:31.507432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18144, node 1 2025-04-07T14:00:31.760785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:31.760808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:31.760815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:31.761391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61361 TClient is connected to server localhost:61361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:32.499860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:32.539144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:32.749529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:32.749760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:32.750059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:32.750179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:32.750434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:32.750533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:32.750778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:32.750913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:32.751049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:32.751142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:32.751240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:32.751347Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570849082948213:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:32.822705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:32.822920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:32.823152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:32.823277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:32.823411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:32.823510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:32.823600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:32.823698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:32.823791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:32.824274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:32.824408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:32.824492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570849082948214:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:32.856395Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:32.856465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:32.856791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:32.856919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:32.857105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:32.857211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:32.857321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:32.857431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570849082948212:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:32.857572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74905708490829482 ... 
RD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:32.931786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:32.931810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:32.931913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:32.931932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:32.932820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:32.932851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:32.932925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:32.932950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:32.933224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:32.933252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:32.933329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:32.933366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:32.933432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:32.933470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:32.933534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:32.933558Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:32.933985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:32.934029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:32.934209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:32.934231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:32.934357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:32.934378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:32.934540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:32.934560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:32.934666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:32.934687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2025-04-07T14:00:35.361241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570861967850380:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:35.361361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:35.362012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570861967850415:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:35.366713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:35.388884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570861967850417:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:35.796789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570840493013084:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:35.804647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:36.647577Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034436000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) 
(let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? $10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '564) '('"_id" '"b2a18c0-7945736d-a09c33b-7ad3f9c7")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '743) '('"_id" '"7e427985-c893fa00-c2be015-83a19faf")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 21094, MsgBus: 25921 2025-04-07T14:00:32.081419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570850357689206:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:32.081467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fbc/r3tmp/tmpzcqcmf/pdisk_1.dat 2025-04-07T14:00:32.840397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:32.841112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:32.841471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:32.870036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:33.121619Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.151613s 2025-04-07T14:00:33.121718Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.151740s TServer::EnableGrpc on GrpcPort 21094, node 1 2025-04-07T14:00:33.325539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:33.325560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:33.325567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:33.325649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25921 TClient is connected to server localhost:25921 WaitRootIsUp 'Root'... 
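For reference, the scan query captured above (PRAGMA Kikimr.OptUseFinalizeByKey plus the level = 5 filter on /Root/tableWithNulls) can be re-issued outside the test harness. The sketch below is a minimal, hedged example using the YDB Python SDK's scan_query; the endpoint grpc://localhost:2136 and database /Root are assumptions, not values taken from this run.

    import ydb

    # Assumed local endpoint/database; adjust to the cluster under test.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    # Same text as the REQUEST recorded in the log above.
    query = """
    PRAGMA Kikimr.OptUseFinalizeByKey;
    SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5;
    """

    # scan_query streams partial result sets; the pushdown shown in the JSON plan
    # (Filter over TableFullScan with an SSA program) is applied server-side.
    for partial in driver.table_client.scan_query(query):
        for row in partial.result_set.rows:
            print(row.id, row.resource_id)

    driver.stop()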
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:00:34.437341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:00:36.642452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570867537559021:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.642560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:36.849134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:36.942736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:36.942982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:36.943238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:36.943345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:36.943429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:36.943528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:36.943629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:36.943726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:36.943822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:36.943909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:36.944003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:36.944080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570867537559098:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:00:36.952770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:36.952829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:36.952926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:36.952961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:36.953137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:36.953178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:36.953280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:36.953309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:36.953369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:36.953393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:36.953424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:36.953451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:36.953966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:36.954009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:36.954192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:36.954216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:36.954322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:36.954369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:36.954546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:36.954578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:36.954686Z node 1 :TX_COLU ... umnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; 2025-04-07T14:00:37.088982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570850357689206:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:37.089093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlap::PredicatePushdown_LikePushedDownForStringType >> KqpOlapSysView::StatsSysViewBytesColumnActualization |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> KqpOlapAggregations::Aggregation_Some_GroupBy >> KqpOlap::PredicatePushdownCastErrors >> KqpOlapTiering::TieringValidation >> KqpOlapTiering::EvictionResetTiering >> TPQTest::TestReadSubscription [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> KqpOlapAggregations::Aggregation_Sum_GroupBy >> KqpOlapAggregations::Aggregation_Avg_Null [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29403, MsgBus: 7453 2025-04-07T13:57:38.166050Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570102736564702:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:38.166077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001049/r3tmp/tmp6Q95AF/pdisk_1.dat 2025-04-07T13:57:38.819394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:38.828151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:38.828243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:38.830083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29403, node 1 2025-04-07T13:57:39.130339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:39.130362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:39.130371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:39.130454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7453 TClient is connected to server localhost:7453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:39.897167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:39.911765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:39.923227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.105566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:40.328039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:40.400158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:42.371103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570119916435604:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.371216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.596181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.647442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.694609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.733411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.833528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.875219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:42.968965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570119916436106:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.969032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.972891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570119916436111:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:42.978469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:42.998492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570119916436113:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:43.220854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570102736564702:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:43.220970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:44.443114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.485086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.516050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.550090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.619185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.751934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.793435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.817014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.856453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.888279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.935614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:44.974499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.046314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.756876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:45.848544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.904819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:45.978520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.053075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:46.081672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... ode 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:22.810726Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:22.810756Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:22.810827Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:22.810854Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:22.810892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:22.810921Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:22.812455Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:22.812512Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:22.812748Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:22.812786Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:22.812921Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:22.812954Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:22.813133Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:22.813182Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:22.813326Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:22.813361Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:22.814327Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:22.814372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:22.814472Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:22.814501Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:22.814661Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:22.814689Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:22.814778Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:22.814814Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:22.814879Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:22.814905Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:22.814943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:22.814970Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:22.815297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:22.815366Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:22.815562Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:22.815591Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:22.815759Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:22.815793Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:22.816979Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:22.817023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:22.817158Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:22.817201Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:22.856554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:22.856867Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:22.856908Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:22.860927Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:22.860991Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:22.861246Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:22.861282Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:22.861422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:22.861462Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:33.206637Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88trctanxdqrde32qknvc2", SessionId: ydb://session/3?node_id=5&id=NzRiYmMzZWItNmVlNDBmNjMtODU4NDY2OTItN2FlMjU1MGY=, Slow query, duration: 10.011332s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n-- TPC-H/TPC-R Important Stock Identification Query (Q11)\n-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)\n-- using 1680793381 as a seed to the RNG\n\n$join1 = (\nselect\n ps.ps_partkey as ps_partkey,\n ps.ps_supplycost as ps_supplycost,\n ps.ps_availqty as ps_availqty,\n s.s_nationkey as s_nationkey\nfrom\n `/Root/partsupp` as ps\njoin\n `/Root/supplier` as s\non\n ps.ps_suppkey = s.s_suppkey\n);\n$join2 = (\nselect\n j.ps_partkey as ps_partkey,\n j.ps_supplycost as ps_supplycost,\n j.ps_availqty as ps_availqty,\n j.s_nationkey as s_nationkey\nfrom\n $join1 as j\njoin\n `/Root/nation` as n\non\n n.n_nationkey = j.s_nationkey\nwhere\n n.n_name = 'CANADA'\n);\n$threshold = (\nselect\n sum(ps_supplycost * ps_availqty) * 0.0001000000 as threshold\nfrom\n $join2\n);\n$values = (\nselect\n ps_partkey,\n sum(ps_supplycost * ps_availqty) as value\nfrom\n $join2\ngroup by\n ps_partkey\n);\n\nselect\n v.ps_partkey as ps_partkey,\n v.value as value\nfrom\n $values as v\ncross join\n $threshold as t\nwhere\n v.value > t.threshold\norder by\n value desc;\n", parameters: 0b >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_Null [GOOD] Test command err: Trying to start YDB, gRPC: 23595, MsgBus: 22713 2025-04-07T14:00:35.084516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570863158679083:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:35.084551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb9/r3tmp/tmpHQ8JtN/pdisk_1.dat 2025-04-07T14:00:35.596515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:35.606018Z node 1 
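The KQP_SLOW_LOG entry above flags the TPC-H Q11 statement at roughly 10 s. A minimal sketch of timing the same statement from the YDB Python SDK follows; it assumes the query text quoted in the warning has been saved to a local q11.yql file and uses the same hypothetical endpoint as the earlier sketch.

    import time
    import ydb

    # Assumed endpoint/database, as in the sketch above.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    # Q11 text as quoted in the slow-query warning (saved locally beforehand).
    q11_text = open("q11.yql").read()

    started = time.monotonic()
    rows = 0
    for partial in driver.table_client.scan_query(q11_text):
        rows += len(partial.result_set.rows)
    print(f"q11: {rows} rows in {time.monotonic() - started:.3f}s")

    driver.stop()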
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:35.606113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:35.625681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23595, node 1 2025-04-07T14:00:35.921245Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:35.921273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:35.921280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:35.921379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22713 TClient is connected to server localhost:22713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:37.143095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:37.163245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:37.175371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:37.288103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:37.288748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:37.289037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:37.289182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:37.289282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:37.289380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:37.289472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:37.289567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:37.289662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:37.289765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:37.289858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:37.289944Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570871748614355:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:37.371500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:37.371562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:37.371794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:37.371902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:37.371996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:37.376870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:37.377078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:37.377182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:37.377293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:37.377394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:37.377480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:37.377559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570871748614356:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:37.443344Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:37.443420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:37.443647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:37.443747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:37.443860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:37.443940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:37.444033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:37.444137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570871748614380:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:37.553265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:37.553296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:37.553417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:37.553441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:37.553478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:37.553502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:37.553800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:37.553840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:37.553994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:37.554020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:37.554127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:37.554152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:37.554311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:37.554336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:37.554434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:37.554455Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2025-04-07T14:00:39.461907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570880338549253:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:39.461907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570880338549220:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:39.462005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:39.465859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:39.475664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570880338549258:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:40.084930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570863158679083:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:40.085018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:41.534308Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034440001, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Double)))))) (let $1 (DataType 'Int32)) (let $2 
(OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1624) '('"_id" '"8bbf8fc-3cd64c1b-997ca55b-6c87da5f") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $30 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $30) '((Nothing $2) $30))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Double)) (let $12 (OptionalType (TupleType $11 (DataType 'Uint64)))) (let $13 '('('"_logical_id" '1682) '('"_id" '"d26c0ef8-4450be2f-d46848ce-1261b138") '('"_wide_channels" (StructType '('_yql_agg_0 $12))))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $31 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $32 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $33 (KqpWideReadOlapTableRanges $31 %kqp%tx_result_binding_0_0 '('"level") '() $32 (lambda '($34) (block '( (let $35 '('"_yql_agg_0_sum" '"sum" '"level")) (let $36 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $34 '($35 $36) '())) ))))) (return (FromFlow (WideMap $33 (lambda '($37 $38) (IfPresent $38 (lambda '($39) (Just '((Convert $39 'Double) $37))) (Nothing $12)))))) ))) $13)) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (OptionalType $11)) (let $17 (DqPhyStage '($15) (lambda '($40) (block '( (let $41 (Bool 'false)) (let $42 (WideCondense1 (ToFlow $40) (lambda '($44) $44) (lambda '($45 $46) $41) (lambda '($47 $48) (IfPresent $47 (lambda '($49) (IfPresent $48 (lambda '($50) (Just '((AggrAdd (Nth $49 '0) (Nth $50 '0)) (AggrAdd (Nth $49 '1) (Nth $50 '1))))) $47)) $48)))) (let $43 (Condense (NarrowMap (Take $42 (Uint64 '1)) (lambda '($51) (block '( (let $52 (IfPresent $51 (lambda '($53) (Just (Div (Nth $53 '0) (Nth $53 '1)))) (Nothing $16))) (return (AsStruct '('Avg0 $52))) )))) (Nothing (OptionalType (StructType '('Avg0 $16)))) (lambda '($54 $55) $41) (lambda '($56 $57) (Just $56)))) (return (FromFlow (Map $43 (lambda '($58) (AsList (AsStruct '('"column0" (Member $58 'Avg0)))))))) ))) '('('"_logical_id" '3360) '('"_id" '"945f278b-54a5766a-3cf7890c-a0fef7e0")))) (let $18 (DqCnValue (TDqOutput $17 '0))) (let $19 (KqpTxResultBinding $10 '0 '0)) (let $20 '('('"type" '"scan"))) (let $21 (KqpPhysicalTx '($14 $17) '($18) '('($8 $19)) $20)) (let $22 '"%kqp%tx_result_binding_1_0") (let $23 (ListType (StructType '('"column0" $16)))) (let $24 '('('"_logical_id" '3456) '('"_id" '"c276260b-a434f374-def72cca-a234d235") $3)) (let $25 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $24)) (let $26 (DqCnResult (TDqOutput $25 '0) '('"column0"))) (let $27 (KqpTxResultBinding $23 '1 '0)) (let $28 (KqpPhysicalTx '($25) '($26) '('($22 $27)) $20)) (let $29 '($7 $21 $28)) (return (KqpPhysicalQuery $29 '((KqpTxResultBinding $23 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000a8e/r3tmp/tmpj1xTRv/pdisk_1.dat 2025-04-07T14:00:27.625421Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:27.636022Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:27.738210Z node 2 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:27.807663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:28.114036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:28.155512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:28.155585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:28.164429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:28.164508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:28.178939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:28.199679Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:28.205417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4216, node 1 2025-04-07T14:00:28.444898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/000a8e/r3tmp/yandexl0T8xm.tmp 2025-04-07T14:00:28.444927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/000a8e/r3tmp/yandexl0T8xm.tmp 2025-04-07T14:00:28.445096Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/000a8e/r3tmp/yandexl0T8xm.tmp 2025-04-07T14:00:28.445223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:28.508022Z INFO: TTestServer started on Port 1967 GrpcPort 4216 TClient is connected to server localhost:1967 PQClient connected to localhost:4216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:29.076458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:00:29.200783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:00:31.609603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570844959502722:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:31.609627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570844959502708:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:31.609725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:31.614951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-04-07T14:00:31.660848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570844959502732:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-04-07T14:00:32.038014Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570844959502829:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:32.085191Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjE2OGU5MTItYjE1NzU0NDAtNDJiNDNlNjktN2M0MGUwYTA=, ActorId: [1:7490570844959502706:2303], ActorState: ExecuteState, TraceId: 01jr88v0kd7adnnednbm31ejbs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:32.087482Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:32.090280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:32.284552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:32.423782Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570851244456852:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:32.426759Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWE5ZGQyNTMtZTdhNDZmNDYtN2U1ZDc5NzMtMjQwYTFkN2Y=, ActorId: [2:7490570851244456815:2284], ActorState: ExecuteState, TraceId: 01jr88v1b97n1ydjp0dtpevw2x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:32.427166Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:32.583820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:33.949964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jr88v1zr0myreahe3ywp6n4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkyNzkzODQtMTRiNDVkZmQtZWIzZDE0MDItOTE4ZTQzMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570857844405209:3115] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-07T14:00:39.323080Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-07T14:00:39.882625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720679. Ctx: { TraceId: 01jr88v88x17t1zckhrer7r0nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdjYzE3NjMtNDE0YTdiYjQtMjBlMDkxMWYtMTUzNDE0OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:00:39.889520Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:39.889570Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-04-07T14:00:39.892656Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:39.892708Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2025-04-07T14:00:39.959744Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:39.959783Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2025-04-07T14:00:39.960060Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:39.960078Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2025-04-07T14:00:40.026228Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:40.041906Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:40.079989Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7490570879319242289:3482] connected; active server actors: 1 2025-04 ... PartitionId: 12 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T14:00:40.247273Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7490570885604195827:2388] 2025-04-07T14:00:40.256974Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:40.249735Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7490570885604195827:2388] 2025-04-07T14:00:40.251414Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7490570885604195828:2389] 2025-04-07T14:00:40.270063Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7490570885604195828:2389] 2025-04-07T14:00:40.286036Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7490570885604195829:2390] 2025-04-07T14:00:40.258561Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] Config applied version 0 actor [1:7490570832074600241:2277] txId 281474976720678 config: PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 15 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles 
{ PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 14 PartitionIds: 11 TopicName: "rt3.dc1--topic-x" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "topic-x" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--topic-x" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 14 Status: Active CreateVersion: 1 TabletId: 0 } Partitions { PartitionId: 11 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 14 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 11 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-07T14:00:40.259026Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7490570883614209884:2468] 2025-04-07T14:00:40.261548Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7490570883614209884:2468] 2025-04-07T14:00:40.300326Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7490570885604195829:2390] 2025-04-07T14:00:40.321036Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7490570883614209908:2473] 2025-04-07T14:00:40.324264Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7490570883614209908:2473] 2025-04-07T14:00:40.336181Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7490570883614209907:2472] 2025-04-07T14:00:40.338259Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7490570883614209907:2472] 2025-04-07T14:00:40.343378Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7490570885604195831:2392] 2025-04-07T14:00:40.346397Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7490570885604195831:2392] 2025-04-07T14:00:40.398594Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7490570885604195830:2391] 2025-04-07T14:00:40.400802Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7490570885604195830:2391] 2025-04-07T14:00:40.405294Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7490570885604195832:2393] 2025-04-07T14:00:40.407455Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7490570885604195832:2393] 2025-04-07T14:00:40.418443Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7490570885604195833:2394] 2025-04-07T14:00:40.426467Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: 
StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7490570885604195833:2394] 2025-04-07T14:00:40.438177Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7490570885604195834:2395] 2025-04-07T14:00:40.440533Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7490570885604195834:2395] ===Query complete Create topic result: 1 2025-04-07T14:00:40.640943Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570883614209973:3769]: Request location 2025-04-07T14:00:40.651583Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209984:3773] connected; active server actors: 1 2025-04-07T14:00:40.651664Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-07T14:00:40.651681Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-07T14:00:40.651694Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-04-07T14:00:40.651708Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-07T14:00:40.651721Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-04-07T14:00:40.651732Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-07T14:00:40.651745Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-04-07T14:00:40.651759Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-04-07T14:00:40.651771Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-04-07T14:00:40.651782Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-04-07T14:00:40.651793Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-04-07T14:00:40.651807Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-04-07T14:00:40.651818Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-04-07T14:00:40.651828Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-04-07T14:00:40.651840Z 
node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-04-07T14:00:40.653023Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570883614209973:3769]: Got location 2025-04-07T14:00:40.655483Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570883614209990:3776]: Request location 2025-04-07T14:00:40.658629Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570883614209990:3776]: Got location 2025-04-07T14:00:40.659425Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570883614209994:3779]: Request location 2025-04-07T14:00:40.655247Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209984:3773] disconnected; active server actors: 1 2025-04-07T14:00:40.655283Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209984:3773] disconnected no session 2025-04-07T14:00:40.655896Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209992:3778] connected; active server actors: 1 2025-04-07T14:00:40.656511Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-07T14:00:40.656527Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-07T14:00:40.656541Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-07T14:00:40.658981Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209992:3778] disconnected; active server actors: 1 2025-04-07T14:00:40.658996Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209992:3778] disconnected no session 2025-04-07T14:00:40.659829Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570883614209996:3781] connected; active server actors: 1 2025-04-07T14:00:41.686108Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490570887909177365:2493] TxId: 281474976720685. Ctx: { TraceId: 01jr88vae9aamgckyz308cmz7s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFkY2I2MGQtNTcwOWE4ZDgtNTYwZGM5ZTQtYzc2MzRjZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 >> TTopicApiDescribes::DescribeTopic [GOOD] >> TPQTest::TestReadAndDeleteConsumer [FAIL] >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] Test command err: 2025-04-07T14:00:23.998717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:00:24.015841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:00:24.016309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:00:24.016473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:24.017249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:24.017293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fcf/r3tmp/tmpYZWhcE/pdisk_1.dat 2025-04-07T14:00:24.592271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:24.831724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:00:24.951424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:24.951543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:24.960226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:24.960375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:24.979959Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:24.980418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:24.980765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:25.469449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:25.716369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:25.716615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:25.716951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:25.717088Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:25.717219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:25.717345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:25.717472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:25.717587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:25.717713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:25.717875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:25.718012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:25.718145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:25.788192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:25.788288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:25.788602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:25.788721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:25.788872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:25.788985Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:25.789076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:25.789292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:25.789397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:25.789502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:25.789630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:25.789730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:25.832007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:25.832091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:25.832295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:25.832446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:25.832561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:25.832677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:25.832782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:25.832902Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:25.833008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:25.833103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:25.833215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:25.833322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:129 ... component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2025-04-07T14:00:37.908509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3846:4279], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.908632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3858:4285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.908751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.918119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:39.028330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3861:4288], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 2025-04-07T14:00:40.010255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88v6q8ek107bj9wbwa9j33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiN2FhMjAtY2VjZWI1Y2EtNGMwNDE1OC1mY2QyYWEx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; -- EvScanData from [1:4322:4557]: hijack event -- EvScanData from [1:4328:4560]: hijack event FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; -- EvScanData from [1:4336:4564]: hijack event FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; 2025-04-07T14:00:40.732442Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0, Running 2025-04-07T14:00:40.732624Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037891, restarting scan from last received key (Timestamp : +\x9E\x98\0\0\0\0\0, Utf8 : uid_10001963), attempt #1 (total 1) schedule after 0.000000s -- EvScanData from [2:4331:3355]: hijack event 2025-04-07T14:00:40.734263Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. Got EvDeliveryProblem, TabletId: 72075186224037892, NotDelivered: 0, Running 2025-04-07T14:00:40.734367Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037892, restarting scan from last received key (Timestamp : L\x9E\x98\0\0\0\0\0, Utf8 : uid_10001996), attempt #1 (total 1) schedule after 0.000000s 2025-04-07T14:00:40.734482Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. Got EvDeliveryProblem, TabletId: 72075186224037894, NotDelivered: 0, Running 2025-04-07T14:00:40.734561Z node 1 :KQP_COMPUTE WARN: SelfId: [1:4244:4544]. 
TKqpScanFetcherActor: broken pipe with tablet 72075186224037894, restarting scan from last received key (Timestamp : M\x9E\x98\0\0\0\0\0, Utf8 : uid_10001997), attempt #1 (total 1) schedule after 0.000000s -- EvScanData from [2:4334:3358]: hijack event -- EvScanData from [2:4343:3361]: hijack event -- EvScanData from [2:4346:3364]: hijack event 2025-04-07T14:00:40.982726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:4389:4595];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.024521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.037434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:4389:4595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.178175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:4405:4602];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.209351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:4414:4607];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.225641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.236219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.276405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:4419:4610];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.279158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:4405:4602];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.279342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:4414:4607];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.294958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.307747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:4414:4607];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.308025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:4419:4610];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.339817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:4422:4611];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.353057Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.357173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:4422:4611];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.544375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:4430:3382];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.561009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.568255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:4430:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.616373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:4434:3383];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.644911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.693708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:4447:3384];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.699290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:4434:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.709500Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.750131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:4460:3385];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.807151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:4468:3386];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:41.821931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.850278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.853091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:4460:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:41.855078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:4468:3386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:00:43.184899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jr88v6q8ek107bj9wbwa9j33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiN2FhMjAtY2VjZWI1Y2EtNGMwNDE1OC1mY2QyYWEx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"type_id":4}}],"rows":[{"items":[{"uint64_value":20000}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-07T14:00:43.191368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2500, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011f8/r3tmp/tmpHPFmVW/pdisk_1.dat 2025-04-07T14:00:27.203530Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:27.213749Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:27.330048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:27.330591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:27.984622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:27.992981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:27.999158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:27.999226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:28.011189Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:28.011291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:28.013491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:28.056842Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26011, node 1 2025-04-07T14:00:28.225095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:28.225387Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:28.317524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0011f8/r3tmp/yandex9tIDb8.tmp 2025-04-07T14:00:28.317549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0011f8/r3tmp/yandex9tIDb8.tmp 2025-04-07T14:00:28.317728Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0011f8/r3tmp/yandex9tIDb8.tmp 2025-04-07T14:00:28.317828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:28.479583Z INFO: TTestServer started on Port 1220 GrpcPort 26011 TClient is connected to server localhost:1220 PQClient connected to localhost:26011 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:29.047878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:29.092997Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:00:29.116266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:00:33.861219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570854356283975:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.861349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.868808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570854356283989:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.879127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:00:33.936575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570854356284020:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.965046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.965806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-07T14:00:33.966012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570854356283991:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:00:34.419551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:34.467929Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570858629726051:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:34.468984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ4MWUzYjctNTY5ODZkZDUtYTliNWE5OWItMWZjOGQ4MDQ=, ActorId: [2:7490570858629726011:2287], ActorState: ExecuteState, TraceId: 01jr88v38ka0xz2d0haey6nzgv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:34.471555Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:34.474032Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570858651251370:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:34.482922Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTU1YjkzNjItYzM3MmQ0ZWUtNWQ4NjkyZWItODc0ZWM4ZmE=, ActorId: [1:7490570854356283950:2306], ActorState: ExecuteState, TraceId: 01jr88v2q49nnmz6vsewg80crb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:34.483358Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:34.677992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:34.920520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:35.374270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr88v41sf1dym02qe3kzqx62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MwOTVjNjEtODNlMDI3Ni1kNTU3YTA5OC04NWIxYjI1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570862946219118:3097] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-07T14:00:40.573788Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-07T14:00:41.022247Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:41.022277Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] doesn't have tx writes info 2025-04-07T14:00:41.021401Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:41.021452Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-04-07T14:00:41.021684Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:41.021698Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes inf ... 
key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1744034442 nanos: 5000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1744034442 nanos: 577000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-04-07T14:00:42.626709Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-07T14:00:42.626829Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-04-07T14:00:42.626914Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-04-07T14:00:42.627724Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570893010991771:2549]: Request location 2025-04-07T14:00:42.680691Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570893010991773:2550] connected; active server actors: 1 2025-04-07T14:00:42.680756Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-07T14:00:42.680772Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-07T14:00:42.680786Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-07T14:00:42.680800Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-07T14:00:42.680812Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-07T14:00:42.680826Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-07T14:00:42.680838Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-07T14:00:42.680850Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-07T14:00:42.680863Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-07T14:00:42.680874Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-07T14:00:42.680887Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-07T14:00:42.680898Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 
2025-04-07T14:00:42.680909Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-07T14:00:42.680919Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-07T14:00:42.680930Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-07T14:00:42.684911Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570893010991771:2549]: Got location 2025-04-07T14:00:42.685823Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570893010991773:2550] disconnected; active server actors: 1 2025-04-07T14:00:42.685853Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570893010991773:2550] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744034441205 tx_id: 281474976710676 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-04-07T14:00:42.696960Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-07T14:00:42.697056Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: 
"/Root/PQ//rt3.dc1--topic-x" 2025-04-07T14:00:42.697133Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744034441205 tx_id: 281474976710676 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-04-07T14:00:42.701803Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-07T14:00:42.701863Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-07T14:00:42.701919Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2025-04-07T14:00:43.047647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:43.047684Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-04-07T14:00:26.177470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570823267642165:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:26.177513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:00:26.309252Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570825246987859:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:26.738347Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:26.745088Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/00122a/r3tmp/tmpWdz7xx/pdisk_1.dat 2025-04-07T14:00:26.893020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:00:27.230169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:27.351999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:27.415807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:27.415891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:27.416992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:27.417062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:27.445352Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:27.445609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:27.445906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25018, node 1 2025-04-07T14:00:27.786225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00122a/r3tmp/yandext2wK8q.tmp 2025-04-07T14:00:27.786246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00122a/r3tmp/yandext2wK8q.tmp 2025-04-07T14:00:27.786390Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00122a/r3tmp/yandext2wK8q.tmp 2025-04-07T14:00:27.792917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:27.962828Z INFO: TTestServer started on Port 30032 GrpcPort 25018 TClient is connected to server localhost:30032 PQClient connected to localhost:25018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:28.466151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:00:28.559182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:00:28.997094Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720661, at schemeshard: 72057594046644480 2025-04-07T14:00:31.178794Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570823267642165:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:31.178853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:31.308844Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570825246987859:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:31.308914Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:32.598706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570849037447013:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:32.598867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:32.602685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570849037447043:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:32.758569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-04-07T14:00:32.846246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720662, at schemeshard: 72057594046644480 2025-04-07T14:00:32.847065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570849037447045:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-04-07T14:00:33.374123Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570855311759208:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:33.376330Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2VkMDkwMjAtNDAxOTRhMjUtOGJmNTQ3YzctOWE4MTJmYg==, ActorId: [2:7490570855311759184:2288], ActorState: ExecuteState, TraceId: 01jr88v22vfpryf45117a5ag77, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:33.381043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.383643Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:33.385109Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570849037447139:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:33.386545Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgwZTc1ODUtMjQzZmFiZjktZGJjODJmYTAtZTIxNDM4NDY=, ActorId: [1:7490570849037447010:2308], ActorState: ExecuteState, TraceId: 01jr88v1h0c35c0e4e67czxm51, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:33.386896Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:33.565942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.937602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:34.641370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jr88v32p0nvrw8pmr16wfjdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNjODk0MmQtODk1ZjU4ZmMtMjZkZWVkNmMtMzBiZTBjZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570857627382199:3093] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-07T14:00:41.137753Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-07T14:00:41.620158Z node 1 :PERSQUEUE NOTICE: [PQ: 720751862240378 ... :PERSQUEUE INFO: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:00:42.912500Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7490570891987122122:2536] 2025-04-07T14:00:42.914541Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570891987122143:2543] connected; active server actors: 1 2025-04-07T14:00:42.914585Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-04-07T14:00:42.916064Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-07T14:00:42.915687Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:42.915733Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7490570891987122125:2538] Got response: 2025-04-07T14:00:42.917933Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-07T14:00:42.917964Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7490570891987122126:2539] 2025-04-07T14:00:42.956411Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:42.956479Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7490570891987122123:2537] 2025-04-07T14:00:42.976975Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570891987122134:2540]: Got location 2025-04-07T14:00:43.010168Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-07T14:00:43.010277Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-04-07T14:00:43.010312Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7490570896282089523:2546]: Bootstrap 2025-04-07T14:00:43.010965Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570896282089523:2546]: Request location 2025-04-07T14:00:43.022157Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570896282089523:2546]: Got location 2025-04-07T14:00:43.022288Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570896282089523:2546]: Something wrong on location, retry. 
Response: Status: false Locations { PartitionId: 3 } 2025-04-07T14:00:43.021683Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570891987122143:2543] disconnected; active server actors: 1 2025-04-07T14:00:43.021717Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570891987122143:2543] disconnected no session 2025-04-07T14:00:43.021760Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570896282089526:2548] connected; active server actors: 1 2025-04-07T14:00:43.021815Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-04-07T14:00:43.021832Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-04-07T14:00:43.029638Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:43.029673Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] doesn't have tx writes info 2025-04-07T14:00:43.031232Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:43.031261Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-07T14:00:43.032657Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:43.032698Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895] doesn't have tx writes info 2025-04-07T14:00:43.034564Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:43.034580Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] doesn't have tx writes info 2025-04-07T14:00:43.035210Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7490570898261433243:2458] 2025-04-07T14:00:43.037961Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7490570898261433244:2459] 2025-04-07T14:00:43.040303Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7490570898261433246:2460] 2025-04-07T14:00:43.042781Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7490570898261433247:2461] 2025-04-07T14:00:43.045230Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7490570898261433249:2462] 2025-04-07T14:00:43.047378Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7490570898261433250:2463] 2025-04-07T14:00:43.052601Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7490570898261433255:2464] 2025-04-07T14:00:43.054764Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7490570898261433256:2465] 2025-04-07T14:00:43.064969Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:00:43.065001Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7490570898261433243:2458] 2025-04-07T14:00:43.067749Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.067779Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7490570898261433244:2459] 2025-04-07T14:00:43.069469Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.069492Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7490570898261433246:2460] 2025-04-07T14:00:43.070646Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.070670Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7490570898261433247:2461] 2025-04-07T14:00:43.072283Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.072306Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7490570898261433249:2462] 2025-04-07T14:00:43.073470Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.073502Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7490570898261433250:2463] 2025-04-07T14:00:43.120128Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-04-07T14:00:43.120161Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-04-07T14:00:43.120178Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-04-07T14:00:43.120277Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:43.120318Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7490570898261433256:2465] 2025-04-07T14:00:43.124713Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:00:43.124804Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7490570898261433255:2464] 2025-04-07T14:00:43.131841Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-04-07T14:00:43.224367Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570896282089523:2546]: Request location 2025-04-07T14:00:43.225310Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570896282089523:2546]: Got location 2025-04-07T14:00:43.225039Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-07T14:00:43.247590Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570896282089526:2548] disconnected; active server actors: 1 2025-04-07T14:00:43.247645Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570896282089526:2548] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744034443 nanos: 39000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-04-07T14:00:43.277029Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-07T14:00:43.277130Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-07T14:00:43.277178Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7490570896282089593:2553]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:40.637563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:40.637657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:40.637712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:40.637742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:40.637787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:40.637829Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:40.637907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:40.638250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:40.707478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:40.707541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:40.718978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:40.719097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:40.719284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:40.724925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:40.725160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:40.725805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:40.726032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:40.728095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:40.729365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:40.729430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:40.729541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:40.729578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:40.729610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:40.729763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.736559Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:40.851905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:40.852140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.852347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:40.852567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:40.852617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, 
at schemeshard: 72057594046678944 2025-04-07T13:59:40.857191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:40.857339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:40.857524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.857598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:40.857637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:40.857665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:40.859905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.859961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:40.860007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:40.861786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.861841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.861883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.861952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.870877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:40.872436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:40.872607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:40.873489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:40.873599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:40.873649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.873843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-07T13:59:40.873875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.873997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:40.874069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:40.875518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:40.875546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:40.875664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:40.875691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:40.875858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.875911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:40.876005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:40.876048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.876098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:40.876156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.876188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:40.876213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:40.876272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:40.876305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:40.876339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:40.878189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:40.878287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:40.878320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:40.878352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:40.878382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2025-04-07T13:59:40.878468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 025-04-07T14:00:45.063366Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:00:45.063420Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:00:45.109641Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:321:2306]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:00:45.109762Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2025-04-07T14:00:45.109885Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2025-04-07T14:00:45.110012Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-07T14:00:45.110446Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:321:2306], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 43 Memory: 124000 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-07T14:00:45.110508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:00:45.110590Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0043 2025-04-07T14:00:45.110732Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:00:45.110773Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:00:45.132872Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:321:2306]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:45.132946Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 
2025-04-07T14:00:45.133024Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-07T14:00:45.133080Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:45.133113Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-07T14:00:45.133154Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-07T14:00:45.133183Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-07T14:00:45.143661Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:323:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:00:45.143757Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409547 2025-04-07T14:00:45.143848Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409547, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T14:00:45.143971Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-04-07T14:00:45.144330Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:323:2307], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 36 Memory: 119120 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-07T14:00:45.144380Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:00:45.144440Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0036 2025-04-07T14:00:45.144576Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:00:45.157577Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:323:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:45.157647Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:45.157740Z node 3 :TX_DATASHARD 
TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-04-07T14:00:45.157810Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:45.157843Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-04-07T14:00:45.157870Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-04-07T14:00:45.157897Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-04-07T14:00:45.190248Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:00:45.190322Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:00:45.190351Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:00:45.190412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-07T14:00:45.190445Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-07T14:00:45.190569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-07T14:00:45.190651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:00:45.190697Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-07T14:00:45.190801Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-04-07T14:00:45.190889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:00:45.190923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:00:45.190946Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:00:45.190983Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-04-07T14:00:45.191065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:00:45.203051Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:00:45.203153Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:00:45.203191Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at 
tablet 72057594046678944, queue size# 0 2025-04-07T14:00:45.231147Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1333:3257], Recipient [3:321:2306]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:45.231224Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:45.231280Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1332:3256], serverId# [3:1333:3257], sessionId# [0:0:0] 2025-04-07T14:00:45.231513Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1331:3255], Recipient [3:321:2306]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-04-07T14:00:45.233831Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1336:3260], Recipient [3:323:2307]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:45.233894Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:45.233929Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1335:3259], serverId# [3:1336:3260], sessionId# [0:0:0] 2025-04-07T14:00:45.234123Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1334:3258], Recipient [3:323:2307]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] Test command err: Trying to start YDB, gRPC: 27533, MsgBus: 9673 2025-04-07T14:00:13.687655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570770328209662:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:13.687947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd6/r3tmp/tmpX7DyA0/pdisk_1.dat 2025-04-07T14:00:14.260270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:14.260528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:14.270110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:14.335851Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27533, node 1 2025-04-07T14:00:14.633229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:14.633254Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:14.633262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:14.633348Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9673 TClient is connected to server localhost:9673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:15.562396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:15.592061Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:15.613826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:15.769590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.769812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.770071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.770188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.770303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.770473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.770577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:15.770699Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:15.770843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:15.770946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:15.771061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:15.771181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570778918144710:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:15.825238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.825300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.825519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.825600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.825741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.825858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.825951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:15.826068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:15.826165Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:15.826241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:15.826387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:15.826484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570778918144713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:15.861583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.861643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.861849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.861967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.862058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.862141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.862219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:15.862306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570778918144719:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:42.889164Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. 
Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.076291Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.076377Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.149031Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.149095Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.225068Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.225158Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.312841Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.312935Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:00:43.417198Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:43.417324Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:43.477069Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.477154Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.553089Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.553202Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.626459Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.626562Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.700112Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.700190Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.773019Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.773101Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:43.819784Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:43.933278Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:43.933350Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:44.047364Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:44.047430Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:44.114201Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:44.114284Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:44.185466Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:44.185530Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . 
DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:44.261103Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:00:44.261190Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3006], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88v1j0apcabb9a7xkt2zwc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTE5NDJkNWYtZWU4YjA1MjMtYzc2NzVmOC0yNzNjMDViNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:00:44.351279Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:44.351386Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] Test command err: Trying to start YDB, gRPC: 31920, MsgBus: 2114 2025-04-07T14:00:40.718031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570884803161176:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:40.718176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb4/r3tmp/tmpsVB6Sd/pdisk_1.dat 2025-04-07T14:00:41.237162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:41.261520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:41.261610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:41.265554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31920, node 1 2025-04-07T14:00:41.364058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:41.364081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:41.364090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:41.364226Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2114 TClient is connected to server localhost:2114 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:42.497345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:42.520045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:42.528597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:42.717680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.717869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.718095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.718230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.718336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.718423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.718514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.718600Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:42.718697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:42.718789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.718890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:42.718971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570893393096288:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:42.806027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.806083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.806282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.806365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.806443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.806522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.806602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.806675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:42.806746Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:42.806817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.806893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:42.806982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570893393096289:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:42.837172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.837216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.837381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.837489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.837586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.837662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.837732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.837802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570893393096298:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
act.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:42.886513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:42.886532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:42.886895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:42.886917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:42.886998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:42.887017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:42.887144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:42.887175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:42.887256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:42.887279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:42.887319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:42.887340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:42.887366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:42.887390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:42.887625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:42.887652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-04-07T14:00:42.887783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:42.887805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.887906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:42.887923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:42.888048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:42.888067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:42.888161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:42.888176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:42.888491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:42.888515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:42.888584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:42.888600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:42.890104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:42.890135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:42.890212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:42.890234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:42.890289Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:42.890307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:42.890337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:42.890359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:42.890585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:42.890626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:42.890775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:42.890797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.890895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:42.890917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:42.891057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:42.891075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:42.891146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:42.891165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2025-04-07T14:00:45.292097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570906277998467:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.292166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570906277998484:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.292201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.295924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:45.311528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570906277998494:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:45.716121Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570884803161176:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:45.716176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD] |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::UuidPrimaryKey >> KqpOlapAggregations::CountAllPushdown-UseLlvm >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpYql::DdlDmlMix >> KqpScripting::LimitOnShard >> KqpOlap::SimpleLookupOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 7352, MsgBus: 19599 2025-04-07T14:00:36.766419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570867784412571:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:36.766553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb7/r3tmp/tmpy5qKJ2/pdisk_1.dat 2025-04-07T14:00:37.469934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:37.489290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:37.489373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:37.491075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7352, node 1 2025-04-07T14:00:37.659130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:37.659172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:37.659186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:37.659311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19599 TClient is connected to server localhost:19599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:38.671774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:38.711004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:38.819402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:38.819561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:38.819876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:38.820043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:38.820126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:38.820229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:38.820353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:38.820523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:38.820648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:38.820814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:38.821518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:38.821678Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570876374347691:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:38.866672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:38.866724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:38.866929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:38.867055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:38.867538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:38.867686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:38.867799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:38.867904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:38.868015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:38.868100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:38.868176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:38.868330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570876374347693:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:38.943395Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:38.943447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:38.943642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:38.943741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:38.943860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:38.943982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:38.944083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:38.944186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:38.944287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570876374347715 ... 
:39.019051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:39.019136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:39.019161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:39.019315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:39.019337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:39.019416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:39.019455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:39.019501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:39.019517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:39.019547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:39.019572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:39.019878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:39.019906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:39.020069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:39.020092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:39.020209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-07T14:00:39.020237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:39.020390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:39.020417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:39.020516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:39.020542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-04-07T14:00:40.966829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570884964282564:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:40.966879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570884964282582:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:40.966939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:40.971421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:40.998789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570884964282594:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:41.824857Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570867784412571:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:41.825182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:46.070893Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034442003, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1013) '('"_id" '"a7de75e2-8c107bff-fcf5415f-ee8b7fc8") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate 
(AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1072) '('"_id" '"aeab05f9-f525fff9-7901f9c2-5dbe5a1a") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1639) '('"_id" '"25ed84f5-e582fefe-a23b19d3-9413b39") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1651) '('"_id" '"1d23df3-b4d8e713-bdcd693e-58e89653")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 23012, MsgBus: 11786 2025-04-07T13:57:34.175276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570084838570443:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:34.186102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104d/r3tmp/tmpFOasBc/pdisk_1.dat 2025-04-07T13:57:34.844375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:34.844552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:34.844676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:34.848862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23012, node 1 2025-04-07T13:57:35.010471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:35.010487Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:35.010504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:35.010576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11786 TClient is connected to server localhost:11786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:36.005267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:36.041427Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:36.055716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:36.247233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:36.432071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T13:57:36.523549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:38.408142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570102018441214:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.408282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:38.701799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.753400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.801706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.878843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:38.950427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.005767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:39.073042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570106313409008:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.073141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.079523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570106313409013:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:39.084728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:39.096402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570106313409015:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:39.174443Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570084838570443:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:39.174505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:40.322704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.369774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.402243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:40.441849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21498, MsgBus: 11357 2025-04-07T13:57:42.446006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570119886948622:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:42.452342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104d/r3tmp/tmpoycRP1/pdisk_1.dat 2025-04-07T13:57:42.613593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:42.613668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:42.616164Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:42.628256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21498, node 2 2025-04-07T13:57:42.693147Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:42.693182Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:42.693188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:42.693267Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11357 TClient is connected to server localhost:11357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T13:57:43.314320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:57:43.335599Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:43.348384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, op ... Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.478201Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:33.704464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.773946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.834380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.920093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:33.921201Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490570834902602849:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:33.924781Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:34.037543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:34.118240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:34.249019Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570860672408825:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:34.249163Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:34.253064Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490570860672408830:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:34.258712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:34.278659Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490570860672408832:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:00:35.877039Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:00:36.011081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26351, MsgBus: 14316 2025-04-07T14:00:38.440427Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490570874420040344:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:38.440493Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104d/r3tmp/tmpvfrWzr/pdisk_1.dat 2025-04-07T14:00:38.638640Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:38.670173Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:38.670325Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:38.671974Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26351, node 6 2025-04-07T14:00:38.749433Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:38.749460Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:38.749472Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:38.749615Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14316 TClient is connected to server localhost:14316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:39.616385Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:39.638005Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:39.733231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:39.997507Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:40.121732Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:43.278809Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570895894878544:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:43.278917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:43.431892Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:43.449627Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490570874420040344:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:43.451124Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:43.537206Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:43.625543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:43.705747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:43.872244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:43.949651Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:44.041071Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570900189846350:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:44.041201Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:44.041629Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570900189846355:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:44.047182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:44.071574Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490570900189846357:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:00:45.715409Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:00:45.839845Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> KqpOlapAggregations::Aggregation_Sum_Null >> TTopicApiDescribes::DescribeConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21254, MsgBus: 10046 2025-04-07T13:57:15.722481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570002635004430:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:15.722539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105a/r3tmp/tmp9VdVUb/pdisk_1.dat 2025-04-07T13:57:16.147778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:16.164849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:16.164967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:16.173943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21254, node 1 2025-04-07T13:57:16.457239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:16.457259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:16.457273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:16.457349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10046 TClient is connected to server localhost:10046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T13:57:17.253074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.305846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.603683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.855280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.968429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:20.082624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570024109842400:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.082893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.569628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.629061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.679121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.723053Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570002635004430:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:20.723108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:20.746096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.793570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.879891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.995820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570024109842901:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.995920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.996307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570024109842906:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:21.005058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:21.017358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:57:21.017884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570024109842908:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:24.296197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.371602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.460966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.565638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:24.644426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.094837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.154769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.362472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.409100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.452496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.510769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.660879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:25.795296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.076530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:27.167937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.251770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.302130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.347456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:27.387914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... 490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:28.244926Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:28.244964Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:28.245041Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;self_id=[5:7490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:28.245104Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:28.245143Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:28.245244Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;self_id=[5:7490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:28.245398Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;self_id=[5:7490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:28.245584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;self_id=[5:7490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:28.245769Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;self_id=[5:7490570831206699918:5920];tablet_id=72075186224038700;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:28.308717Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:28.308820Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:28.308980Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:28.309035Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:28.309286Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:28.309345Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:28.309487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:28.309546Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:28.309625Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:28.309661Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:28.309712Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:28.309746Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:28.310181Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:28.310280Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:28.310545Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:28.310583Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:28.310760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:28.310797Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:28.311021Z 
node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:28.311056Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:28.311214Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:28.311253Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:28.344766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:28.344871Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:28.345009Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:28.345048Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:28.345289Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:28.345328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:28.345443Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:28.345490Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:28.345572Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:28.345614Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:28.345668Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:28.345706Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:28.346157Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:28.346244Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:28.346476Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:28.346522Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:28.346687Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:28.346723Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:28.346963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:28.347001Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:28.347138Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:28.347178Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL [GOOD] >> TIcNodeCache::GetNodesInfoTest [GOOD] >> KqpOlapAggregations::Aggregation_Sum >> KqpOlapSysView::StatsSysViewTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:39.847503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:39.847602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:39.847641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:39.847670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:39.847712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:39.847736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:39.847793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:39.848128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:39.919089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:39.919144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:39.938475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:39.938636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:39.938823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:39.955579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:39.955806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:39.956385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:39.956599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:39.958567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:39.959790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:39.959850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:39.959958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:39.959997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:39.960027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:39.960174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:39.966237Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:40.089166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:40.089425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.089624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-04-07T13:59:40.089836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:40.089889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.092047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:40.092154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:40.092318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.092364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:40.092422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:40.092460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:40.095215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.095306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:40.095337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:40.096912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.096960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.097003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.097050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.105030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:40.109687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:40.109873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:40.110882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:40.111028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 
4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:40.111089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.111328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:40.111375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:40.111533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:40.111626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:40.113688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:40.113725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:40.113891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:40.113926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:40.114177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:40.114214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:40.114313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:40.114341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.114386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:40.114422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:40.114462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:40.114486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:40.114556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:40.114595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:40.114623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:40.116439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:40.116541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:40.116577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 
72057594046678944, txId: 1 2025-04-07T13:59:40.116623Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:40.116657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:40.118908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... -07T14:00:49.412753Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:352:2331], Recipient [3:352:2331]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:00:49.412799Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:00:49.444815Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:352:2331]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-07T14:00:49.444903Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-07T14:00:49.444992Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:352:2331]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-07T14:00:49.445033Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-07T14:00:49.445068Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-07T14:00:49.445124Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-07T14:00:49.445212Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-07T14:00:49.552437Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:757:2646]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:00:49.552523Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409552 2025-04-07T14:00:49.552622Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409552, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T14:00:49.552791Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-04-07T14:00:49.553155Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:757:2646], Recipient [3:883:2747]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 43 Memory: 119120 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 118 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-07T14:00:49.553202Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:00:49.553257Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got 
periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0043 2025-04-07T14:00:49.553358Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:00:49.553403Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:00:49.569143Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:757:2646]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:49.569216Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:49.569299Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-04-07T14:00:49.569364Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:49.569399Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-04-07T14:00:49.569429Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-04-07T14:00:49.569464Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-04-07T14:00:49.579913Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:765:2652]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:49.579988Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:00:49.580072Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-04-07T14:00:49.580133Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:00:49.580178Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2025-04-07T14:00:49.580210Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-04-07T14:00:49.580239Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-04-07T14:00:49.580360Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:765:2652]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:00:49.580414Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409553 2025-04-07T14:00:49.580466Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409553, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T14:00:49.580523Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-04-07T14:00:49.580842Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 
269553162, Sender [3:765:2652], Recipient [3:883:2747]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 119120 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 119 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-07T14:00:49.580899Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:00:49.580949Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0028 2025-04-07T14:00:49.581068Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:00:49.592530Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:883:2747]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:00:49.592597Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:00:49.592683Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:883:2747], Recipient [3:883:2747]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:00:49.592711Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:00:49.603147Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:883:2747]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-07T14:00:49.603227Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-07T14:00:49.603529Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:883:2747]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-07T14:00:49.603564Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-07T14:00:49.603593Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-07T14:00:49.603653Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-07T14:00:49.603705Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-07T14:00:49.603812Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2014:3839], Recipient [3:883:2747]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-07T14:00:49.603840Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-07T14:00:49.625694Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2017:3842], Recipient [3:757:2646]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:49.625776Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:49.625844Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2016:3841], serverId# [3:2017:3842], sessionId# [0:0:0] 2025-04-07T14:00:49.626076Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2015:3840], Recipient [3:757:2646]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-04-07T14:00:49.626728Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2020:3845], Recipient [3:765:2652]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:49.626759Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:49.626787Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2019:3844], serverId# [3:2020:3845], sessionId# [0:0:0] 2025-04-07T14:00:49.626918Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2018:3843], Recipient [3:765:2652]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } |89.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-04-07T14:00:30.161698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570844051738562:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:30.161757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:00:30.715539Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009a7/r3tmp/tmpHepCUE/pdisk_1.dat 2025-04-07T14:00:30.736104Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:30.948838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:31.426515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:31.622334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:31.622446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.629455Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:31.629558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.642552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:31.654334Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:31.661386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24476, node 1 2025-04-07T14:00:31.847471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:31.985267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:32.121216Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:32.141904Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:32.349947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0009a7/r3tmp/yandexFYgHjF.tmp 2025-04-07T14:00:32.349980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0009a7/r3tmp/yandexFYgHjF.tmp 2025-04-07T14:00:32.350127Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0009a7/r3tmp/yandexFYgHjF.tmp 2025-04-07T14:00:32.350232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:32.516254Z INFO: TTestServer started on Port 12836 GrpcPort 24476 TClient is connected to server localhost:12836 PQClient connected to localhost:24476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:33.844585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:34.033432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-07T14:00:35.172384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570844051738562:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:35.172451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:37.663485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570870541626178:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.663573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570870541626211:2291], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.663619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.675666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:00:37.756860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570870541626216:2292], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:00:38.146155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:38.196740Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570874116510625:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:38.199136Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTM0OTdlMzgtZTgzNWEyZjctOThkYzEzOTktMTdlZjk1NzE=, ActorId: [1:7490570874116510572:2309], ActorState: ExecuteState, TraceId: 01jr88v6kw0c4gq9gmznpfkgy5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:38.201754Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:38.205685Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570870541626250:2296], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:38.205975Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWM0ZWZmZjctODczOTJkMGMtM2JiNWJmMDItODhiZWY1MDc=, ActorId: [2:7490570870541626176:2286], ActorState: ExecuteState, TraceId: 01jr88v6gk8da59zsyqqpp0jvp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:38.206450Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:38.336031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:38.562565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:38.945466Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr88v7h01ran10q5y4ny1y9w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzcyZDdjNWMtNTkzYjFhYjAtZGJjNzUyNWYtNTYxMTBiZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570878411478369:3101] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-07T14:00:44.349307Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-07T14:00:44.699843Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:44.699898Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-04-07T14:00:44.725101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr88vd2pc5gyz9gj6vq4y96s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc0NWZiZC1mZjllOGU5LTc4MmJjMjczLWEzOGY3MmU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14: ... 
} partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744034447 nanos: 32000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744034447 nanos: 56000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744034447 nanos: 64000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744034447 nanos: 55000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744034447 nanos: 70000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744034447 nanos: 51000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744034447 nanos: 70000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744034447 nanos: 56000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744034447 nanos: 64000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } } } } 2025-04-07T14:00:47.758172Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-07T14:00:47.758275Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-04-07T14:00:47.758936Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570917066185960:2579]: Request location 2025-04-07T14:00:47.759677Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570917066185962:2580] connected; active server actors: 1 2025-04-07T14:00:47.760010Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-07T14:00:47.760041Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-07T14:00:47.760063Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 1, Generation 2 2025-04-07T14:00:47.760080Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-07T14:00:47.760096Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 2, Generation 2 2025-04-07T14:00:47.760122Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-07T14:00:47.760135Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-07T14:00:47.760157Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-07T14:00:47.760171Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-07T14:00:47.760185Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 1, Generation 2 2025-04-07T14:00:47.760196Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-07T14:00:47.760208Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-07T14:00:47.760220Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-07T14:00:47.760232Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-07T14:00:47.760244Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-07T14:00:47.760447Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7490570917066185960:2579]: Got location Got response: 2025-04-07T14:00:47.761431Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570917066185962:2580] disconnected; active server actors: 1 2025-04-07T14:00:47.761457Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7490570917066185962:2580] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744034445902 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 
active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-07T14:00:47.764764Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-07T14:00:47.764833Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744034445902 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-04-07T14:00:47.768472Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-07T14:00:47.768534Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> KqpOlapAggregations::Aggregation_Some_GroupBy [GOOD] |89.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] Test command err: 2025-04-07T13:59:50.014008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:634:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T13:59:50.028943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:636:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T13:59:50.029403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:50.029578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T13:59:50.031191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:50.031380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001004/r3tmp/tmpAsnem5/pdisk_1.dat 2025-04-07T13:59:50.615388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:50.796192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T13:59:50.925639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:50.925835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:50.934584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:50.934679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:50.960792Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:59:50.961415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:50.961747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:51.431252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:51.681226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.681505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.681796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.681941Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:51.682063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:51.682181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:51.682294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:51.682432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:51.682564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:51.682688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.682840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:51.682940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:51.749676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.749788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.750050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.750212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:51.750321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:51.750453Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:51.750573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:51.750686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:51.750813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:51.750992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.751142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:51.751262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:51.784335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:51.784440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:51.784657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:51.784801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:51.784916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:51.785029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:51.785144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:51.785295Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:51.785416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:51.785518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:51.785614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:51.785725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:129 ... eSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:52.198040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:52.198168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:52.198214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:52.198264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:52.198304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:52.198643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:52.198702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:52.198883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:52.198925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:52.199081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:52.199131Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:52.199363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:52.199413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:52.199564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:52.199611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:53.307575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 2025-04-07T13:59:54.277222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976715658 at schemeshard 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2025-04-07T14:00:45.988233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7249:6472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.988351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7260:6477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.988616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.999951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:46.077155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7263:6480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 2025-04-07T14:00:48.373435Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88vemwcbxsppp0nsgsx0c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjI4ZWY4NGEtZDgwNWRjZTMtYTkzODgzNjYtYjk2Mzc0OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"level","type":{"optional_type":{"item":{"type_id":1}}}},{"name":"message","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"resource_id","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"timestamp","type":{"type_id":50}},{"name":"uid","type":{"type_id":4608}}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":65} 2025-04-07T14:00:48.760636Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2500, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 12362, MsgBus: 27117 2025-04-07T14:00:19.003888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570790260539914:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:19.004226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc8/r3tmp/tmpAQcw8z/pdisk_1.dat 2025-04-07T14:00:19.461951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:19.474005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:19.474113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:19.477859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12362, node 1 2025-04-07T14:00:19.599970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:19.599996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:19.600003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:19.600116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27117 TClient is connected to server localhost:27117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:20.332458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:20.348943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:20.358553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:20.500154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.500384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.502477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.502734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.502876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.504791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.505061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.505265Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.505416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.505572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.505737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.505854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570798850475024:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.563366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.563430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.563645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.563762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.563873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.563968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.564075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.564169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.564300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.564432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.564538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.564654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570798850475018:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.601267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.601334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.601566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.601673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.601781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.601909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.602038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.602188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570798850475019:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.346383Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:47.493279Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.493365Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:47.577164Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.577239Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:47.661980Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.662040Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:47.761077Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.761170Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:00:47.849745Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:47.850274Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:47.915799Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:47.915879Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.009113Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.009203Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.097846Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.097922Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.195753Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.195828Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.312905Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. 
TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.312980Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.348458Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:48.498138Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.498221Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.621149Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.621230Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.725466Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.725540Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.824606Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.824706Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . 
PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:48.921002Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:00:48.921067Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3005], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWUyYjk2MzAtMTA3MzA2OGEtNmM1YThkNi04OTJlN2QwYg==. TraceId : 01jr88v6cc10xma6anecn82erg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:00:49.001118Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:00:49.001245Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 62010, MsgBus: 61509 2025-04-07T13:55:49.969536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569636684026952:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:49.969654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00116e/r3tmp/tmpkuXoPa/pdisk_1.dat 2025-04-07T13:55:50.538424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:55:50.538550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:55:50.552289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:55:50.568698Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62010, node 1 2025-04-07T13:55:50.728269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:55:50.728291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:55:50.728303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:55:50.728392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61509 TClient is connected to server localhost:61509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:55:51.414616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:51.436209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:55:51.455198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:51.636693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:51.885513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:52.039652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:55:54.685622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569658158865130:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:54.701165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:54.742379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.789866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.828739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.867195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.927859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:55:54.966203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569636684026952:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:55:54.966284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:55:55.037692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:55:55.114087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569662453832926:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.114187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.114536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569662453832931:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:55:55.118301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:55:55.133045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569662453832933:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:55:57.255415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.293079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.369271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.455301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.512806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.688476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.732229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.769373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.823439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.885579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:55:57.945990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.000089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.029122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:55:58.853934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:55:58.960658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:55:59.009029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:55:59.070296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:55:59.117664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:55:59.162475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:54.387834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:54.387836Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:54.387858Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.387969Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:54.387991Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:54.388000Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:54.388045Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:54.388152Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:54.388154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:54.388180Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:54.388190Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:54.388261Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:54.388265Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:54.388286Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:54.388290Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T13:59:54.388328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:54.388358Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:54.389271Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:54.389344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:54.389557Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:54.389592Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.389733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:54.389768Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:54.389952Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:54.389983Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:54.390089Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:54.390118Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:39.812886Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr88swk48c3jvqegre88zmqh", SessionId: ydb://session/3?node_id=5&id=MTQxMmY0NGUtOWIwOWM4NzAtNGNmNzhlYWItODM2NjA1ODg=, Slow query, duration: 45.087765s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = 
\"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = 
household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-04-07T14:00:30.171030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570840117444191:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:30.171300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009cb/r3tmp/tmpmEelpm/pdisk_1.dat 2025-04-07T14:00:30.847949Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:30.852938Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:30.985049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:31.313014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:31.686037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:31.760474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:31.760589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.773617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:00:31.773692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.775639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:31.793341Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:00:31.797492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31613, node 1 2025-04-07T14:00:32.250595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0009cb/r3tmp/yandex4BjKhV.tmp 2025-04-07T14:00:32.250617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0009cb/r3tmp/yandex4BjKhV.tmp 2025-04-07T14:00:32.341852Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0009cb/r3tmp/yandex4BjKhV.tmp 2025-04-07T14:00:32.341991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:32.403096Z INFO: TTestServer started on Port 9242 GrpcPort 31613 TClient is connected to server localhost:9242 PQClient connected to localhost:31613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:33.552943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:33.736315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:00:35.169205Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570840117444191:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:35.169262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:37.506052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570874053321565:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.506180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.506475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570874053321601:2291], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.518380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-07T14:00:37.568487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570874053321603:2292], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-07T14:00:38.076560Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490570874053321637:2296], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:38.076927Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBjOTg1MmUtN2YyYTc4MWUtYmQ4OGRhZGUtYzNhZjkwYjI=, ActorId: [2:7490570874053321562:2285], ActorState: ExecuteState, TraceId: 01jr88v6bzdve68p7k0yg1qf3f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:38.079105Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:38.085350Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570870182216339:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:38.086487Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTlhNjNlZDItZTNmN2FlYzktOTJkYzVhZGUtYTY2NzE5Yzc=, ActorId: [1:7490570870182216289:2309], ActorState: ExecuteState, TraceId: 01jr88v6qkf6z11cjwshe48fpq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:38.086794Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:38.102352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:38.233267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:38.481305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:00:38.944368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jr88v7ez2vmavpp5wzw7g28f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTliYmEzM2ItMjc0OTQxNmItNGU1NTZkOTktMjAzZTYzYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490570878772151365:3083] === CheckClustersList. Ok 2025-04-07T14:00:46.605346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:46.605381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:49.450993Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490570921721825185:2507] TxId: 281474976720688. Ctx: { TraceId: 01jr88vj185f8zg2gbnkw7nc60, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U0MDg0NzgtYmRiNGY1MWYtYWY5ODY5MWUtN2QyNGZmOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-07T14:00:49.459739Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490570921721825189:2507], TxId: 281474976720688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=M2U0MDg0NzgtYmRiNGY1MWYtYWY5ODY5MWUtN2QyNGZmOWE=. CustomerSuppliedId : . TraceId : 01jr88vj185f8zg2gbnkw7nc60. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490570921721825185:2507], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpOlapTiering::TieringValidation [GOOD] >> KqpOlap::OlapRead_FailsOnDataQuery >> KqpOlap::OlapDeleteImmediate [GOOD] |89.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 11629, MsgBus: 23999 2025-04-07T14:00:41.486193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570887734156371:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:41.486441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb1/r3tmp/tmpcpAqM9/pdisk_1.dat 2025-04-07T14:00:42.050115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:42.055318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:42.055426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:42.061929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11629, node 1 2025-04-07T14:00:42.273199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:42.273222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:42.273231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:42.273340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23999 TClient is connected to server localhost:23999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:43.193036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:43.214473Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:43.229282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:43.327573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:43.327764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:43.328063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:43.328194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:43.328278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:43.328385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:43.328505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:43.328601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:43.329162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:43.329307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:43.329391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:43.329490Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570896324091497:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:43.377688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:43.377750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:43.377953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:43.378071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:43.378216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:43.378303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:43.378407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:43.378521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:43.378635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:43.378781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:43.378888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:43.378985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570896324091496:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:43.441440Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:43.441507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:43.441699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:43.441811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:43.441916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:43.442008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:43.442102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:43.442207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570896324091510:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
CLASS_NAME=Granules;id=1; 2025-04-07T14:00:43.534769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:43.534883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:43.534909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:43.535101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:43.535123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:43.535207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:43.535235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:43.537391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:43.537434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:43.537488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:43.537517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:43.537868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:43.542440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:43.542704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:43.542738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:43.542896Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:43.542925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:43.544068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:43.544144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:43.544287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:43.544315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2025-04-07T14:00:45.659171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570904914026384:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.659266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570904914026400:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.659366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.678235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:45.713523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570904914026408:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:46.490272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570887734156371:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:46.490623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:50.071075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034446000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '870) '('"_id" '"a839b98f-252b5f98-83c6f183-53f66494") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '1)) (let $24 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $23) $24))) (RangeCreate 
(AsList '($24 '((Just (Int32 '"5")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '929) '('"_id" '"cebbac5e-f0cddf41-6ca7a889-a33b49a9") '('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1352) '('"_id" '"56cfde82-5a7796b7-53827de7-967bdffa") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1364) '('"_id" '"d7064b6-a3b0c13f-74211b0a-9bc08a68")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapDeleteImmediate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::TieringValidation [GOOD] Test command err: Trying to start YDB, gRPC: 16726, MsgBus: 23344 2025-04-07T14:00:41.511233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570887452778057:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:41.511584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000faf/r3tmp/tmpCwugLd/pdisk_1.dat 2025-04-07T14:00:42.113915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:42.126812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:42.126890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:42.175409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16726, node 1 2025-04-07T14:00:42.279491Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:42.279521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-07T14:00:42.279551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:42.279671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23344 TClient is connected to server localhost:23344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:43.095787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:45.338293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:45.455666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.455853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.456068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.456279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.456362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.456454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.456528Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.456611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.457065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.457217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.457325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.457423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570904632647819:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.463835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.463911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.464126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.464215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.464312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.464396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.464464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.464557Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.464647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.473728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.473885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.474010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570904632647822:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.476464Z node 1 :TX_TIERING INFO: fline=manager.cpp:41;event=start_subscribing_metadata; 2025-04-07T14:00:45.484975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:45.485053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:45.485209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:45.485250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:45.485477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:45.485515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:45.485657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:45.485706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:45.485810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:45.485853Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer ... ma::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:45.544635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:45.544729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:45.544747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:45.544940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:45.544967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:45.545034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:45.545081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:45.545165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:45.545185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:45.545212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:45.545234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:45.545501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:45.545560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:45.545734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:45.545774Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.545925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:45.545967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:45.546148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:45.546171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:45.546264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:45.546283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:45.550664Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:45.550693Z node 1 :TX_TIERING DEBUG: manager.cpp:54 :Waiting secrets for update at tablet 72075186224037890 2025-04-07T14:00:45.550707Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:45.550712Z node 1 :TX_TIERING DEBUG: manager.cpp:54 :Waiting secrets for update at tablet 72075186224037889 2025-04-07T14:00:45.551011Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:45.551028Z node 1 :TX_TIERING DEBUG: manager.cpp:54 :Waiting secrets for update at tablet 72075186224037891 2025-04-07T14:00:45.552147Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:45.552166Z node 1 :TX_TIERING DEBUG: manager.cpp:54 :Waiting secrets for update at tablet 72075186224037888 2025-04-07T14:00:45.554693Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:57;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-07T14:00:45.554705Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:00:45.554768Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:57;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-07T14:00:45.554795Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037888 2025-04-07T14:00:45.555092Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:57;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-07T14:00:45.555101Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037890 2025-04-07T14:00:45.555502Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:57;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-07T14:00:45.555514Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037889 2025-04-07T14:00:45.569550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:00:45.616790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570904632648074:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.616884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:45.848550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-07T14:00:46.037365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570908927615482:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:46.037426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:46.037726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570908927615487:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:46.040277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-07T14:00:46.047934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570908927615489:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:00:46.511179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570887452778057:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:46.511274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:46.939514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:47.407972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-07T14:00:47.873149Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:47.873176Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037888 2025-04-07T14:00:47.873592Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:47.873604Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037890 2025-04-07T14:00:47.873676Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:47.873683Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037889 2025-04-07T14:00:47.873768Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:00:47.873774Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:00:47.997775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:00:48.483985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:00:48.939647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:00:50.445294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 >> KqpOlap::PredicatePushdownParameterTypesValidation >> KqpOlap::ScanQueryOltpAndOlap >> KqpOlapAggregations::Aggregation_Sum_GroupBy [GOOD] >> KqpOlap::CompositeRangeOlap >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> KqpOlapAggregations::BlocksRead >> KqpOlap::PredicatePushdown >> KqpOlapSparsed::Switching >> KqpDatetime64ColumnShard::UseTime64Columns >> KqpDecimalColumnShard::TestFilterNulls >> KqpOlap::SimpleLookupOlap [GOOD] >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 21730, MsgBus: 16389 
2025-04-07T14:00:43.209226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570899005231322:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:43.209301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa9/r3tmp/tmp1ZeaPd/pdisk_1.dat 2025-04-07T14:00:43.959365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:43.980798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:43.980879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:43.985297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21730, node 1 2025-04-07T14:00:44.094424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:44.094446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:44.094452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:44.094521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16389 TClient is connected to server localhost:16389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:44.808187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:45.029301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:45.045428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:45.150987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.151206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.151469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.151588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.151697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.151787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.151879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.152019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.152128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.152233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.152331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.152436Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570907595166571:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.190834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.190874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.191005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.191053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.191104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.191158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.191205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.191263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.191314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.191362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.191406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.191461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570907595166578:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.255110Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.255167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.255342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.255447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.255561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.255647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.255722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.255798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570907595166579:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
00:45.324947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:45.325090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:45.325109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:45.325268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:45.325289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:45.325381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:45.325420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:45.325469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:45.325486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:45.325512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:45.325530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:45.325915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:45.326012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:45.326178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:45.326205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.326334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-07T14:00:45.326359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:45.326509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:45.326531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:45.326606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:45.326622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2025-04-07T14:00:47.405489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570916185101480:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:47.405557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570916185101472:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:47.405994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:47.409977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:47.423705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570916185101486:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:48.223728Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570899005231322:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:48.236919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:52.373283Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034448002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1013) '('"_id" '"18b0402-fae4bc1c-f0011e60-499ba3b1") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate 
(AsList '($25 '((Just (Int32 '"5")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1072) '('"_id" '"ee8ad40e-3413b205-644a1aa5-2c96f448") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1639) '('"_id" '"674668f9-45b14743-a8b6faa4-94f58e14") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1651) '('"_id" '"440d7d28-731aed0-782559-2ef4f7da")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::OlapUpsert [GOOD] >> KqpOlap::BlockChannelForce >> KqpOlapStatistics::StatsUsageNotPK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T13:58:44.002122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:58:44.002211Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.002859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:58:44.002924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:58:44.002991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:58:44.003022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:58:44.003093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:58:44.003454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:58:44.091008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:58:44.091074Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.112936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:58:44.113052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:58:44.113265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:58:44.124196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:58:44.124410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:58:44.125132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.125564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:58:44.133014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.134817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.134877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.135035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:58:44.135088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.135124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:58:44.135246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T13:58:44.142306Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T13:58:44.339497Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:58:44.339728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.339957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:58:44.340234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:58:44.340306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.349653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.349805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:58:44.350035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.350135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:58:44.350195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:58:44.350234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:58:44.356623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.356711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:58:44.356802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:58:44.366018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.366089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.366138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.366219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.369836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:58:44.373467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:58:44.373709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:58:44.374639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:58:44.374779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:58:44.374820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.375056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:58:44.375105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:58:44.375300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:58:44.375390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:58:44.377561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:58:44.377608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:58:44.377799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:58:44.377833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:58:44.378076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:58:44.378110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:58:44.378178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:58:44.378201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.378229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:58:44.378268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:58:44.378318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:58:44.378355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:58:44.378411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:58:44.378454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, 
subscribers: 0 2025-04-07T13:58:44.378474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Pu ... , ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2025-04-07T14:00:53.339131Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.339202Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-07T14:00:53.339352Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.339428Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:00:53.339527Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.339591Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:00:53.339638Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-07T14:00:53.344090Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:53.344219Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:00:53.346883Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:53.347371Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1394 } } 2025-04-07T14:00:53.347416Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-07T14:00:53.347584Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1394 } } 2025-04-07T14:00:53.347697Z node 97 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 
CpuTimeUsec: 1394 } } 2025-04-07T14:00:53.348581Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.348626Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-07T14:00:53.348775Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.348828Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:00:53.348924Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-07T14:00:53.348988Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:00:53.349030Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:53.349078Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:00:53.349140Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:00:53.349172Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-07T14:00:53.352742Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:53.353502Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:53.354093Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:00:53.354154Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-04-07T14:00:53.354206Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:00:53.354246Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-07T14:00:53.354322Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-07T14:00:53.354359Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-04-07T14:00:53.357735Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 
2025-04-07T14:00:53.357798Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-07T14:00:53.357915Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-07T14:00:53.357959Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T14:00:53.358007Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-07T14:00:53.358057Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-07T14:00:53.358104Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-07T14:00:53.358138Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-07T14:00:53.358295Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:00:53.358336Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-07T14:00:53.360860Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-07T14:00:53.360912Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-07T14:00:53.361346Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-07T14:00:53.361438Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-07T14:00:53.361476Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:514:2478] TestWaitNotification: OK eventTxId 1003 2025-04-07T14:00:53.361979Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:00:53.362223Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 261us result status StatusSuccess 2025-04-07T14:00:53.362761Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 
Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleLookupOlap [GOOD] Test command err: Trying to start YDB, gRPC: 16395, MsgBus: 26977 2025-04-07T14:00:49.005284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570924966815999:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.006101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa6/r3tmp/tmplUWm8t/pdisk_1.dat 2025-04-07T14:00:49.468194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:49.472243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:49.472355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:49.475468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16395, node 1 2025-04-07T14:00:49.593214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:49.593238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:49.593245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:49.593337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26977 TClient is connected to server localhost:26977 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:50.467384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:50.481370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:50.494764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:50.605530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:50.605763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:50.606052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:50.606202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:50.606362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:50.606479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:50.606596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:50.606701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:50.606843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:50.606956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:50.607082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:50.607197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570929261783811:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:50.649637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:50.649697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:50.649928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:50.650032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:50.650136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:50.650239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:50.650321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:50.650420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:50.650514Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:50.650615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:50.650707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:50.650836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570929261783810:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:50.713837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:50.713936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:50.714194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:50.714343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:50.714439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:50.714527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:50.714632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:50.714808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570929261783814:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
8010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:50.768028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:50.768471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:50.768499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:50.768578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:50.768597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:50.768758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:50.768786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:50.768871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:50.768904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:50.768961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:50.768999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:50.769037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:50.769067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:50.769322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:50.769372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:50.769550Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:50.769570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:50.769687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:50.769709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:50.769872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:50.769894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:50.769991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:50.770009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:50.770744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:50.770778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:50.771776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:50.771810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:50.771980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:50.772002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:50.772073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:50.772112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:50.772164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:50.772187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:50.772216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:50.772422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:50.772814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:50.772858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:50.773027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:50.773051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:50.773172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:50.773195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:50.773362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:50.773388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:50.773505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:50.773527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:50.829377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:00:52.780949Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570937851718694:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.781047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.781288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570937851718730:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.784391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:52.810277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570937851718732:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:53.766858Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034453000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 28884, MsgBus: 25418 2025-04-07T14:00:48.473944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570918795779605:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:48.473981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa8/r3tmp/tmpL0uTOV/pdisk_1.dat 2025-04-07T14:00:48.967405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:48.971096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:48.971180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:48.973777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28884, node 1 2025-04-07T14:00:49.045001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:49.045027Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:49.045037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:49.045154Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25418 TClient is connected to server localhost:25418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:49.759023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:49.785195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:49.803108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:49.902502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:49.902695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:49.902957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:49.903105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:49.903218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:49.903366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:49.903482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:49.903585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:49.903719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:49.903878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:49.904033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:49.904149Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570923090747573:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:49.949880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:49.949956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:49.950201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:49.950331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:49.950451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:49.950556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:49.950655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:49.950763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:49.950868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:49.950989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:49.951124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:49.951229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570923090747577:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:49.986425Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:49.986502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:49.986714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:49.986848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:49.986941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:49.987054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:49.987157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:49.987256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570923090747578:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
8Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:50.036614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:50.036708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:50.036728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:50.036890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:50.036912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:50.037010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:50.037035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:50.037076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:50.037093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:50.037119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:50.037154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:50.037581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:50.037634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:50.037856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:50.037892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:50.037989Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:50.038018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:50.038202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:50.038239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:50.038351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:50.038390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:50.100059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:00:52.378460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570935975649891:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.378555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.379059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570935975649927:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:52.384058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:52.395430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570935975649929:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:53.481752Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570918795779605:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:53.481815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:53.770915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034453000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"599652e1-348710ba-1584b2d8-95eb8edb") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 
(ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"9e5305d3-33d3c36a-9594def9-e4aaf03c")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"33e44aa7-b6f78801-a6195928-856590ea") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Sum_Null [GOOD] >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait [FAIL] >> KqpDecimalColumnShard::TestSimpleQueries >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> KqpOlap::OlapRead_FailsOnDataQuery [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_Null [GOOD] Test command err: Trying to start YDB, gRPC: 13201, MsgBus: 19423 2025-04-07T14:00:49.840763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570923711481632:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.840952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa5/r3tmp/tmpIasaYQ/pdisk_1.dat 2025-04-07T14:00:50.329096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:50.350232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.350297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.354330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13201, node 1 2025-04-07T14:00:50.457486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.457510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.457520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.457616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19423 TClient is connected to server localhost:19423 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:51.152400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.196541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:51.211240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:51.370105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.370341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.370607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.370745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.370881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.371045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.371168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.371296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:51.371433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:51.371565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:51.371678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:51.371779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570932301416742:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:51.427083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.427147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.429048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.429258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.429368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.429458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.429571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.429668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:51.429794Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:51.429899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:51.430005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:51.430110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570932301416743:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:51.468209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.468279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.468479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.468592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.468722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.468836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.468931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.469023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570932301416744:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:51.547034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:51.547061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:51.547153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:51.547239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:51.547283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:51.547307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:51.547340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:51.547421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:51.547728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:51.547792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:51.547958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:51.548006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:51.548101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:51.548125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:51.548269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-04-07T14:00:51.548297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:51.548449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:51.548472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2025-04-07T14:00:53.395056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570940891351630:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.395167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.396763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570940891351642:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.403255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:53.415634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:00:53.416741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570940891351644:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:54.836298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570923711481632:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.836373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:54.984075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034454000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int64)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '864) '('"_id" '"87cfb82a-ee4b1d75-9b32f955-17d27911") $3)) 
(let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (OptionalType (DataType 'Int64))) (let $12 '('('"_logical_id" '922) '('"_id" '"7b7120ab-f7736ae7-f55d4c56-d65a1ce0") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'sum '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Sum0 $42)))) (Nothing (OptionalType (StructType '('Sum0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Member $47 'Sum0)))))))) ))) '('('"_logical_id" '1600) '('"_id" '"c7ca2a3c-103a3eef-a07acff1-e947e9e9")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1696) '('"_id" '"e305de7-898161fd-30bf9e7d-9a745405") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) >> KqpOlapBlobsSharing::MultipleSplitsThenMerges >> KqpOlapAggregations::Aggregation_Some_NullGroupBy |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |89.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> KqpOlapTiering::EvictionIncreaseDuration >> KqpOlapCompression::TestAlterCompressionTableInTableStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_FailsOnDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 12594, MsgBus: 14615 2025-04-07T14:00:52.145229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570937210542983:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:52.145985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f9f/r3tmp/tmpslxmGZ/pdisk_1.dat 2025-04-07T14:00:52.777820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:52.778900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:52.778970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:52.786197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12594, node 1 2025-04-07T14:00:52.965548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:52.965571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:52.965577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:52.965650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14615 TClient is connected to server localhost:14615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:53.744523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:53.755126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:53.777094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:53.881977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:53.882214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:53.882559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:53.882721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:53.882822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:53.882968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:53.883119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:53.883261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:53.883391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:53.883501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:53.883643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:53.883754Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570941505510825:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:53.918909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:53.918977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:53.919230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:53.919382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:53.919486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:53.919592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:53.919719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:53.919843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:53.919985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:53.920096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:53.920216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:53.920372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570941505510826:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:53.960545Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:53.960596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:53.960808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:53.960932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:53.961051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:53.961152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:53.961251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:53.961373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570941505510827:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
5186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:54.283287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:54.283301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:54.283400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:54.283412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:54.283472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:54.283502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:54.283532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:54.283543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:54.283567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:54.283586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:54.283760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:54.283792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:54.283916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:54.283930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.284009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:54.284025Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:54.284154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:54.284169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:54.284250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:54.284265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:54.284534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:54.284551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:54.284623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:54.284636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:54.284792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:54.284821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:54.284931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:54.284961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:54.285019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:54.285050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:54.285084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:54.285114Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:54.285424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:54.285482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:54.285653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:54.285693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.285842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:54.285871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:54.286048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:54.286078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:54.286196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:54.286214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:54.321796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:00:55.902507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570950095445950:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.904979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570950095445942:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.905070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.906576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:00:55.915540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570950095445956:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:00:56.496691Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490570954390413343:2414] TxId: 281474976710663. Ctx: { TraceId: 01jr88vraxfn82hhzwet4p7xcn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllNzc3ZDAtNjdlNzU1YjYtMzRiZTJjYjgtODI2YjFkM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-04-07T14:00:56.514440Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDllNzc3ZDAtNjdlNzU1YjYtMzRiZTJjYjgtODI2YjFkM2I=, ActorId: [1:7490570950095445930:2414], ActorState: ExecuteState, TraceId: 01jr88vraxfn82hhzwet4p7xcn, Create QueryResponse for error on request, msg: >> KqpOlap::ScanQueryOltpAndOlap [GOOD] >> KqpOlapAggregations::BlocksRead [GOOD] >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpOlapWrite::DefaultValues >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpDatetime64ColumnShard::UseTime64Columns [GOOD] >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey >> KqpOlapStatistics::StatsUsageNotPK [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanQueryOltpAndOlap [GOOD] Test command err: Trying to start YDB, gRPC: 13059, MsgBus: 23886 2025-04-07T14:00:53.438969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570941641321822:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:53.439014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f9c/r3tmp/tmpuemSwW/pdisk_1.dat 2025-04-07T14:00:53.923794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:53.925199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:53.925260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:53.928501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13059, node 1 2025-04-07T14:00:54.046538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.046561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.046568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.046667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23886 TClient is connected to server localhost:23886 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:54.635203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:54.676910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:54.816092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:54.816272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:54.816568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:54.816691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:54.816850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:54.816956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:54.817076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:54.817200Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:54.817296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:54.818961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.819333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:54.819511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570945936289786:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:54.852641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:54.852714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:54.852882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:54.852947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:54.853004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:54.853061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:54.853108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:54.853172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:54.853220Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:54.853290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.853346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:54.853397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570945936289787:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:54.894147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:54.894241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:54.894468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:54.894572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:54.894690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:54.894789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:54.894885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:54.894989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570945936289788:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:54.895093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:74905709459362897 ... 
72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:54.954814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:54.954838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:54.954989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:54.955015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:54.955095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:54.955118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:54.955167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:54.955230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:54.955259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:54.955279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:54.955598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:54.955635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:54.955801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:54.955824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.955960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:54.955984Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:54.956157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:54.956188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:54.956288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:54.956308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:54.958803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:54.958840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:54.958946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:54.958970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:54.959147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:54.959167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:54.959253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:54.959284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:54.959534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:54.959562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:54.959596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:54.959617Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:54.959924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:54.959958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:54.960151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:54.960179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.960303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:54.960332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:54.970768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:54.970828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:54.970973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:54.971008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:55.021450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3632;columns=5; 2025-04-07T14:00:55.105926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:57.127322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570958821192090:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.127418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570958821192084:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.127503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.131291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:00:57.140224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570958821192098:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:00:58.489344Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570941641321822:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.489737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:58.533875Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458320, txId: 281474976710664] shutting down |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlocksRead [GOOD] Test command err: Trying to start YDB, gRPC: 30433, MsgBus: 31124 2025-04-07T14:00:54.001988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570942030275956:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.003830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f96/r3tmp/tmpxz2C53/pdisk_1.dat 2025-04-07T14:00:54.373094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.378397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.378604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:54.380582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30433, node 1 2025-04-07T14:00:54.465248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.465277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.465288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.465386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31124 TClient is connected to server localhost:31124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:55.106539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:55.134048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:55.151800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:55.596946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.597190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.597483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.597582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.597716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.597871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.597980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.598090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.598192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.598299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.598419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.598503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570950620211067:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.649003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.649064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.649327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.649452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.649567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.649652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.649771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.649902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.650006Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.650122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.651676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.651823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570950620211083:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.687943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.688013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.688231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.688365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.688475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.688578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.688981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.689114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570950620211094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ormalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:55.751934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:55.751957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.752099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:55.752122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:55.752357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:55.752382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:55.752473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:55.752491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:55.753284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:55.753318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:55.753382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:55.753406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:55.753592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:55.753614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:55.753702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:55.753727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 
2025-04-07T14:00:55.753769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:55.753790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:55.753821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:55.753839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:55.754098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:55.754192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:55.754378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:55.754398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.754490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:55.754510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:55.754674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:55.754694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:55.754819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:55.754838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2025-04-07T14:00:57.474068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570959210145943:2395], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.474154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.474525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570959210145969:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.478142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:57.486813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570959210145972:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:58.410405Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458005, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DqPhyStage '() (lambda '() (block '( (let $5 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $6 '('"id" '"level" '"resource_id")) (let $7 (KqpBlockReadOlapTableRanges $5 (Void) $6 '() '() (lambda '($9) $9))) (let $8 (lambda '($10 $11 $12 $13) (block '( (let $14 (BlockFunc '"Equals" (BlockType (OptionalType (DataType 'Bool))) $11 (AsScalar (Int32 '"5")))) (return $10 $12 (BlockCoalesce $14 (AsScalar (Bool 'false))) $13) )))) (return (FromFlow (NarrowMap (WideFromBlocks (BlockCompress (WideMap $7 $8) '2)) (lambda '($15 $16) (AsStruct '('"id" $15) '('"resource_id" $16)))))) ))) '('('"_logical_id" '577) '('"_id" '"c1d2c7e3-e51ff91c-c131a28b-6a5cb70b")))) (let $2 (DqCnUnionAll (TDqOutput $1 '"0"))) (let $3 (DqPhyStage '($2) (lambda '($17) $17) '('('"_logical_id" '626) '('"_id" '"944b73c5-b19540a-527389f9-b4b60a4e")))) (let $4 (DqCnResult (TDqOutput $3 '"0") '('"id" '"resource_id"))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($1 $3) '($4) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Avg_NullMix >> KqpYql::UuidPrimaryKey [GOOD] >> KqpOlap::CompositeRangeOlap 
[GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-04-07T13:58:05.063930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570220082459519:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:05.064027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:58:05.268764Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570220227694741:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e6a/r3tmp/tmpGPBbes/pdisk_1.dat 2025-04-07T13:58:05.636896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T13:58:06.079436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:58:06.177129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:58:06.209775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:06.209879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:06.219199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:58:06.229004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:58:06.230543Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T13:58:06.237269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:58:06.283722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9293, node 1 2025-04-07T13:58:06.494122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:58:06.494144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:58:06.494151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:58:06.494241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16792 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:58:07.192630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:58:07.346177Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:58:10.064353Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570220082459519:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:10.064421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:10.168570Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570220227694741:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:58:10.168635Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:58:10.192991Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:10.214930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:10.214996Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:10.217766Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDQyYzQxZTctNThlMjA4NmUtY2NhYmY1NWUtOGRhYjQxMTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDQyYzQxZTctNThlMjA4NmUtY2NhYmY1NWUtOGRhYjQxMTA= 2025-04-07T13:58:10.236897Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:58:10.236900Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-07T13:58:10.237632Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570241702531365:2279], Start check tables existence, number paths: 2 2025-04-07T13:58:10.237792Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:58:10.237802Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:58:10.242632Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 
2025-04-07T13:58:10.244494Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570241702531365:2279], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:58:10.244558Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570241702531365:2279], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:58:10.244587Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490570241702531365:2279], Successfully finished 2025-04-07T13:58:10.244687Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:58:10.271080Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570241557296802:2304], Start check tables existence, number paths: 2 2025-04-07T13:58:10.271187Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDQyYzQxZTctNThlMjA4NmUtY2NhYmY1NWUtOGRhYjQxMTA=, ActorId: [1:7490570241557296803:2305], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:10.274103Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570241557296802:2304], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:58:10.274145Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570241557296802:2304], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:58:10.274165Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570241557296802:2304], Successfully finished 2025-04-07T13:58:10.274207Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:58:10.276621Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:10.292918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:58:10.306946Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:58:10.309244Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:58:10.335709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:58:10.432985Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:58:10.438344Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570241557296822:2523], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:58:10.441047Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNkY2RjN2YtMjg2OTU0MjktMmUyMTQ3NWMtNTNlNTE1YWE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTNkY2RjN2YtMjg2OTU0MjktMmUyMTQ3NWMtNTNlNTE1YWE= 2025-04-07T13:58:10.441312Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:58:10.441330Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:58:10.441380Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNkY2RjN2YtMjg2OTU0MjktMmUyMTQ3NWMtNTNlNTE1YWE=, ActorId: [1:7490570241557296906:2306], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:58:10.441671Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNkY2RjN2YtMjg2OTU0MjktMmUyMTQ3NWMtNTNlNTE1YWE=, ActorId: [1:7490570241557296906:2306], ActorState: ReadyState, TraceId: 01jr88ppr9fpagbyeq0bb936bs, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490570241557296905:2583] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:58:10.441736Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570241557296908:2307], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:58:10.441802Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490570241557296906:2306], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTNkY2RjN2YtMjg2OTU0MjktMmUyMTQ3NWMtNTNlNTE1YWE= 2025-04-07T13:58:10.441841Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570241557296909:2308], Database: /Root, Start database fetching 2025-04-07T13:58:10.447373Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcher ... > Connected TServer::EnableGrpc on GrpcPort 26458, node 11 2025-04-07T14:00:50.261321Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.261344Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.261352Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.261494Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10193 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:50.861140Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:50.871509Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:00:50.905297Z node 11 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T14:00:54.778885Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7490570922334888464:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.778941Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:55.698686Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:00:55.698779Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490570948104692685:2304], Start check tables existence, number paths: 2 2025-04-07T14:00:55.699534Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:00:55.703333Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:00:55.708104Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU= 2025-04-07T14:00:55.708764Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490570948104692685:2304], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T14:00:55.708825Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490570948104692685:2304], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T14:00:55.708868Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490570948104692685:2304], Successfully finished 2025-04-07T14:00:55.709010Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T14:00:55.709059Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, 
tables exists: 0 2025-04-07T14:00:55.709780Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:00:55.712916Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T14:00:55.717153Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:00:55.720157Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-07T14:00:55.738798Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T14:00:55.766201Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:00:55.839522Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T14:00:55.842606Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490570948104692712:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T14:00:55.843091Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-07T14:00:55.843117Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-07T14:00:55.843197Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490570948104692772:2307], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T14:00:55.848861Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490570948104692772:2307], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T14:00:55.848958Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-07T14:00:55.848980Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T14:00:55.849296Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7490570948104692781:2308], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T14:00:55.851845Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7490570948104692781:2308], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T14:00:55.864585Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T14:00:55.864608Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T14:00:55.864725Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: ReadyState, TraceId: 01jr88vr9qcdtcww0vpx5fs2aj, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-07T14:00:55.864906Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490570948104692793:2310], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T14:00:55.870632Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490570948104692793:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.870741Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.057798Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.062214Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: ExecuteState, TraceId: 01jr88vr9qcdtcww0vpx5fs2aj, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [11:7490570948104692794:2305] WorkloadServiceCleanup: 0 2025-04-07T14:00:56.064749Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: CleanupState, TraceId: 01jr88vr9qcdtcww0vpx5fs2aj, EndCleanup, isFinal: 0 2025-04-07T14:00:56.064846Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: CleanupState, TraceId: 01jr88vr9qcdtcww0vpx5fs2aj, Sent query response back to proxy, proxyRequestId: 3, proxyId: [11:7490570922334888435:2168] 2025-04-07T14:00:56.065522Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7490570948104692781:2308], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-07T14:00:56.093589Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:00:56.093646Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:00:56.093678Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:00:56.093718Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:00:56.093810Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=NjgxMWEzMDYtMTk4MTViYzUtYmRiYWIxZTQtZjU1MzBmZjU=, ActorId: [11:7490570948104692710:2305], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsageNotPK [GOOD] Test command err: Trying to start YDB, gRPC: 7828, MsgBus: 6534 2025-04-07T14:00:55.637242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570951432860727:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:55.637453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f83/r3tmp/tmpoBlF5b/pdisk_1.dat 
2025-04-07T14:00:56.133372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:56.133500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:56.136292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:56.140481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7828, node 1 2025-04-07T14:00:56.255966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:56.255991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:56.256001Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:56.256126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6534 TClient is connected to server localhost:6534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:56.897182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:56.922902Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:56.934277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:57.063779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:57.064013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:57.064307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:57.064444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:57.064578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:57.064736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:57.064876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:57.064999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:57.065162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:57.065272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:57.065389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:57.065498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570960022795856:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:57.102958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:57.103022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:57.103302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:57.103437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:57.103541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:57.103629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:57.103765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:57.103900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:57.104017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:57.104150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:57.104270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:57.104368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570960022795858:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:57.139585Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:57.139654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:57.139869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:57.140012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:57.140134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:57.140227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:57.140355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:57.140464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570960022795857:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:57.184353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:57.184374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:57.184439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:57.184472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:57.184542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:57.184568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:57.184599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:57.184634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:57.186228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:57.186281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:57.186436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:57.186477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:57.186648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:57.186691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:57.186871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:57.186899Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:57.187009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:57.187036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:57.187638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:57.187681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:57.187764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:57.187783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:57.187976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:57.188000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:57.188060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:57.188084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:57.188167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:57.188189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:57.188242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:57.188280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:57.188576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:57.188613Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:57.188801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:57.188827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:57.188966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:57.188990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:57.189161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:57.189182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:57.189333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:57.189354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:00:57.247373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:00:58.950700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570964317763450:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:58.950843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.232033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570968612730787:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.232144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.241022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.279327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570968612730834:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.279391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.291810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.351883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570968612730880:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.351971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [FAIL] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T13:59:03.981806Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.981901Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.014900Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:04.056391Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-04-07T13:59:04.057594Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-07T13:59:04.059888Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:04.063804Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T13:59:04.065464Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 
2025-04-07T13:59:04.068633Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2025-04-07T13:59:04.070254Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.082490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:04.082560Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:208:2214], now have 1 active actors on pipe 2025-04-07T13:59:04.082620Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T13:59:04.082664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-07T13:59:04.082901Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-07T13:59:04.082970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-07T13:59:04.083048Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-07T13:59:04.083127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-07T13:59:04.083163Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-07T13:59:04.093125Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:04.093201Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:210:2216], now have 1 active actors on pipe 2025-04-07T13:59:04.093363Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T13:59:04.093412Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-07T13:59:04.093507Z node 1 :PERSQUEUE INFO: new Cookie default|755ffb71-d55c6bb7-6ec5224a-1c3007a0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-07T13:59:04.093644Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-07T13:59:04.093768Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:04.094137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T13:59:04.094174Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:212:2218], now have 1 active actors on pipe 2025-04-07T13:59:04.094229Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T13:59:04.094273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-07T13:59:04.094326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-07T13:59:04.094356Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-07T13:59:04.094528Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-04-07T13:59:04.095324Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-04-07T13:59:04.095413Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-04-07T13:59:04.095456Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-04-07T13:59:04.095906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 332 2025-04-07T13:59:04.096092Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.116404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T13:59:04.116507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T13:59:04.116585Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T13:59:04.116631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T13:59:04.116704Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-07T13:59:04.116905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-07T13:59:04.116973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-07T13:59:04.117036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-07T13:59:04.117068Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-07T13:59:04.117275Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T13:59:04.117367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-04-07T13:59:04.117417Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T13:59:04.117711Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-07T13:59:04.117776Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T13:59:04.117882Z node 1 :PERSQUEUE DEBUG: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 332 queuesize 2 startOffset 0 2025-04-07T13:59:04.117925Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 1 start ... 
ptured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:41.838299Z node 41 :PERSQUEUE INFO: new Cookie default|7518d98d-8f35c438-a2c56ab1-3744892e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:41.848251Z node 41 :PERSQUEUE INFO: new Cookie default|c8ba290f-3f0195d8-3b7a2b14-36a0aeb0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:106:2057] recipient: [42:99:2133] 2025-04-07T14:00:42.521547Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:42.521642Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [42:147:2057] recipient: [42:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [42:147:2057] recipient: [42:145:2168] Leader for TabletID 72057594037927938 is [42:151:2172] sender: [42:152:2057] recipient: [42:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:177:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:42.551585Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:42.552372Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 45 actor [42:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 45 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 45 ReadRuleGenerations: 45 ReadRuleGenerations: 45 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 45 Important: false } Consumers { Name: "user1" Generation: 45 Important: true } Consumers { Name: "user2" Generation: 45 Important: true } 2025-04-07T14:00:42.553102Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, 
Partition: 0, State: StateInit] bootstrapping 0 [42:184:2197] 2025-04-07T14:00:42.556031Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [42:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:42.580103Z node 42 :PERSQUEUE INFO: new Cookie default|24dabaf4-2097370f-a869d8d9-2445ec3b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:44.020375Z node 42 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-07T14:00:44.138678Z node 42 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:241:2057] recipient: [42:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:244:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:245:2057] recipient: [42:243:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:246:2245] sender: [42:247:2057] recipient: [42:243:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:44.228711Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:44.228800Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:00:44.229578Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:297:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:00:44.257734Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:00:44.257849Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [42:297:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:44.327813Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2025-04-07T14:00:44.417842Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2025-04-07T14:00:44.503442Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:246:2245] sender: [42:328:2057] recipient: [42:14:2061] 2025-04-07T14:00:44.507570Z node 42 :PERSQUEUE ERROR: [PQ: 72057594037927937] Config has too small version 42 actual 45 actor [42:325:2307] txId 42 config: PartitionIds: 0 Version: 42 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } Consumers { Name: "user2" Important: true } 2025-04-07T14:00:44.562784Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 assertion failed at ydb/core/persqueue/ut/pq_ut.cpp:2433, auto NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TString &, std::function, bool &) const: ((int)consumerDeleteResult->Record.GetStatus() == (int)NKikimrPQ::EStatus::OK) failed: (2 != 0) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x184B50C0) ??+0 (0x179EDDDD) ??+0 (0x179EACFD) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+753 (0x370B4C51) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+473 (0x178F4D49) std::__y1::__function::__func, void ()>::operator()()+280 (0x178FD6B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x184F3FD9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x184BBC29) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+1204 (0x178FC884) NUnitTest::TTestFactory::Execute()+2438 (0x184BD4F6) NUnitTest::RunMain(int, char**)+5149 (0x184EDC1D) ??+0 (0x7FE573E13D90) __libc_start_main+128 (0x7FE573E13E40) _start+41 (0x1584F029) forced failure at ydb/core/testlib/tablet_helpers.cpp:806, void NKikimr::RunTestWithReboots(const TVector &, std::function, std::function, bool &)>, ui32, ui64, ui32, ui32, bool): Failed at dispatch Trace with exception NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x184B50C0) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+7733 (0x370B6795) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+473 (0x178F4D49) std::__y1::__function::__func, void ()>::operator()()+280 (0x178FD6B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x184F3FD9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 
(0x184BBC29) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+1204 (0x178FC884) NUnitTest::TTestFactory::Execute()+2438 (0x184BD4F6) NUnitTest::RunMain(int, char**)+5149 (0x184EDC1D) ??+0 (0x7FE573E13D90) __libc_start_main+128 (0x7FE573E13E40) _start+41 (0x1584F029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 62157, MsgBus: 8110 2025-04-07T14:00:49.600173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570924796429245:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.600261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e91/r3tmp/tmpYa2miE/pdisk_1.dat 2025-04-07T14:00:50.330884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:50.332416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.332530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.344263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62157, node 1 2025-04-07T14:00:50.609379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.609405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.609418Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.609529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8110 TClient is connected to server localhost:8110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:51.761725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:51.824888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:53.677183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570941976298932:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.677276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:54.668555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570924796429245:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.668898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:55.904142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.331751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570954861200937:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.332205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570954861200942:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.350442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:00:56.359939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570954861200944:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:00:58.854096Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570963451135778:2379], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-04-07T14:00:58.855300Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmFjZTdmYzgtMTFkMzVkYTktMTRhOWMzOTktZDQ2ZmQ3NGE=, ActorId: [1:7490570941976298927:2301], ActorState: ExecuteState, TraceId: 01jr88vv6s64mze6xkj67dbkwj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompositeRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 32333, MsgBus: 61965 2025-04-07T14:00:53.861071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570941309373141:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:53.861118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f97/r3tmp/tmp7WdqWF/pdisk_1.dat 2025-04-07T14:00:54.366127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.366227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:54.373073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.379214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32333, node 1 2025-04-07T14:00:54.567806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.567833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.567855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.567927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61965 TClient is connected to server localhost:61965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:55.430782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:55.458968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:55.480404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:55.608206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.608382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.608598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.609009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.609141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.609255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.609361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.609896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.610272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.610362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.610526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.610627Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570949899308409:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.653512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.653582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.653788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.653945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.654045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.654140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.654234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.654329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.654437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.654531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.654639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.654723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570949899308410:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.689340Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.689402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.689614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.689725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.689846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.689930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.690037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.690142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570949899308411:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
cess=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:55.733763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:55.733807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:55.733867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:55.733889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:55.733934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:55.733971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:55.734241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:55.734279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:55.734516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:55.734543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.734661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:55.734686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:55.734841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:55.734868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:55.734970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:55.734997Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:55.736073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:55.736096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:55.736155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:55.736173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:55.736265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:55.736278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:55.736340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:55.736364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:55.736405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:55.736416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:55.736438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:55.736455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:55.736616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:55.736721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:55.736933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:55.736957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.737074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:55.737106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:55.737328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:55.737357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:55.737451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:55.737475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:00:55.789495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:00:57.388908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570958489243330:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.388968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570958489243319:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.389667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.393591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:57.400579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570958489243333:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:58.274602Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458000, txId: 18446744073709551615] shutting down 2025-04-07T14:00:58.621810Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458082, txId: 18446744073709551615] shutting down 2025-04-07T14:00:58.864788Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570941309373141:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.864851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:59.063362Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459000, txId: 18446744073709551615] shutting down 2025-04-07T14:00:59.422359Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459041, txId: 18446744073709551615] shutting down 2025-04-07T14:00:59.749568Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459391, txId: 18446744073709551615] shutting down 2025-04-07T14:01:00.100896Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:00.504844Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460063, txId: 18446744073709551615] shutting down >> KqpOlapCompression::TestAlterCompressionTableInTableStore [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy >> KqpOlapAggregations::Json_Query >> KqpOlapStatistics::StatsUsage >> KqpOlapSparsed::SwitchingMultiColumn >> KqpDecimalColumnShard::TestJoinById ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::TestAlterCompressionTableInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 14397, MsgBus: 6205 2025-04-07T14:00:58.297997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570964161403901:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.302083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f74/r3tmp/tmpYWPeu7/pdisk_1.dat 2025-04-07T14:00:58.802252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:58.805458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:58.805546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:58.808633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14397, node 1 2025-04-07T14:00:58.920222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:58.920246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:58.920255Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:58.920335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6205 TClient is connected to server localhost:6205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:59.581464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:59.605306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:01.661092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570977046306287:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.661301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.685704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:01.753525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.753741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.753962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.754048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.754145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.754239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.754320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.754401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:01.754503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:01.754602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.754686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:01.754779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570977046306355:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:01.764503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:01.764558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:01.764697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:01.764756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:01.764896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:01.764929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:01.765011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:01.765038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:01.765101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:01.765166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:01.765202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:01.765221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:01.765782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:01.765843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:01.766054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:01.766084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:01.766257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:01.766286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:01.766499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:01.766518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:01.766656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:01.766680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE `/Root/TableStoreTest/ColumnTableTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:01.815724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570977046306425:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.815818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.829914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:01:01.901468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570977046306525:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.901539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpOlapAggregations::Aggregation_Avg_GroupBy >> KqpOlapAggregations::BlockGenericSimpleAggregation >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpOlap::BlockChannelForce [GOOD] >> KqpYql::CreateUseTable [GOOD] >> KqpOlapWrite::DefaultValues [GOOD] >> KqpDecimalColumnShard::TestFilterNulls [GOOD] >> KqpDecimalColumnShard::TestGroupByDecimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 21581, MsgBus: 19731 2025-04-07T14:00:49.600491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570923458160225:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.600552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e95/r3tmp/tmplAF2Eg/pdisk_1.dat 2025-04-07T14:00:50.240944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.241056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.271820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:50.307969Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21581, node 1 2025-04-07T14:00:50.613220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.613246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.613252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.613335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19731 TClient is connected to server localhost:19731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:51.895596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:51.921390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:51.928591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:52.112088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:00:52.315139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:52.387615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:54.196749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570944932998272:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:54.196882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:54.601859Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570923458160225:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.614654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:55.508998Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2104} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.201700s 2025-04-07T14:00:55.509040Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2156} StateWork event processing took too much time Type# 2146435078 Duration# 0.201847s 2025-04-07T14:00:55.903963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.951146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.981560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.057619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.124748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.155813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.331659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570953522933382:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570953522933387:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.350609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:56.367647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570953522933389:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:00:58.864536Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458796, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 10584, MsgBus: 29299 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e95/r3tmp/tmpkYKCub/pdisk_1.dat 2025-04-07T14:01:00.048186Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:00.049051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:00.086631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:00.086706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:00.091982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10584, node 2 2025-04-07T14:01:00.204799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:00.204822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:00.204829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:00.204927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29299 TClient is connected to server localhost:29299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:00.693850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:03.314372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985048586438:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.314599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.326512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.445425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985048586561:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.445531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.466994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985048586568:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.467072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.467390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985048586573:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.471227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:03.483782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570985048586575:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelForce [GOOD] Test command err: Trying to start YDB, gRPC: 3839, MsgBus: 13020 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f88/r3tmp/tmpGCx1js/pdisk_1.dat 2025-04-07T14:00:55.921099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:56.040805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:56.045022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:56.045102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:56.048521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3839, node 1 2025-04-07T14:00:56.185031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:56.185058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:56.185064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:56.185157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13020 TClient is connected to server localhost:13020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:56.691271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:56.721381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:56.739222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.894561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:57.056562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:57.127365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:00:58.931498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570962788654993:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:58.931655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:58.986719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.063971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.144646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.179064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.213410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.334379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:59.393454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570967083622799:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.393503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.393608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570967083622804:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.396096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:59.404375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570967083622806:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:01:00.541848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:01:00.643716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:00.643904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:00.644200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:00.644303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:00.644396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:00.644501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:00.644603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:00.645001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:00.645159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:00.645264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:00.645288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570971378590502:2466];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:00.645315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570971378590502:2466];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:00.645367Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:00.645435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570971378590502:2466];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:00.645463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570971378590503:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:00.645522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570971378590502:2466];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:00.645609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570971378590502:2466];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:00.645696Z node 1 :TX_COLUMNSHARD WARN: ... 72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:00.685984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:00.686025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:00.686194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:00.686231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:00.686339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:00.686370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:00.686527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:00.686558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:00.686738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2025-04-07T14:01:00.686796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:00.694270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:00.694324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:00.694405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:00.694431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:00.694606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:00.694632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:00.694719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:00.694748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:00.694801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:00.694821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:00.694854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:00.694874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:00.696877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:00.696932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:00.697085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:00.697142Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:00.697278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:00.697314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:00.697475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:00.697511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:00.697604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:00.697632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:00.706027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:00.706077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:00.706182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:00.706209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:00.706364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:00.706400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:00.706476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:00.706507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:00.706600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:00.706658Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:00.706706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:00.706738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:00.707033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:00.707079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:00.707278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:00.707320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:00.707420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:00.707446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:00.707573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:00.707594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:00.707673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:00.707701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::DefaultValues [GOOD] Test command err: Trying to start YDB, gRPC: 8065, MsgBus: 20345 2025-04-07T14:00:59.698973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570967637468204:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:59.699025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f6d/r3tmp/tmpvbn2mC/pdisk_1.dat 
2025-04-07T14:01:00.181292Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:00.212637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:00.212735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:00.219632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8065, node 1 2025-04-07T14:01:00.427290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:00.427315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:00.427337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:00.427444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20345 TClient is connected to server localhost:20345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:01.027267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:01.060104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:01.091345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:01.209497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.209737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.210059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.210186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.210317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.210421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.210521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.210640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:01.210772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:01.210877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.211005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:01.211146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570976227403462:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:01.251723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.251790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.252030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.252165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.252272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.252365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.252447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.252561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:01.252655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:01.255919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.256632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:01.256830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570976227403463:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:01.290598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.290686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.290880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.290991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.291121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.291212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.291335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.291437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570976227403466:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
31Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:01.326414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:01.326444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:01.326533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:01.326558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:01.326591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:01.326609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:01.326933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:01.326991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:01.327129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:01.327155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.327352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:01.327376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:01.327547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:01.327571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:01.327683Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:01.327703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:01.328406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:01.328461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:01.328562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:01.328583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:01.330372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:01.330409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:01.330511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:01.330536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:01.330588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:01.330605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:01.330631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:01.330667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:01.330980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:01.331073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:01.331255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:01.331278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.331405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:01.331429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:01.331600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:01.331623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:01.331727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:01.331746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:01:01.382163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.246665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570984817338354:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.246798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.490043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6400144;columns=1;
====================================
QUERY:
SELECT count(*) as count, FROM `/Root/olapStore/olapTable` WHERE field = 'abcde'
RESULT:
2025-04-07T14:01:04.014924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570989112305745:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.015004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.015229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570989112305750:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.019282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:01:04.029731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570989112305752:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:01:04.831705Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570967637468204:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:04.831906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; count: 2025-04-07T14:01:05.125005Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464074, txId: 18446744073709551615] shutting down 800000 >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 2213, MsgBus: 11527 2025-04-07T14:00:49.602242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570925124071780:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.602897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e90/r3tmp/tmp61jm1o/pdisk_1.dat 2025-04-07T14:00:50.260197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:50.285274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.285382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.301321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2213, node 1 2025-04-07T14:00:50.611929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.611960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.611972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.612067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11527 TClient is connected to server localhost:11527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:51.736042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.762315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:51.775218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.982899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:52.220045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:52.301466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:53.823151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570942303942510:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.823255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:54.626428Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570925124071780:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.626491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:55.903123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.929468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.974881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.022872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.105431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.178829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.331799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570955188844916:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570955188844921:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.350419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:56.359944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570955188844923:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 27989, MsgBus: 14226 2025-04-07T14:00:59.461398Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570967875635836:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:59.461532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e90/r3tmp/tmprR08w8/pdisk_1.dat 2025-04-07T14:00:59.583134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:59.599074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:59.599145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:59.601024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27989, node 2 2025-04-07T14:00:59.653130Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:59.653148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:59.653152Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:59.653222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14226 TClient is connected to server localhost:14226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:00.211118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:00.232869Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:01:00.251963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:00.349389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:00.546424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:00.642788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:02.916251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570980760539430:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:02.916336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:02.947156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:01:02.983809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.036480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.073188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.148645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.205903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:03.307013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985055507228:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.307160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.308218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570985055507234:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.312050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:01:03.325819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570985055507236:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:01:04.481781Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570967875635836:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:04.481906Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:04.563449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:01:04.901050Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464921, txId: 281474976715673] shutting down >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] Test command err: Trying to start YDB, gRPC: 28679, MsgBus: 24510 2025-04-07T14:00:53.312407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570939920180332:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:53.312634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f9a/r3tmp/tmprZW9C4/pdisk_1.dat 2025-04-07T14:00:53.898104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:53.905113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:53.905229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:53.906543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28679, node 1 2025-04-07T14:00:54.053397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.053421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.053429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.053539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24510 TClient is connected to server localhost:24510 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:54.737470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:54.765147Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:54.775173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:54.833231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:54.833496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:54.833804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:54.833940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:54.834062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:54.834159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:54.834267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:54.834394Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:54.834509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:54.834632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.834750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:54.834872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570944215148150:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:54.838201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:54.838266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:54.838383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:54.838412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:54.838638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:54.838670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:54.838829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:54.838881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:54.838992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:54.839016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:54.839054Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:54.839075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:54.839617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:54.839649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:54.863095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:54.863175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:54.863418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:54.863451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:54.863665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:54.863714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:54.863866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:54.863959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:54.908583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034454799 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034454946 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 720... (TRUNCATED) TClient::Ls request: /Root/olapStore TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034454946 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034454960 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 Schemesha... (TRUNCATED) TClient::Ls request: /Root/olapStore/OlapParametersTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744034454960 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 I... (TRUNCATED) 2025-04-07T14:00:57.070597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570957100050264:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.070687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570957100050256:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.071167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.075592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:57.085216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570957100050278:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:57.936114Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034457137, txId: 18446744073709551615] shutting down 2025-04-07T14:00:58.309134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570939920180332:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.309205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:58.509269Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458000, txId: 18446744073709551615] shutting down 2025-04-07T14:00:59.028580Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034458425, txId: 18446744073709551615] shutting down 2025-04-07T14:00:59.516242Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459006, txId: 18446744073709551615] shutting down 2025-04-07T14:00:59.989835Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459496, txId: 18446744073709551615] shutting down 2025-04-07T14:01:00.484074Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:00.945580Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460455, txId: 18446744073709551615] shutting down 2025-04-07T14:01:01.446178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034461001, txId: 18446744073709551615] shutting down 2025-04-07T14:01:01.914995Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034461435, txId: 18446744073709551615] shutting down 2025-04-07T14:01:02.410807Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034462002, txId: 18446744073709551615] shutting down 2025-04-07T14:01:03.013042Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034462366, txId: 18446744073709551615] shutting down 2025-04-07T14:01:03.544134Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034463000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:04.109155Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:04.920691Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464102, txId: 18446744073709551615] shutting down 2025-04-07T14:01:05.546411Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465005, txId: 18446744073709551615] shutting down >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> KqpOlapAggregations::Aggregation_Some_GroupByNull >> KqpOlapStatistics::StatsUsage [GOOD] >> KqpDecimalColumnShard::TestSimpleQueries [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey [GOOD] Test 
command err: Trying to start YDB, gRPC: 5503, MsgBus: 30446 2025-04-07T14:00:54.232458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570945667276794:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.233803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f91/r3tmp/tmpWV8b3X/pdisk_1.dat 2025-04-07T14:00:54.661097Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.666691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.666772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5503, node 1 2025-04-07T14:00:54.676954Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:54.677115Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:54.677499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:54.773267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.773303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.773312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.773420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30446 TClient is connected to server localhost:30446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:55.790414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, interval Interval64, timestamp Timestamp64, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:00:57.848388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570958552179173:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.849231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:58.127070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:58.226174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:58.226351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:58.226589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:58.226700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:58.226788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:58.226922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:58.227016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:58.227109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:58.227206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:58.227299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:58.227385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:58.227466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570962847146546:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:00:58.233866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:58.233950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:58.234055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:58.234083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:58.234230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:58.234251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:58.234339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:58.234379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:58.234440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:58.234468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:58.234515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:58.234537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:58.235005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:58.235034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:58.235186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:58.235208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:58.235339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:58.235375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:58.235547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:58.235569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:58.235826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=R ... 5-04-07T14:01:03.557779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:03.557953Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:03.558148Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:03.558248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:03.558336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:03.558402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:03.558481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:03.558566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:03.558671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:03.558747Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:03.558827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:03.558921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570981830978945:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:03.565702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:03.565760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:03.565880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:03.565910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:03.566094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:03.566121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:03.566203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:03.566239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:03.566330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:03.566357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:03.566394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:03.566421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:03.566751Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:03.566786Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:03.566950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:03.566974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:03.567079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:03.567105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:03.567254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:03.567284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:03.567387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:03.567409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=280;columns=2; 2025-04-07T14:01:03.623125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570981830979028:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.623212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.625603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570981830979033:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:03.629272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:03.655598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570981830979035:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:01:04.140042Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464004, txId: 18446744073709551615] shutting down 2025-04-07T14:01:04.578239Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464032, txId: 18446744073709551615] shutting down 2025-04-07T14:01:04.993850Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034464459, txId: 18446744073709551615] shutting down 2025-04-07T14:01:05.253750Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570968946076543:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:05.253819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:05.389029Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:05.758296Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465334, txId: 18446744073709551615] shutting down 2025-04-07T14:01:06.242267Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034466000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 23888, MsgBus: 24855 2025-04-07T14:01:00.852411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570970070054605:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:00.852685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f6c/r3tmp/tmpAg18qs/pdisk_1.dat 2025-04-07T14:01:01.231285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23888, node 1 2025-04-07T14:01:01.307469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:01.308702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:01.327543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:01.386970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:01.386995Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:01.387002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:01.387083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24855 TClient is connected to server localhost:24855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:02.001361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:02.022324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:02.118171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:02.118419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:02.118695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:02.118825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:02.118924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:02.119036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:02.119153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:02.119283Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:02.119402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:02.119526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:02.119640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:02.119753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570978659989728:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:02.156428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:02.156494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:02.156736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:02.156846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:02.156951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:02.157078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:02.157181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:02.157284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:02.157403Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:02.157497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:02.157609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:02.157714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570978659989729:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:02.190260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:02.190354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:02.190548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:02.190665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:02.190789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:02.190897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:02.191000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:02.191123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570978659989727:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:02.191255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905709786599897 ... 
2.252688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:02.252714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:02.252814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:02.252837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:02.252984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:02.253010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:02.253125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:02.253169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:02.253227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:02.253248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:02.253282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:02.253306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:02.253696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:02.253749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:02.253915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:02.253961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:02.254159Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:02.254186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:02.254335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:02.254356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:02.254459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:02.254486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls`; 2025-04-07T14:01:04.205421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570987249924606:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.205528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570987249924632:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.205581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.209762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:04.224747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570987249924635:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:05.835109Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570970070054605:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:05.835177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:06.435945Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Double)))))) (let $1 (DataType 'Double)) (let $2 (OptionalType (TupleType $1 (DataType 'Uint64)))) (let $3 '('('"_logical_id" '1423) '('"_id" '"b10a5343-a096204b-e4aa5d3d-e125e152") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $18 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let 
$19 (KqpWideReadOlapTableRanges $18 (Void) '('"level") '() '() (lambda '($20) (block '( (let $21 '('"_yql_agg_0_sum" '"sum" '"level")) (let $22 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $20 '($21 $22) '())) ))))) (return (FromFlow (WideMap $19 (lambda '($23 $24) (IfPresent $24 (lambda '($25) (Just '((Convert $25 'Double) $23))) (Nothing $2)))))) ))) $3)) (let $5 (DqCnUnionAll (TDqOutput $4 '0))) (let $6 (OptionalType $1)) (let $7 (DqPhyStage '($5) (lambda '($26) (block '( (let $27 (Bool 'false)) (let $28 (WideCondense1 (ToFlow $26) (lambda '($30) $30) (lambda '($31 $32) $27) (lambda '($33 $34) (IfPresent $33 (lambda '($35) (IfPresent $34 (lambda '($36) (Just '((AggrAdd (Nth $35 '0) (Nth $36 '0)) (AggrAdd (Nth $35 '1) (Nth $36 '1))))) $33)) $34)))) (let $29 (Condense (NarrowMap (Take $28 (Uint64 '1)) (lambda '($37) (block '( (let $38 (IfPresent $37 (lambda '($39) (Just (Div (Nth $39 '0) (Nth $39 '1)))) (Nothing $6))) (return (AsStruct '('Avg0 $38))) )))) (Nothing (OptionalType (StructType '('Avg0 $6)))) (lambda '($40 $41) $27) (lambda '($42 $43) (Just $42)))) (return (FromFlow (Map $29 (lambda '($44) (AsList (AsStruct '('"column0" (Member $44 'Avg0)))))))) ))) '('('"_logical_id" '3080) '('"_id" '"d7e47bd1-bd651fe5-f77c2f-7f538957")))) (let $8 (DqCnValue (TDqOutput $7 '0))) (let $9 '('('"type" '"scan"))) (let $10 (KqpPhysicalTx '($4 $7) '($8) '() $9)) (let $11 '"%kqp%tx_result_binding_0_0") (let $12 (ListType (StructType '('"column0" $6)))) (let $13 '('('"_logical_id" '3176) '('"_id" '"ad55ac5b-245cb220-d5bd53d5-f36d45") '('"_partition_mode" '"single"))) (let $14 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $13)) (let $15 (DqCnResult (TDqOutput $14 '0) '('"column0"))) (let $16 (KqpTxResultBinding $12 '0 '0)) (let $17 (KqpPhysicalTx '($14) '($15) '('($11 $16)) $9)) (return (KqpPhysicalQuery '($10 $17) '((KqpTxResultBinding $12 '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean Test command err: Trying to start YDB, gRPC: 6303, MsgBus: 4697 2025-04-07T14:00:18.863464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570788941017957:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:18.863593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fcc/r3tmp/tmpHmp2wt/pdisk_1.dat 2025-04-07T14:00:19.580727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:19.580833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:19.606287Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:19.607675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6303, node 1 2025-04-07T14:00:19.913191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:19.913217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:19.913226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:19.913319Z node 
1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4697 TClient is connected to server localhost:4697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:20.769643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:20.793741Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:20.816402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:20.911649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:00:20.959153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:00:20.959286Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:00:20.968641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:20.968865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:20.969074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:20.969158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:20.969274Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:20.969371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:20.969462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:20.969555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:20.969650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:20.969740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:20.969816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:20.969888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:20.977676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:00:21.041014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:00:21.041147Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:00:21.047880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:21.047940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:21.048160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:21.048266Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:21.048361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:21.048519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:21.048650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:21.048785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:21.048875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:21.048994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:21.049096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:21.049228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:21.052519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490570797530953094:2293];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:00:21.072531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490570797530953094:2293];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:00:21.072640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-07T14:00:21.079632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570797530953094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:21.079705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570797530953094:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:21.079897Z node 1 :TX_COLUMNSHARD WARN: tab ... 
;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T14:00:52.464913Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:00:52.464929Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:00:52.464957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:52.464984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:52.465008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:52.465021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.465032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.465046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:52.465077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:52.468766Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034452000 at tablet 72075186224037891 2025-04-07T14:00:52.468802Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:52.468844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:52.468879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:52.468914Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:52.468941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.468956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.468978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:52.469025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490570797530953126:2294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:52.469103Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034452000 at tablet 72075186224037889 2025-04-07T14:00:52.469119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:52.469160Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:52.469180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:52.469204Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:52.469219Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.469234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.469251Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:52.469283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490570797530953090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:00:52.469345Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034452000 at tablet 72075186224037888 2025-04-07T14:00:52.469362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:00:52.469413Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:00:52.469439Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:00:52.469462Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:00:52.469478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.469490Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:00:52.469504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:00:52.469535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490570797530953093:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2. 
/-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 2025-04-07T14:00:52.567644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490570797530953093:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:52.567693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490570797530953126:2294];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:52.567718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490570797530953090:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:00:52.567755Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490570797530953094:2293];fline=actor.cpp:33;event=skip_flush_writing; 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:257: Execute_ @ 0x17AAA0F1 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FC699DCAD8F 18. ??:0: ?? @ 0x7FC699DCAE3F 19. ??:0: ?? 
@ 0x15851028 >> KqpOlapStats::AddRowsTableInTableStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 18512, MsgBus: 2744 2025-04-07T14:00:58.076292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570963238821504:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.078074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f78/r3tmp/tmpnaWCvr/pdisk_1.dat 2025-04-07T14:00:58.460010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18512, node 1 2025-04-07T14:00:58.526894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:58.526988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:58.544525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:58.599518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:58.599542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:58.599551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:58.599636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2744 TClient is connected to server localhost:2744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:59.239804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:59.262712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:59.275296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:59.380561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:59.380795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:59.381042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:59.381220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:59.381352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:59.381442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:59.381595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:59.381716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:59.381824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:59.381944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:59.382052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:59.382146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570967533789477:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:59.421930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:59.422011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:59.422229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:59.422350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:59.422439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:59.422558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:59.422656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:59.422744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:59.422876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:59.422971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:59.423091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:59.423200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570967533789483:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:59.468255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:59.468886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:59.469163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:59.469268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:59.469394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:59.469508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:59.469622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:59.469763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570967533789485:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
LASS_NAME=Granules;id=1; 2025-04-07T14:00:59.516069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:59.516154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:59.516177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:59.516287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:59.516308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:59.516399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:59.516485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:59.516545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:59.516568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:59.516596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:59.516626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:59.517527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:59.517577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:59.517798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:59.517833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:59.517935Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:59.517955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:59.518136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:59.518166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:59.518263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:59.518279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-04-07T14:01:01.367093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570976123724369:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.367170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.367585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570976123724381:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:01.371280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:01.389501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570976123724383:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:05.995600Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570963238821504:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:06.132177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:06.357973Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034462000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '870) '('"_id" '"62ad73ff-6c70bb41-fb149dc6-4c806117") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '1)) (let $24 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $23) $24))) (RangeCreate 
(AsList '($24 '((Just (Int32 '"7")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '929) '('"_id" '"5ca6cc1c-17c22c13-f13f67d8-ccbcaf4f") '('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1352) '('"_id" '"736f594a-d6eafc8c-30dc9a28-ad0be070") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1364) '('"_id" '"a472f899-84d3d94c-2a118a77-432d03a5")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapAggregations::Json_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait [FAIL] Test command err: Trying to start YDB, gRPC: 4715, MsgBus: 64021 2025-04-07T13:59:46.854699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570651455726718:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:46.854777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff5/r3tmp/tmpURelz1/pdisk_1.dat 2025-04-07T13:59:47.611067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:47.637080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:47.637504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:47.655292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4715, node 1 2025-04-07T13:59:47.888336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:47.888359Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-04-07T13:59:47.888366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:47.888454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64021 TClient is connected to server localhost:64021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:48.609130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:49.176719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:51.856890Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570651455726718:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:51.856960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:54.469231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:54.469433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:54.469701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:54.469818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:54.469932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-07T13:59:54.470044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:54.470140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:54.470253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:54.470356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:54.470464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.470572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:54.470672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570685815469983:2322];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:54.472950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:54.472999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:54.473242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:54.473348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:54.473436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:54.473520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:54.473732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:54.473849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:54.473943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:54.474033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:54.474137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:54.474228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7490570685815469986:2325];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:54.525678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570685815469984:2323];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:54.525747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570685815469984:2323];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:54.525948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570685815469984:2323];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:54.526045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570685815469984:2323];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:54.526197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038904;self_id=[1:7490570685815469973:2313];tablet_id=72075186224038904;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:54.526210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570685815469984:2323];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:54.526237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038904;self_id=[1:7490570685815469973:2313];tablet_id=72075186224038904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:54.526328Z node ... 
fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.898714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038789;self_id=[1:7490570922038733636:11274];tablet_id=72075186224038789;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.899024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038528;self_id=[1:7490570926333701003:11289];tablet_id=72075186224038528;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.939841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038563;self_id=[1:7490570922038733522:11242];tablet_id=72075186224038563;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.947347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038633;self_id=[1:7490570922038733363:11210];tablet_id=72075186224038633;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.986457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038511;self_id=[1:7490570926333701399:11367];tablet_id=72075186224038511;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:51.986510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038830;self_id=[1:7490570922038733572:11254];tablet_id=72075186224038830;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.033344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038814;self_id=[1:7490570926333701041:11298];tablet_id=72075186224038814;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.045966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038538;self_id=[1:7490570926333701279:11340];tablet_id=72075186224038538;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.074431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038591;self_id=[1:7490570922038733431:11224];tablet_id=72075186224038591;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.111815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038580;self_id=[1:7490570926333700961:11282];tablet_id=72075186224038580;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.155592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038586;self_id=[1:7490570922038733477:11232];tablet_id=72075186224038586;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.231475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038493;self_id=[1:7490570926333701580:11410];tablet_id=72075186224038493;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.231566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038474;self_id=[1:7490570926333700979:11285];tablet_id=72075186224038474;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.274721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038452;self_id=[1:7490570926333701483:11385];tablet_id=72075186224038452;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 
2025-04-07T14:00:52.284202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038542;self_id=[1:7490570926333701181:11321];tablet_id=72075186224038542;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.326239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038524;self_id=[1:7490570922038733504:11240];tablet_id=72075186224038524;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.360079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038527;self_id=[1:7490570926333701314:11346];tablet_id=72075186224038527;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.393751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7490570922038733537:11243];tablet_id=72075186224038066;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.399723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038437;self_id=[1:7490570926333701494:11386];tablet_id=72075186224038437;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.442470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038860;self_id=[1:7490570926333701119:11310];tablet_id=72075186224038860;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.445224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038582;self_id=[1:7490570926333701142:11316];tablet_id=72075186224038582;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.486544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038571;self_id=[1:7490570926333701291:11341];tablet_id=72075186224038571;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.507620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038533;self_id=[1:7490570926333701464:11383];tablet_id=72075186224038533;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.518434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038503;self_id=[1:7490570926333701217:11322];tablet_id=72075186224038503;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.546424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038219;self_id=[1:7490570926333701029:11297];tablet_id=72075186224038219;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.554240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038268;self_id=[1:7490570926333701426:11370];tablet_id=72075186224038268;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.590236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038379;self_id=[1:7490570926333701051:11299];tablet_id=72075186224038379;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.649075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038874;self_id=[1:7490570926333701103:11309];tablet_id=72075186224038874;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.674395Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038107;self_id=[1:7490570926333701526:11394];tablet_id=72075186224038107;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.692413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038495;self_id=[1:7490570926333701071:11300];tablet_id=72075186224038495;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.718393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7490570930628669051:11459];tablet_id=72075186224038017;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.750306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038210;self_id=[1:7490570926333701175:11317];tablet_id=72075186224038210;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.768450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490570926333701346:11356];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.787323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038540;self_id=[1:7490570926333701129:11314];tablet_id=72075186224038540;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.809287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038572;self_id=[1:7490570926333701124:11313];tablet_id=72075186224038572;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.823366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570926333701347:11357];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.843170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038554;self_id=[1:7490570926333701265:11339];tablet_id=72075186224038554;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.876342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038120;self_id=[1:7490570926333701620:11423];tablet_id=72075186224038120;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.886923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038341;self_id=[1:7490570926333701627:11424];tablet_id=72075186224038341;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:00:52.939614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[1:7490570930628669155:11479];tablet_id=72075186224038044;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4715 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:644: Execute_ @ 0x17AC483E 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FCA16EB1D8F 16. ??:0: ?? @ 0x7FCA16EB1E3F 17. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 62989, MsgBus: 10887 2025-04-07T14:00:49.606329Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570923601794938:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:49.606388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e92/r3tmp/tmpwAQtsX/pdisk_1.dat 2025-04-07T14:00:50.215766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.215912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.239312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:50.297116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62989, node 1 2025-04-07T14:00:50.609330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:50.609375Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:50.609388Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:50.609505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10887 TClient is connected to server localhost:10887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:51.698240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.750216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.947925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:52.189790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:52.325377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:53.525730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570940781665627:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:53.525858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:54.605969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570923601794938:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.606047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:55.906515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.954989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:00:55.992217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.022691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.103631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.139337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:00:56.331681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570953666568035:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.331986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570953666568040:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:56.350515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:00:56.360103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570953666568042:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:00:59.302359Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459202, txId: 281474976710671] shutting down 2025-04-07T14:00:59.733325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034459741, txId: 281474976710673] shutting down 2025-04-07T14:01:00.634412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460644, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 24292, MsgBus: 14009 2025-04-07T14:01:01.721257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490570976165136001:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:01.771823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e92/r3tmp/tmpRnYwNy/pdisk_1.dat 2025-04-07T14:01:01.895595Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:01.906988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:01.907082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:01.909987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24292, node 2 2025-04-07T14:01:01.987774Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:01.987798Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:01.987807Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:01.987927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14009 TClient is connected to server localhost:14009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:01:02.527257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:01:02.542695Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:02.553158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:02.628194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:02.832869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:02.916902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:05.165911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570993345006727:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:05.166040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:05.233672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.290272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.319338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.356163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.391534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.439308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:05.492943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570993345007222:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:05.493033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:05.493261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570993345007227:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:05.497704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:01:05.512174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570993345007229:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:01:06.685968Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570976165136001:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:06.686038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:06.806963Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034466832, txId: 281474976715671] shutting down 2025-04-07T14:01:07.152524Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034467175, txId: 281474976715673] shutting down >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsage [GOOD] Test command err: Trying to start YDB, gRPC: 25394, MsgBus: 25442 2025-04-07T14:01:03.574434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570984054929573:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:03.574467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f63/r3tmp/tmp3mkqwu/pdisk_1.dat 2025-04-07T14:01:04.055685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:04.055803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:04.061602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:04.073670Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25394, node 1 2025-04-07T14:01:04.261329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:04.261361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:04.261374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:04.261496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25442 TClient is connected to server localhost:25442 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:04.927374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:04.946202Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:04.964141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:05.122542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.122733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.123053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.123177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.123283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.123403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.123523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.123660Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.123784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.123902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.124019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.124135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570992644864836:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.173231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.173306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.173540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.173664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.173791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.173913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.174015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.174119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.174244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.174364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.174472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.174613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570992644864839:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.213238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.213323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.213544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.213636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.213747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.213839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.213925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.214045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570992644864859:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:05.280583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:05.280610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:05.282275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:05.282345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:05.282411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:05.282432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:05.282463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:05.282488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:05.282812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:05.282890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:05.283028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:05.283054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.283256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:05.283286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:05.283457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:05.283484Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:05.283585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:05.283606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:05.284803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:05.284850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:05.284952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:05.284980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:05.285164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:05.285193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:05.285270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:05.285300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:05.285348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:05.285368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:05.285400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:05.285420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:05.285753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:05.285850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:05.286007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:05.286057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.286170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:05.286199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:05.286355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:05.286386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:05.286489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:05.286529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:05.337053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:01:07.313173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571001234799727:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.313293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.600012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:01:07.702173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571001234799789:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.702255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.759326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571001234799804:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.759397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.800929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571001234799819:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.801005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.809882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710663:0, at schemeshard: 72057594046644480 >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestSimpleQueries [GOOD] Test command err: Trying to start YDB, gRPC: 64655, MsgBus: 23125 2025-04-07T14:00:57.236232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570958457816154:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:57.236422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f81/r3tmp/tmpt2KqyR/pdisk_1.dat 2025-04-07T14:00:57.569184Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64655, node 1 2025-04-07T14:00:57.642609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:57.642708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:57.645065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:57.740711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:57.740734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:57.740741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:57.740906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23125 TClient is connected to server localhost:23125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:58.438051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:58.465089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:00.505751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570971342718524:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:00.505850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:00.715841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:00.774722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:00.778843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:00.779143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:00.779250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:00.779337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:00.779432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:00.779518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:00.779603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:00.779695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:00.779784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:00.779866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:00.779949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570971342718600:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:00.782575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:00.782626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:00.782718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:00.782745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:00.782894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:00.782914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:00.783003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:00.783051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:00.783111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:00.783134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:00.783167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:00.783190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:00.783606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:00.783634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:00.784554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:00.784597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:00.784862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:00.784924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:00.785129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:00.785160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:00.785263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14 ... fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.545508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.545588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:04.545669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:04.545745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.545822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:04.545903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490570989918010676:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:04.568396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:04.568455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:04.568541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:04.568565Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:04.568934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:04.568976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:04.569061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:04.569087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:04.569161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:04.569182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:04.569215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:04.569242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:04.569553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:04.569584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:04.569741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:04.569764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.569877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:04.574114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:04.574379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-04-07T14:01:04.574416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:04.574508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:04.574531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2025-04-07T14:01:04.655802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570988522587973:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.655897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.656207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570988522587978:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:04.659849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:04.672726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570988522587980:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:05.569363Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:06.051531Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034465194, txId: 18446744073709551615] shutting down 2025-04-07T14:01:06.082978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570998507945372:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.083092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.088820Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490570977033108113:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:06.088925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490570998507945377:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.088980Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:06.094733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:06.104367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490570998507945379:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:01:06.618921Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034466153, txId: 18446744073709551615] shutting down 2025-04-07T14:01:07.025964Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034466510, txId: 18446744073709551615] shutting down >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Query [GOOD] Test command err: Trying to start YDB, gRPC: 61417, MsgBus: 15668 2025-04-07T14:01:02.909818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570980742927629:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:02.919113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f67/r3tmp/tmpnZSX0O/pdisk_1.dat 2025-04-07T14:01:03.414877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:03.414986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:03.418102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:03.433731Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61417, node 1 2025-04-07T14:01:03.584702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:03.584719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:03.584724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:03.584795Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15668 TClient is connected to server localhost:15668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:04.359723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:04.377300Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:04.387085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:04.478877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:04.479074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:04.479355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:04.479467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:04.479590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:04.479690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.479788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.479899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:04.480000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:04.480085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.480275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:04.480379Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570989332862886:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:04.523528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:04.523582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:04.523818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:04.523916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:04.524019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:04.524107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.524214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.524296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:04.524390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:04.524481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.524565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:04.524648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570989332862899:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:04.557419Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:04.557478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:04.557693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:04.557793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:04.557902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:04.558022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.558121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.558212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570989332862927:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
eaner;id=4; 2025-04-07T14:01:04.614962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:04.615033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:04.615053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:04.615093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:04.615109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:04.615135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:04.615152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:04.615391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:04.615438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:04.615590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:04.615611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.615698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:04.615716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:04.615910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:04.615932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:04.616035Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:04.616053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_QUERY(jsonval, "$.col1" WITH UNCONDITIONAL WRAPPER), JSON_QUERY(jsondoc, "$.col1" WITH UNCONDITIONAL WRAPPER) FROM `/Root/tableWithNulls` WHERE level = 1; 2025-04-07T14:01:06.961497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570997922797760:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.961573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570997922797782:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.961645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.965763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:06.975990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570997922797797:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:07.970633Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570980742927629:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:07.970993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:08.158782Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034467021, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_QUERY(jsonval, "$.col1" WITH UNCONDITIONAL WRAPPER), JSON_QUERY(jsondoc, "$.col1" WITH UNCONDITIONAL WRAPPER) FROM `/Root/tableWithNulls` WHERE level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 1","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":1},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":1},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 1","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 
(DataType 'JsonDocument)) (let $2 (OptionalType $1)) (let $3 (DqPhyStage '() (lambda '() (block '( (let $8 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $9 '('"id" '"jsondoc" '"jsonval" '"level")) (let $10 (Bool 'false)) (let $11 (KqpWideReadOlapTableRanges $8 (Void) $9 '() '() (lambda '($12) (block '( (let $13 '('eq '"level" (Int32 '1))) (let $14 '('?? $13 $10)) (let $15 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $12 $14) $15)) ))))) (return (FromFlow (NarrowMap $11 (lambda '($16 $17 $18) (block '( (let $19 (ResourceType '"JsonNode")) (let $20 (OptionalType $19)) (let $21 '($20)) (let $22 '((ResourceType '"JsonPath"))) (let $23 (DataType 'Utf8)) (let $24 (DictType $23 $19)) (let $25 '($24)) (let $26 '((DataType 'Bool))) (let $27 (CallableType '() $21 $21 $22 $25 $26 $21 $26 $21)) (let $28 (Udf '"Json2.SqlQueryWrap" (Void) (VoidType) '"" $27 (VoidType) '"" '())) (let $29 (Nothing $20)) (let $30 (IfPresent $18 (lambda '($43) (block '( (let $44 '((DataType 'Json) '"" '1)) (let $45 (CallableType '() '($19) $44)) (let $46 (Udf '"Json2.Parse" (Void) (VoidType) '"" $45 (VoidType) '"" '())) (return (Just (Apply $46 $43))) ))) $29)) (let $31 (CallableType '() $22 '($23))) (let $32 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $31 (VoidType) '"" '())) (let $33 (Apply $32 (Utf8 '"$.col1"))) (let $34 (Dict $24)) (let $35 (Apply $28 $30 $33 $34 $10 $29 $10 $29)) (let $36 (lambda '($47) (block '( (let $48 '($19 '"" '1)) (let $49 (CallableType '() '($1) $48)) (let $50 (Udf '"Json2.SerializeToJsonDocument" (Void) (VoidType) '"" $49 (VoidType) '"" '())) (return (Just (Apply $50 $47))) )))) (let $37 (Nothing $2)) (let $38 (IfPresent $35 $36 $37)) (let $39 (CallableType '() $21 '($2) $22 $25 $26 $21 $26 $21)) (let $40 (Udf '"Json2.JsonDocumentSqlQueryWrap" (Void) (VoidType) '"" $39 (VoidType) '"" '())) (let $41 (Apply $40 $17 $33 $34 $10 $29 $10 $29)) (let $42 (IfPresent $41 $36 $37)) (return (AsStruct '('"column1" $38) '('"column2" $42) '('"id" $16))) )))))) ))) '('('"_logical_id" '762) '('"_id" '"a820f74b-5e6063fa-f23eeece-68989f38")))) (let $4 (DqCnUnionAll (TDqOutput $3 '"0"))) (let $5 (DqPhyStage '($4) (lambda '($51) $51) '('('"_logical_id" '1440) '('"_id" '"3d2dfb1b-7c78ade8-2497c88c-d86f808b")))) (let $6 '('"id" '"column1" '"column2")) (let $7 (DqCnResult (TDqOutput $5 '"0") $6)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3 $5) '($7) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $2) '('"column2" $2) '('"id" (DataType 'Int32)))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Count_GroupBy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:35.890546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:35.890666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:35.890712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:35.890753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:35.890810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:35.890845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:35.890927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:35.891358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:35.978301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:35.978364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:35.995053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:35.995233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:35.995416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:36.026267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:36.026554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:36.027236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:36.027455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:36.031220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.032697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:36.032772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.032900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:36.032960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:36.033005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:36.033176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.040499Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:36.188961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:36.189250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T13:59:36.189456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:36.189682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:36.189741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.192982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:36.193118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:36.193356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.193413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:36.193455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:36.193490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:36.200892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.200981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:36.201039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:36.203509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.203586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.203678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.203746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:36.207634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:36.213330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:36.213588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:36.214661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-07T13:59:36.214814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:36.214884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.215149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:36.215197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.215395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:36.215500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:36.217877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:36.217931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:36.218101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.218140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:36.218396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.218453Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:36.218572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:36.218605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:36.218667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:36.218715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:36.218749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:36.218777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:36.218839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:36.218885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:36.218922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:36.220899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:36.221025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:36.221069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:36.221106Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:36.221145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:36.221269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:297:2287], Recipient [3:308:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:01:08.431041Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.150000Z 2025-04-07T14:01:08.487601Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.487688Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.487723Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-07T14:01:08.487800Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:01:08.487837Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:01:08.487965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-07T14:01:08.488038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:01:08.488083Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-07T14:01:08.488164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-04-07T14:01:08.488325Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:01:08.500463Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.500549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.500579Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:01:08.820211Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:08.820289Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:08.820372Z node 3 :TX_DATASHARD TRACE: StateWork, 
received event# 2146435079, Sender [0:0:0], Recipient [3:308:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:01:08.820512Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.820543Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.861148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-07T14:01:08.861279Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-07T14:01:08.861416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-04-07T14:01:08.861648Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:122:2148], Recipient [3:308:2296]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-07T14:01:08.861807Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:122:2148], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-07T14:01:08.863062Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.150000Z 2025-04-07T14:01:08.863106Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2025-04-07T14:01:08.863152Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-04-07T14:01:08.866990Z node 3 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1, LoadedSize 102, Spent{0.000s wa 0.000s cnt 1}, HistogramKeys 1 2025-04-07T14:01:08.873263Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1257:3197], Recipient [3:308:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-07T14:01:08.873340Z node 3 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186233409546, for tableId 2 2025-04-07T14:01:08.873413Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-07T14:01:08.874674Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:308:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 
21313 Memory: 124000 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 42 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-07T14:01:08.874727Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:01:08.874785Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 2.1313 2025-04-07T14:01:08.874894Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:01:08.874941Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:01:08.883305Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:297:2287], Recipient [3:308:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:01:08.899252Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.151000Z 2025-04-07T14:01:08.899332Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-04-07T14:01:08.899373Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:122:2148]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:01:08.899728Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:308:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-07T14:01:08.899774Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-07T14:01:08.899849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-07T14:01:08.899902Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-07T14:01:08.905836Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:297:2287], Recipient [3:308:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:01:08.918760Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.151000Z 2025-04-07T14:01:08.974590Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: 
NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.974669Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.974703Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-07T14:01:08.974775Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:01:08.974807Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:01:08.974929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-07T14:01:08.975002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:01:08.975048Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-07T14:01:08.975123Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2025-04-07T14:01:08.975247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:01:08.987300Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.987377Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.987422Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 >> KqpOlapWrite::TestRemoveTableBeforeIndexation >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> KqpOlap::MultiInsertWithSinks >> KqpOlapAggregations::Aggregation_ResultCountExpr >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] Test command err: Trying to start YDB, gRPC: 21940, MsgBus: 27834 2025-04-07T14:00:40.753045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570882988907758:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:40.753128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb2/r3tmp/tmppcrJwG/pdisk_1.dat 2025-04-07T14:00:41.237184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:41.253861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:41.253961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:41.258449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 21940, node 1 2025-04-07T14:00:41.407636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:41.407672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:41.407697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:41.407794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27834 TClient is connected to server localhost:27834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:42.122484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:42.147396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:42.305402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.305616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.305906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.306195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.306319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.306498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.306598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.306712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:42.306852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:42.307001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.307149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:42.307283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570891578842898:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:42.347361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.347433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.347654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.347792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.347950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.348062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.348172Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.348292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:42.348413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:42.348540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.348653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:42.348763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570891578842894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:42.469417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:42.469486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:42.469662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:42.470020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:42.470167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:42.470255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:42.470383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:42.470484Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570891578842895:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:42.470617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:74905708915788428 ... iption=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:42.511692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:42.511978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:42.512007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:42.512155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:42.512175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.512306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:42.512334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:42.512461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:42.512487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:42.512581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:42.512600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:42.514925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:42.514964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:42.515067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:42.515088Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:42.515247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:42.515272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:42.515355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:42.515427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:42.515530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:42.515568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:42.515622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:42.515643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:42.515903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:42.515956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:42.516125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:42.516157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:42.516249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:42.516265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:42.516425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-04-07T14:00:42.516452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:42.516542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:42.516568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:42.568046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:00:45.731418Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570882988907758:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:45.731474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2025-04-07T14:00:55.607048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570947413418241:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.607132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.607342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570947413418253:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:55.611980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:00:55.620612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570947413418255:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:00:56.233933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:00:56.233972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:56.586450Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034456335, txId: 281474976710662] shutting down Wait changes: 0/0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2025-04-07T14:01:02.050896Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034461928, txId: 281474976710664] shutting down 0/0 2025-04-07T14:01:02.085465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:02.145988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting actualization: 9/0.000013s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 2025-04-07T14:01:03.727380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034463552, txId: 281474976710668] shutting down Wait changes: 0/0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 2025-04-07T14:01:09.379106Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034469176, txId: 281474976710670] shutting down 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] Test command err: Trying to start YDB, gRPC: 16560, MsgBus: 15590 2025-04-07T14:01:04.771574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570989023617058:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:04.773595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f59/r3tmp/tmpqMlrfq/pdisk_1.dat 2025-04-07T14:01:05.157486Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:05.161272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:05.161379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:05.165460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16560, node 1 2025-04-07T14:01:05.264737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:05.264759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T14:01:05.264771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:05.264908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15590 TClient is connected to server localhost:15590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:05.941163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:05.979144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:05.987022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:06.077772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.078025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.078286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.078412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.078505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.078628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-07T14:01:06.078726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.078822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:06.078964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:06.079061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.079163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:06.079274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570997613552303:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:06.115736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.115796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.116006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.116113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.116211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.116314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:06.116403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.116529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:06.116653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:06.116772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.116869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:06.116960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570997613552302:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:06.150293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.150352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.150555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.150679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.150794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.150886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:06.150988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.151118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570997613552304:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:06.200502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:06.200568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:06.200597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:06.200782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:06.200812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:06.200888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:06.200934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:06.200994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:06.201021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:06.201067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:06.201096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:06.201454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:06.201556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:06.201763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:06.201793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.201935Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:06.201970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:06.202128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:06.202155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:06.202228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:06.202249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; 2025-04-07T14:01:08.022834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571006203487174:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.022916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.023254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571006203487201:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.033681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:08.042792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571006203487203:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 
(DataType 'Int64)) (let $3 '('"level" (OptionalType (DataType 'Int32)))) (let $4 (StructType '('_yql_agg_0 $1) '('_yql_agg_1 $2) $3)) (let $5 '('('"_logical_id" '1172) '('"_id" '"3fff1fa3-33c60818-c1128f0-a915153d") '('"_wide_channels" $4))) (let $6 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpBlockReadOlapTableRanges $16 (Void) '('"id" '"level") '() '() (lambda '($18) (block '( (let $19 '('eq '"level" (Int32 '"5"))) (let $20 '('?? $19 (Bool 'false))) (let $21 '('_yql_agg_0 '"count" '"*")) (let $22 '('_yql_agg_1 'sum '"id")) (return (TKqpOlapAgg (KqpOlapFilter $18 $20) '($21 $22) '('"level"))) ))))) (return (FromFlow $17)) ))) $5)) (let $7 (DqCnHashShuffle (TDqOutput $6 '0) '('2))) (let $8 (StructType '('"column1" $1) '('"column2" $2) $3)) (let $9 '('('"_logical_id" '1834) '('"_id" '"b52f5689-7fb826a-1bc05577-87c8d8d1") '('"_wide_channels" $8))) (let $10 (DqPhyStage '($7) (lambda '($23) (block '( (let $24 (lambda '($31 $32 $33 $34) $32 $33)) (let $25 (lambda '($35 $36 $37 $38 $39 $40) (AggrAdd $36 $39) (AggrAdd $37 $40))) (let $26 (lambda '($41 $42 $43) $42 $43 $41)) (let $27 (WideCombiner (WideFromBlocks (ToFlow $23)) '"" (lambda '($28 $29 $30) $30) $24 $25 $26)) (return (FromFlow (WideSort $27 '('('2 (Bool 'true)))))) ))) $9)) (let $11 (DqCnMerge (TDqOutput $10 '0) '('('2 '"Asc")))) (let $12 (DqPhyStage '($11) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46 $47) (AsStruct '('"column1" $45) '('"column2" $46) '('"level" $47)))))) '('('"_logical_id" '1846) '('"_id" '"8cfd5bc-47e0d8fd-76871318-6445dfdc")))) (let $13 '($6 $10 $12)) (let $14 '('"level" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $12 '0) $14)) (return (KqpPhysicalQuery '((KqpPhysicalTx $13 '($15) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $8) '0 '0)) '('('"type" '"query")))) ) >> KqpOlap::OlapRead_GenericQuery >> KqpOlap::ScanFailedSnapshotTooOld ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:37.214069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:37.214154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.214189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:37.214231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:37.214261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:37.214285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:37.214338Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:37.214607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:37.281679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:37.281738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:37.292864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:37.292987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:37.293151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:37.299742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:37.299936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:37.300513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.300727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:37.302671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.303929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.303982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.304104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:37.304141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.304173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:37.304344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.311308Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:37.412216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:37.412457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.412689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:37.412885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:37.412937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.419630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 
1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.419766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:37.419987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.420045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:37.420096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:37.420131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:37.422343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.422439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:37.422476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:37.424324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.424372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.424420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.424472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.427343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:37.428674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:37.428838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:37.429624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:37.429745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:37.429806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:37.430006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:37.430038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-07T13:59:37.430171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.430261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:37.431669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:37.431698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:37.431807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:37.431850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:37.432028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:37.432060Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:37.432125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:37.432173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.432209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:37.432247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:37.432291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:37.432318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:37.432372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:37.432402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:37.432428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:37.434150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.434244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:37.434338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:37.434385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:37.434439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:37.434524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Shard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:08.480680Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.480711Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.830579Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:08.830647Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:08.830726Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.830755Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:08.890663Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:306:2294]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:01:08.890746Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2025-04-07T14:01:08.891102Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-07T14:01:08.892111Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:306:2294], Recipient [3:121:2147]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 158 Memory: 124000 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 443 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 2025-04-07T14:01:08.892162Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:01:08.892220Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0158 2025-04-07T14:01:08.892332Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:01:08.892374Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:01:08.894281Z node 3 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1, LoadedSize 102, Spent{0.000s wa 0.000s cnt 1}, HistogramKeys 1 2025-04-07T14:01:08.894545Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1056:3002], Recipient [3:306:2294]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-07T14:01:08.894587Z node 3 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186233409546, for tableId 2 2025-04-07T14:01:08.908115Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:306:2294]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:01:08.908188Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:01:08.908273Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-07T14:01:08.908337Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:01:08.908373Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-07T14:01:08.908407Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-07T14:01:08.908437Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-07T14:01:08.949123Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.949210Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.949246Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-07T14:01:08.949315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:01:08.949353Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:01:08.949499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-07T14:01:08.949574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:01:08.949620Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-07T14:01:08.949700Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-04-07T14:01:08.949838Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete 
at tablet# 72057594046678944 2025-04-07T14:01:08.962924Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.963002Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:01:08.963033Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:01:09.182226Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.182294Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.182369Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:09.182397Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:09.494985Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.495057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.495145Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:09.495188Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:09.769345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.769413Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:09.769485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:09.769511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.041345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.041416Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.041491Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.041529Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.311572Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.311658Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.311759Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.311799Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.334538Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:306:2294]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:01:10.606457Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.606530Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:01:10.606600Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:01:10.606628Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] >> KqpOlapWrite::TierDraftsGC >> KqpOlapAggregations::BlockGenericSelectAll >> KqpOlapSysView::StatsSysViewColumns >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 30652, MsgBus: 6523 2025-04-07T14:01:02.707086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570978724173042:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:02.707138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f6b/r3tmp/tmprOua8k/pdisk_1.dat 2025-04-07T14:01:03.203751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:03.210257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:03.210360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:03.213520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30652, node 1 2025-04-07T14:01:03.287628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:03.287654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:03.287665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:03.287758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6523 TClient is connected to server localhost:6523 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:03.862989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:03.894284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:03.901278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:03.992158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:03.992389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:03.992642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:03.992943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:03.993076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:03.993207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:03.993338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:03.993493Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:03.993597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:03.993702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:03.993811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:03.993913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570983019141002:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:04.039816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:04.039888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:04.040156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:04.040282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:04.040390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:04.040507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.040627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.042266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:04.042446Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:04.042543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.042633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:04.042739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570983019141005:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:04.087932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:04.088009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:04.088278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:04.088396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:04.088534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:04.088740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:04.089020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:04.089287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570983019141001:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
; 2025-04-07T14:01:04.147381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:04.147487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:04.147509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:04.147658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:04.147678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:04.147764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:04.147793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:04.147906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:04.147928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:04.147960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:04.147979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:04.148304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:04.148345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:04.148507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:04.148533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:04.149386Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:04.149429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:04.157600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:04.157643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:04.157731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:04.157756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2025-04-07T14:01:06.183799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570995904043184:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.183884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.184250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570995904043196:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:06.188801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:06.201559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570995904043198:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:10.602027Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570978724173042:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:10.602305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:10.967994Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034467000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '877) '('"_id" '"100f6b07-6e5c61b6-f4f96be4-73207842") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '0)) (let $25 '((Nothing $2) $24)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList 
'($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '936) '('"_id" '"ee9df492-60044bf-9af3ce54-2e51311a") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1359) '('"_id" '"8808686c-5c3e89d7-d2a1d5b0-e4101ee8") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1371) '('"_id" '"ab54f8f5-40da9134-8b895ac4-32241ff0")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapClickbench::ClickBenchSmoke >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] >> KqpDecimalColumnShard::TestOrderByDecimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:00:24.480010Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:24.480128Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.500978Z node 1 :PERSQUEUE NOTICE: [PQ: 
72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:24.528741Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:00:24.529867Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-07T14:00:24.532274Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:24.535949Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T14:00:24.537713Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.550109Z node 1 :PERSQUEUE INFO: new Cookie default|4daf0f25-c8f39980-2292b54c-8f0c6a50_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.572890Z node 1 :PERSQUEUE INFO: new Cookie default|a92186e7-297e1a7a-9123b7b3-f325aa9e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.628570Z node 1 :PERSQUEUE INFO: new Cookie default|d9d418c6-58373e2c-435409c8-ea801149_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:240:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:243:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:244:2057] recipient: [1:242:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:245:2245] sender: [1:246:2057] recipient: [1:242:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.887911Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:24.887992Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:00:24.892810Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:296:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:00:24.895227Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:297:2289] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:00:24.924345Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:24.924458Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:296:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:24.944823Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:00:24.944922Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:297:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:245:2245] sender: [1:329:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T14:00:25.677775Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:25.677855Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:25.722464Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:25.723385Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-07T14:00:25.724128Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:182:2195] 2025-04-07T14:00:25.726518Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:25.728316Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:183:2196] 2025-04-07T14:00:25.729905Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:25.741299Z node 2 :PERSQUEUE INFO: new Cookie default|2107ab6d-bc6cf1dd-9551f954-32d26b7f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:25.749370Z node 2 :PERSQUEUE INFO: new Cookie default|ea432a9d-66922283-513b8dfa-c262eae4_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:25.769468Z node 2 :PERSQUEUE INFO: new Cookie default|c6e0d1ea-a8870de6-5ef94ef0-33d4e796_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvOffsets ! 
Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:237:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:239:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:241:2057] recipient: [2:240:2241] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:242:2242] sender: [2:243:2057] recipient: [2:240:2241] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:25.847086Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:25.847142Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:00:25. ... SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-04-07T14:01:12.409681Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:182:2195] 2025-04-07T14:01:12.411423Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:12.412637Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:183:2196] 2025-04-07T14:01:12.413999Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:12.420433Z node 54 :PERSQUEUE INFO: new Cookie default|e2c8f65c-4c43ab7e-f14e303-c6b4eeca_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:12.426121Z node 54 :PERSQUEUE INFO: new Cookie default|28efb0e7-658fc8ed-9281e3c1-460c3e5f_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:12.457376Z node 54 :PERSQUEUE INFO: new Cookie default|dab14cd8-94f3dc92-f1be9fcb-e5f795ce_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:240:2057] recipient: [54:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:243:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:244:2057] recipient: [54:242:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:245:2245] sender: [54:246:2057] recipient: [54:242:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:12.548425Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:12.548496Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:01:12.553703Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:296:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:12.556375Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:297:2289] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:12.584235Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:01:12.584325Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:296:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:01:12.612211Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:01:12.612312Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:297:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:245:2245] sender: [54:329:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] 2025-04-07T14:01:13.300228Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:13.300301Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] Leader for TabletID 72057594037927938 is [55:151:2172] sender: [55:152:2057] recipient: [55:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:175:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:13.331832Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:13.332712Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-04-07T14:01:13.333378Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:182:2195] 2025-04-07T14:01:13.335906Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:13.337835Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:183:2196] 2025-04-07T14:01:13.339920Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup 
to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:13.348814Z node 55 :PERSQUEUE INFO: new Cookie default|672be997-95350056-84a81533-d3e5ee02_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:13.358018Z node 55 :PERSQUEUE INFO: new Cookie default|f90880f7-59a3c819-ba598e09-e975051c_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:13.384054Z node 55 :PERSQUEUE INFO: new Cookie default|b94eadc2-14640c7d-96fde87d-2bc06579_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:238:2057] recipient: [55:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:241:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:242:2057] recipient: [55:240:2242] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:243:2243] sender: [55:244:2057] recipient: [55:240:2242] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:13.441353Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:13.441399Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:01:13.442055Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:294:2286] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:13.443801Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:295:2287] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:13.477333Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:01:13.477419Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:294:2286] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:01:13.511336Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:01:13.511433Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:295:2287] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:243:2243] sender: [55:327:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR
>> KqpOlap::BulkUpsertUpdate
>> KqpOlapStats::AddRowsTableInTableStore [GOOD]
>> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm
>> KqpOlapAggregations::Aggregation_ProjectionOrder [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableInTableStore [GOOD]
Test command err: Trying to start YDB, gRPC: 1214, MsgBus: 5572 2025-04-07T14:01:09.047535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571010612395419:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:09.047596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f4e/r3tmp/tmpphTIJQ/pdisk_1.dat 2025-04-07T14:01:09.682146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:09.682232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:09.686505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:09.703815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1214, node 1 2025-04-07T14:01:09.855733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:09.855923Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:09.855931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:09.856009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5572 TClient is connected to server localhost:5572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-07T14:01:10.472512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:10.502372Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:12.698344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571023497297933:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:12.698435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:13.023141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:13.130902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.131114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.131391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.131509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.131609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.131715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.131815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.131916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.132016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.132138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.132259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.132356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027792265300:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:13.141144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:13.141208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:13.141302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:13.141336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:13.141499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:13.141524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:13.141616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:13.141644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:13.141719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:13.141743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:13.141784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:13.141811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:13.142332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:13.142368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:13.142560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:13.142589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:13.142777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:13.142810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:13.142981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:13.143015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:13.143141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:13.143170 ... umnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; 2025-04-07T14:01:14.048322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571010612395419:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:14.048410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-04-07T13:57:52.698243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570165519281368:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:52.698289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eae/r3tmp/tmpAjEcve/pdisk_1.dat 2025-04-07T13:57:53.109288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:53.111568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:53.111675Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:53.115700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5948, node 1 2025-04-07T13:57:53.215180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:53.215200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:53.215212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:53.215310Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:53.566302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:57:53.581511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:53.633589Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T13:57:55.881914Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T13:57:55.882002Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570178404183865:2302], Start check tables existence, number paths: 2 2025-04-07T13:57:55.888272Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T13:57:55.888334Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T13:57:55.888364Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T13:57:55.888441Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570178404183865:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T13:57:55.888499Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570178404183865:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T13:57:55.888530Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490570178404183865:2302], Successfully finished 2025-04-07T13:57:55.888624Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T13:57:55.898302Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2Y3OTc1NTYtNTEwNmFkODEtNTIyZjFhZDctYWM5ZjM3ZTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2Y3OTc1NTYtNTEwNmFkODEtNTIyZjFhZDctYWM5ZjM3ZTY= 2025-04-07T13:57:55.898578Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2Y3OTc1NTYtNTEwNmFkODEtNTIyZjFhZDctYWM5ZjM3ZTY=, ActorId: [1:7490570178404183881:2303], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:55.961018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:55.964495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T13:57:55.967643Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T13:57:55.969322Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T13:57:55.975897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T13:57:56.049624Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T13:57:56.054272Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570178404183884:2296], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T13:57:56.058681Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ= 2025-04-07T13:57:56.058987Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ=, ActorId: [1:7490570182699151239:2304], ActorState: unknown state, session actor bootstrapped 2025-04-07T13:57:56.059648Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ=, ActorId: [1:7490570182699151239:2304], ActorState: ReadyState, TraceId: 01jr88p8pv12dkv2wrjtnbpp1r, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490570182699151238:2334] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-07T13:57:56.059696Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:57:56.059709Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T13:57:56.059759Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490570182699151239:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ= 2025-04-07T13:57:56.059798Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570182699151241:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:57:56.059865Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570182699151242:2306], Database: /Root, Start database fetching 2025-04-07T13:57:56.064754Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490570182699151242:2306], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-07T13:57:56.064971Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570182699151241:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:57:56.065054Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-07T13:57:56.065101Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-07T13:57:56.065136Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-07T13:57:56.065590Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570182699151253:2308], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
[OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-07T13:57:56.065655Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570182699151252:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ=, Start pool fetching 2025-04-07T13:57:56.065679Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570182699151254:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:57:56.067635Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570182699151254:2309], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-07T13:57:56.067711Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490570182699151252:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ=, Pool info successfully resolved 2025-04-07T13:57:56.067752Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ= 2025-04-07T13:57:56.067819Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7490570182699151253:2308], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7490570182699151239:2304], session id: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ= 2025-04-07T13:57:56.067873Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDhjNTk1NzAtNGZlMWJjOTMtNmM5NmZjOWItMzgwZWMxMzQ= 2025-04-07T13:57:56.067915Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:57:56.067933Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service tables creation 2025-04-07T13:57:56.073159Z node 1 : ... 
ctorState: ExecuteState, TraceId: 01jr88smexev617fgg97thzvb9, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T13:59:46.517088Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2Y4NzgzYjktZTQ2NzBjMjItY2RhYWYzYjctNTQxMDljZjg=, ActorId: [6:7490570652956427620:2416], ActorState: ExecuteState, TraceId: 01jr88smexev617fgg97thzvb9, EndCleanup, isFinal: 1 2025-04-07T13:59:46.517143Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2Y4NzgzYjktZTQ2NzBjMjItY2RhYWYzYjctNTQxMDljZjg=, ActorId: [6:7490570652956427620:2416], ActorState: ExecuteState, TraceId: 01jr88smexev617fgg97thzvb9, Sent query response back to proxy, proxyRequestId: 15, proxyId: [6:7490570614301720832:2095] 2025-04-07T13:59:46.517189Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2Y4NzgzYjktZTQ2NzBjMjItY2RhYWYzYjctNTQxMDljZjg=, ActorId: [6:7490570652956427620:2416], ActorState: unknown state, TraceId: 01jr88smexev617fgg97thzvb9, Cleanup temp tables: 0 2025-04-07T13:59:46.517435Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2Y4NzgzYjktZTQ2NzBjMjItY2RhYWYzYjctNTQxMDljZjg=, ActorId: [6:7490570652956427620:2416], ActorState: unknown state, TraceId: 01jr88smexev617fgg97thzvb9, Session actor destroyed 2025-04-07T13:59:46.530059Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: ReadyState, TraceId: 01jr88smk1909jv21ar2tjqyks, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-07T13:59:46.567575Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570635776557959:2310], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-04-07T13:59:46.567762Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T13:59:46.567863Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570652956427696:2433], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-07T13:59:46.574935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570652956427696:2433], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-07T13:59:46.575721Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-07T13:59:46.578809Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570635776558155:2334], DatabaseId: /Root, PoolId: default, Got delete notification 2025-04-07T13:59:46.578874Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T13:59:46.578914Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570652956427715:2434], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T13:59:46.579140Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490570652956427715:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.579196Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:46.584332Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: ExecuteState, TraceId: 01jr88smk1909jv21ar2tjqyks, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7490570652956427686:2302] WorkloadServiceCleanup: 0 2025-04-07T13:59:46.586501Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: CleanupState, TraceId: 01jr88smk1909jv21ar2tjqyks, EndCleanup, isFinal: 0 2025-04-07T13:59:46.586615Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: CleanupState, TraceId: 01jr88smk1909jv21ar2tjqyks, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7490570614301720832:2095] Wait pool handlers 0.000018s: number handlers = 2 Wait pool handlers 1.000794s: number handlers = 2 Wait pool handlers 2.000944s: number handlers = 2 Wait pool handlers 3.001088s: number handlers = 2 Wait pool handlers 4.002436s: number handlers = 2 Wait pool handlers 5.004838s: number handlers = 2 Wait pool handlers 6.006428s: number handlers = 2 2025-04-07T13:59:52.672881Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:52.672921Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 7.010441s: number handlers = 2 Wait pool handlers 8.014428s: number handlers = 2 Wait pool handlers 9.017281s: number handlers = 2 Wait pool handlers 10.017790s: number handlers = 2 Wait pool handlers 11.018601s: number handlers = 2 Wait pool handlers 12.022171s: number handlers = 2 Wait pool handlers 13.022427s: number handlers = 2 2025-04-07T13:59:59.715461Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570635776557959:2310], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 14.026861s: number handlers = 2 Wait pool handlers 15.027034s: number handlers = 2 Wait pool handlers 16.028161s: number handlers = 2 Wait pool handlers 17.030436s: number handlers = 2 Wait pool handlers 18.035160s: number handlers = 2 Wait pool handlers 19.035291s: number handlers = 2 Wait pool handlers 20.038077s: number handlers = 2 Wait pool handlers 21.041664s: number handlers = 2 Wait pool handlers 22.041863s: number handlers = 2 Wait pool handlers 23.042009s: number handlers = 2 Wait pool handlers 24.042122s: number handlers = 2 Wait pool handlers 25.046435s: number handlers = 2 Wait pool handlers 26.046838s: number handlers = 2 Wait pool handlers 27.047632s: number handlers = 2 Wait pool handlers 28.050448s: number handlers = 2 Wait pool handlers 29.054435s: number handlers = 2 Wait pool handlers 30.058442s: number handlers = 2 Wait pool handlers 31.065736s: number handlers = 2 Wait pool handlers 32.066004s: number handlers = 2 Wait pool handlers 33.066154s: number handlers = 2 Wait pool handlers 34.066281s: number handlers = 2 Wait pool handlers 35.066443s: number handlers = 2 Wait pool handlers 36.070433s: number handlers = 2 Wait pool handlers 37.071654s: number handlers = 2 Wait pool handlers 38.074502s: number handlers = 2 Wait pool handlers 39.078450s: number handlers = 2 Wait pool handlers 40.081588s: number handlers = 2 Wait pool 
handlers 41.081748s: number handlers = 2 Wait pool handlers 42.081879s: number handlers = 2 Wait pool handlers 43.082523s: number handlers = 2 Wait pool handlers 44.085309s: number handlers = 2 Wait pool handlers 45.085440s: number handlers = 2 Wait pool handlers 46.102437s: number handlers = 2 Wait pool handlers 47.106445s: number handlers = 2 Wait pool handlers 48.110436s: number handlers = 2 Wait pool handlers 49.114494s: number handlers = 2 Wait pool handlers 50.114616s: number handlers = 2 Wait pool handlers 51.114935s: number handlers = 2 Wait pool handlers 52.118693s: number handlers = 2 Wait pool handlers 53.120133s: number handlers = 2 Wait pool handlers 54.122166s: number handlers = 2 Wait pool handlers 55.122438s: number handlers = 2 Wait pool handlers 56.123109s: number handlers = 2 Wait pool handlers 57.123487s: number handlers = 2 Wait pool handlers 58.125685s: number handlers = 2 Wait pool handlers 59.125823s: number handlers = 2 Wait pool handlers 60.125959s: number handlers = 2 Wait pool handlers 61.126103s: number handlers = 2 Wait pool handlers 62.126436s: number handlers = 2 Wait pool handlers 63.130447s: number handlers = 2 Wait pool handlers 64.134445s: number handlers = 2 Wait pool handlers 65.138452s: number handlers = 2 Wait pool handlers 66.154562s: number handlers = 2 Wait pool handlers 67.157993s: number handlers = 2 Wait pool handlers 68.158122s: number handlers = 2 Wait pool handlers 69.158256s: number handlers = 2 Wait pool handlers 70.158402s: number handlers = 2 Wait pool handlers 71.160924s: number handlers = 2 Wait pool handlers 72.163741s: number handlers = 2 Wait pool handlers 73.163889s: number handlers = 2 Wait pool handlers 74.164016s: number handlers = 2 Wait pool handlers 75.166443s: number handlers = 2 Wait pool handlers 76.169598s: number handlers = 2 Wait pool handlers 77.170485s: number handlers = 2 Wait pool handlers 78.171946s: number handlers = 2 Wait pool handlers 79.174491s: number handlers = 2 Wait pool handlers 80.174660s: number handlers = 2 Wait pool handlers 81.178589s: number handlers = 2 Wait pool handlers 82.178701s: number handlers = 2 Wait pool handlers 83.182674s: number handlers = 2 Wait pool handlers 84.186436s: number handlers = 2 Wait pool handlers 85.190379s: number handlers = 2 2025-04-07T14:01:12.272961Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570635776558155:2334], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-04-07T14:01:12.273131Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7490570635776557959:2310], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-04-07T14:01:12.273235Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-04-07T14:01:12.273265Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-07T14:01:12.821695Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:01:12.821770Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: ReadyState, Cleanup 
start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:01:12.821803Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:01:12.821831Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:01:12.821916Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTZlODg2MTMtMTRmN2QyNzAtYTJiYzdkMjUtOTE3NDM3MmM=, ActorId: [6:7490570635776557859:2302], ActorState: unknown state, Session actor destroyed >> KqpDecimalColumnShard::TestJoinById [GOOD] >> KqpOlap::OlapUpsertImmediate [GOOD] >> KqpOlap::MultiInsertWithSinks [GOOD] >> KqpOlap::OlapRead_GenericQuery [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsertImmediate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ProjectionOrder [GOOD] Test command err: Trying to start YDB, gRPC: 17903, MsgBus: 25140 2025-04-07T14:00:20.911373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570799828205970:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:20.911488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc5/r3tmp/tmpu1MbGH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17903, node 1 2025-04-07T14:00:21.384339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:21.384463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:21.390126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:21.444780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:21.450697Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:21.450724Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:00:21.541194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:21.541214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:21.541222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:21.541318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25140 TClient is connected to server localhost:25140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:22.213249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:22.234092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:22.253214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:22.352088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:22.352267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:22.352629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:22.352759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:22.352874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:22.353010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:22.353098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:22.353221Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:22.353343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:22.353468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:22.353563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:22.353679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570808418141084:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:22.405762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:22.405882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:22.406110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:22.406205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:22.406315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:22.406429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:22.406511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:22.407279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:22.407426Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:22.407524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:22.407618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:22.407719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570808418141085:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:22.442172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:22.442252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:22.444814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:22.445076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:22.445223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:22.445321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:22.445441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570808418141078:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:22.445542Z node 1 :TX_COLUMNSHARD WARN: ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:12.409221Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. 
}. wakeup with tag 2 2025-04-07T14:01:12.585081Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:12.585163Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:12.679601Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:12.679667Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:12.782187Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:12.782261Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:12.885130Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:12.885198Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:12.979894Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:12.993341Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:13.061082Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. 
CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.061161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.169079Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.169161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.261399Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.261465Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.363910Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.363979Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.472689Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.472751Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:01:13.523015Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:13.659790Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.659862Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.789118Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.789195Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.885111Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.885205Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:13.976085Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:13.976158Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:14.065203Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:01:14.065273Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWE1YTk2MzEtMmNjNzVhYzgtYmMwY2JkZDMtNTE1MzU5OTU=. CustomerSuppliedId : . TraceId : 01jr88vjyd2gy7ranhyja9q88z. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:14.149224Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:14.173311Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestJoinById [GOOD] Test command err: Trying to start YDB, gRPC: 18924, MsgBus: 25492 2025-04-07T14:01:04.007528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570986625682453:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:04.007571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f5c/r3tmp/tmpYRCLhG/pdisk_1.dat 2025-04-07T14:01:04.489087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:04.510139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:04.510229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:04.526970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18924, node 1 2025-04-07T14:01:04.677331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:04.677356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:04.677365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:04.677473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25492 TClient is connected to server localhost:25492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:05.267620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:07.296187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570999510584971:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.296326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:07.553796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:07.661671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:07.661850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:07.662104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:07.662211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:07.662326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:07.662450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:07.662547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:07.662643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:07.662743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:07.662834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:07.662926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:07.663020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999510585048:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:07.671213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:07.671283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:07.671383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:07.671409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:07.671596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:07.671625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:07.671710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:07.671760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:07.671824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:07.671847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:07.671881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:07.671905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:07.672410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:07.672441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:07.672643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:07.672690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:07.672844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:07.672870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:07.673062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:07.673088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:07.673212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:07.673249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FAL ... 018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:11.673108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:11.673210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:11.673303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:11.673388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:11.673476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:11.673567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:11.673654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:11.673746Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:11.673833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571018514098822:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:11.676943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:11.676995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:11.677109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:11.677138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:11.677266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:11.677288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:11.677358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:11.677383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:11.677436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:11.677456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:11.677489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:11.677511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:11.677767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:11.677802Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:11.677937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:11.677961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:11.678061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:11.678083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:11.678211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:11.678234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:11.678316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:11.678336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=3; 2025-04-07T14:01:11.767720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571016690454510:2416], DatabaseId: /Root, PoolId: default, Failed to fetch 
pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:11.767811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:11.768160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571016690454515:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:11.771249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:11.781763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571016690454517:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:14.046631Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034473000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:14.085407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571031399000787:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.085515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.088810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571031399000792:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.097131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:14.109323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571031399000794:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:15.475629Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034475001, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullMix >> KqpOlap::CompactionPlanner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::MultiInsertWithSinks [GOOD] Test command err: Trying to start YDB, gRPC: 8654, MsgBus: 27464 2025-04-07T14:01:11.202905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571019589962430:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.203030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f43/r3tmp/tmpRx6ox3/pdisk_1.dat 2025-04-07T14:01:11.640329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:11.640409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:11.670035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8654, node 1 2025-04-07T14:01:11.693317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:11.893659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:11.893681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:11.893689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:11.893788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27464 TClient is connected to server localhost:27464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:12.886779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:12.933307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:13.160448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.160622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.164890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.165061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.165197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.165293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.165401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.165509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.165618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.165760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.165876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.165982Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571028179897566:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.263689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.263749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.263958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.264047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.264171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.264262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.264352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.264438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.264550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.264647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.272944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.273110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571028179897567:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.344285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.355573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.355772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.355873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.355973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.356078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.356182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.356285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.356400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571028179897568 ... 
=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:13.451231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:13.451271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:13.451350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:13.451371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:13.451400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:13.451417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:13.451683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:13.451712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:13.451859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:13.451878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.452050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:13.452083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:13.452228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:13.452250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:13.452323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:13.452341Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:13.453940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:13.453983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:13.454078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:13.454098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:13.454226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:13.454247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:13.454324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:13.454347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:13.454389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:13.454410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:13.454438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:13.454455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:13.454745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:13.454786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:13.454933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:13.454960Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.455079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:13.455108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:13.455269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:13.455288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:13.455378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:13.455394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:13.518598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:01:15.511109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571036769832440:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.511214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.511537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571036769832475:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.515076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:15.538265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571036769832477:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:16.174867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:00.000000 ] uid: [ "a" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710662}],"finishes":[{"inc":{"count_include":1},"id":281474976710662}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000000;a;"}}]}; 2025-04-07T14:01:16.194856Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571019589962430:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:16.194908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:16.196240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000 ] uid: [ "b" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710662}],"finishes":[{"inc":{"count_include":1},"id":281474976710662}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000000;b;"}}]}; 2025-04-07T14:01:16.213507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:02.000000 ] uid: [ "c" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710662}],"finishes":[{"inc":{"count_include":1},"id":281474976710662}]},"p":{"include":0,"pk":"1970-01-01 00:00:02.000000;c;"}}]}; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 13531, MsgBus: 15776 2025-04-07T14:01:11.820690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571019212175071:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.820775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f2f/r3tmp/tmpf3VOYV/pdisk_1.dat 2025-04-07T14:01:12.672077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:12.675978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:12.676056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:12.678274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13531, node 1 2025-04-07T14:01:12.880079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:12.880120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:12.880129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:12.880224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:15776 TClient is connected to server localhost:15776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:13.452250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:13.472309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:13.494154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:13.600835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.601109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.601415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.601560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.601677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.601793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.601913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.602058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.602185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.602324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.602465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.602587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027802110133:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.642882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.642955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.643200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.643347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.643462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.643572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.643717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.643819Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.643919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.644024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.644115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.644245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027802110135:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.676907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.676968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.677219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.677320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.677426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.677550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.677644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.677765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571027802110143:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:14.038203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:14.038250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:14.038356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:14.038380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:14.038535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:14.038556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:14.038654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:14.038677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:14.038729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:14.038765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:14.038800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:14.038821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:14.039119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:14.039155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:14.039330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:14.039352Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.039477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:14.039501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:14.039708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:14.039730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:14.039830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:14.039850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:14.040303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:14.040329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:14.040429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:14.040450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:14.040621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:14.040675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:14.040747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:14.040772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:14.040819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:14.040847Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:14.040894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:14.040916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:14.041203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:14.041235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:14.041403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:14.041424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.041612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:14.041635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:14.041777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:14.041796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:14.041934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:14.041958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:14.074861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2025-04-07T14:01:15.603132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571036392045215:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.603227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.606291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571036392045242:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.612482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:01:15.630773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571036392045244:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:01:16.810014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571019212175071:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:16.810114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore >> KqpOlapWrite::TierDraftsGC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 28104, MsgBus: 29164 2025-04-07T14:01:08.487184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571007125019061:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:08.487239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f50/r3tmp/tmpPUcZSh/pdisk_1.dat 2025-04-07T14:01:08.910041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:08.910153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:08.916798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:08.954020Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28104, node 1 2025-04-07T14:01:09.069022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:09.069063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:09.069072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:09.069217Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29164 TClient is connected to server localhost:29164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:09.891772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:09.912990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:09.928317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:10.054124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:10.054301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:10.054554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:10.054663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:10.054748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:10.054892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:10.055750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:10.055924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:10.056047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:10.065364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:10.065489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:10.065607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571011419986810:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:10.097320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:10.097391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:10.097572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:10.097668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:10.097794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:10.097894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:10.097982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:10.098106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:10.098213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:10.098302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:10.098428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:10.098539Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571011419986804:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:10.129286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:10.129360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:10.129605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:10.129694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:10.129807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:10.129912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:10.130055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:10.130148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571011419986805:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
2025-04-07T14:01:10.179207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:10.179472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:10.179495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:10.179571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:10.179593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:10.179751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:10.179778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:10.184823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:10.184898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:10.185264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:10.189005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:10.189264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:10.189293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:10.189407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:10.189433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:10.189611Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:10.189637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:10.189746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:10.189769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; 2025-04-07T14:01:12.028055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571024304888995:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:12.028178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:12.028676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571024304889007:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:12.032953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:12.053604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571024304889009:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:17.339194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571007125019061:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:17.446918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:17.626868Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034473000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1297) '('"_id" '"3baf79f1-94833314-ea5929e5-f7c9df53") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $25 (Int32 '"6")) (let $26 (Just $25)) (let $27 (Int32 '1)) (let $28 '($26 $27)) (let $29 (If (== $25 (Int32 
'2147483647)) $28 '((+ $26 $27) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($28 $29)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"level" $7)) (let $11 (StructType '('_yql_agg_0 $6) '('_yql_agg_1 $7) $10)) (let $12 '('('"_logical_id" '1356) '('"_id" '"807e38b7-2838ac63-e9d2da64-bce1c596") '('"_wide_channels" $11))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $30 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $31 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $32 (KqpWideReadOlapTableRanges $30 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $31 (lambda '($33) (block '( (let $34 '('_yql_agg_0 'some '"id")) (let $35 '('_yql_agg_1 'some '"level")) (return (TKqpOlapAgg $33 '($34 $35) '('"level"))) ))))) (return (FromFlow $32)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('2))) (let $15 (StructType '('"column1" $6) '('"column2" $7) $10)) (let $16 '('('"_logical_id" '1894) '('"_id" '"b4f6536b-991db36d-b28d8a31-b343e91") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($36) (block '( (let $37 (lambda '($44 $45 $46 $47) $45 $46)) (let $38 (lambda '($48 $49 $50 $51 $52 $53) $52 (Coalesce $53 $50))) (let $39 (lambda '($54 $55 $56) $55 $56 $54)) (let $40 (WideCombiner (ToFlow $36) '"" (lambda '($41 $42 $43) $43) $37 $38 $39)) (return (FromFlow (WideSort $40 '('('2 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('2 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($57) (FromFlow (NarrowMap (ToFlow $57) (lambda '($58 $59 $60) (AsStruct '('"column1" $58) '('"column2" $59) '('"level" $60)))))) '('('"_logical_id" '1906) '('"_id" '"310785bd-b8078f98-b8676258-dc25f16f")))) (let $20 '($13 $17 $19)) (let $21 '('"level" '"column1" '"column2")) (let $22 (DqCnResult (TDqOutput $19 '0) $21)) (let $23 (KqpTxResultBinding $9 '0 '0)) (let $24 (KqpPhysicalTx $20 '($22) '('($5 $23)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $24) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 8080, MsgBus: 28546 2025-04-07T14:01:12.645456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571023383576135:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:12.645594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f14/r3tmp/tmpSFbOqU/pdisk_1.dat 2025-04-07T14:01:13.513118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:13.517219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:13.517307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:13.520114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 8080, node 1 2025-04-07T14:01:13.728906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:13.728928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:13.728939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:13.729021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28546 TClient is connected to server localhost:28546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:14.503377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:14.537823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:14.555964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:14.676651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:14.676850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:14.677117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:14.677238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:14.677359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:14.677504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:14.677607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:14.677726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:14.677826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:14.677916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.678041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:14.678146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571031973511270:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:14.732257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:14.732316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:14.732512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:14.732616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:14.732733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:14.732848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:14.732955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:14.733100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:14.733229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:14.733332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.733415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:14.733521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571031973511263:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:14.789498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:14.789563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:14.789752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:14.789889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:14.790018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:14.790125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:14.790212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:14.790313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571031973511265:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
RD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:14.862702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:14.862721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:14.862855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:14.862883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:14.862945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:14.862977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:14.863024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:14.863041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:14.863071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:14.863089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:14.863358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:14.863434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:14.863586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:14.863617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.863714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:14.863734Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:14.863891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:14.863915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:14.863999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:14.864013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; 2025-04-07T14:01:17.010407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571044858413438:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.010552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.010805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571044858413465:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.014516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:17.028188Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:01:17.028364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571044858413467:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:17.609386Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571023383576135:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:17.609447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level != 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Sort-Filter-TableFullScan"}],"Node Type":"Merge","SortColumns":["id (Asc)","resource_id (Asc)","level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level != 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 (StructType '('"id" $1) '('"level" (OptionalType $1)) '('"resource_id" (OptionalType (DataType 'Utf8))))) (let $3 '('('"_logical_id" '904) '('"_id" '"59bd85f1-22e44898-35273f20-e80133d8") '('"_wide_channels" $2))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $10 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $11 '('"id" '"level" '"resource_id")) (let $12 (KqpBlockReadOlapTableRanges $10 (Void) $11 '() '() (lambda '($15) (block '( (let $16 '('neq '"level" (Int32 '"5"))) (let $17 '('?? 
$16 (Bool 'false))) (return (KqpOlapFilter $15 (KqpOlapOr $17 '('empty '"level")))) ))))) (let $13 (Bool 'true)) (let $14 '('('0 $13) '('2 $13) '('1 $13))) (return (FromFlow (WideFromBlocks (WideSortBlocks $12 $14)))) ))) $3)) (let $5 '('('0 '"Asc") '('2 '"Asc") '('1 '"Asc"))) (let $6 (DqCnMerge (TDqOutput $4 '0) $5)) (let $7 (DqPhyStage '($6) (lambda '($18) (FromFlow (NarrowMap (ToFlow $18) (lambda '($19 $20 $21) (AsStruct '('"id" $19) '('"level" $20) '('"resource_id" $21)))))) '('('"_logical_id" '916) '('"_id" '"27fb1330-673630dd-53c5e9c4-4d0f959e")))) (let $8 '('"id" '"resource_id" '"level")) (let $9 (DqCnResult (TDqOutput $7 '0) $8)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $7) '($9) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $2) '"0" '"0")) '('('"type" '"query")))) ) >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 62079, MsgBus: 25109 2025-04-07T14:00:31.274092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570845417374438:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:31.274119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fbe/r3tmp/tmpeleZ7F/pdisk_1.dat 2025-04-07T14:00:32.309127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:32.333005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:32.341109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:32.341211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:32.344653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62079, node 1 2025-04-07T14:00:32.629102Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:32.629133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:32.629139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:32.629243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25109 TClient is connected to server localhost:25109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:34.377932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:34.409114Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:34.430512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:34.659517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.659697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.659933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.660039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.660132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:34.660234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.660316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:34.660440Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:34.668428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:34.677268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.677473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:34.677574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570858302277016:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:34.739617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.739668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.739874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.739991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.740077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:34.740177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.740266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:34.740375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:34.740497Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:34.740596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:34.740708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:34.740808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570858302277017:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:34.815924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:34.815987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:34.816175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:34.816947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:34.817138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:34.817284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:34.817377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570858302277042:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04- ... 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:15.321047Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:01:15.541054Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:15.541135Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:15.659609Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:15.659681Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:15.761068Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:15.761148Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:15.856603Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:15.856701Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:15.992257Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:16.005264Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:16.033070Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.033149Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.127669Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.127731Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.222674Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.222738Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.316889Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.316955Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.418037Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.418108Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:01:16.521545Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:16.605116Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.605191Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.721204Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.721296Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.830535Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.830623Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:16.921248Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:16.921330Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:17.015251Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:01:17.015330Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1784:3051], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWQ4MWQ3YTEtNTk5MmZmMWYtMTYxMThhMzQtMzdiNzAwMWI=. TraceId : 01jr88vrd0bmd2xqacf7ykccpx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:17.133234Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:17.144643Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::Aggregation_Sum [GOOD] >> KqpOlapAggregations::Json_GetValue_ToInt >> KqpOlapAggregations::Aggregation_Count_GroupBy [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 6090, MsgBus: 65080 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a9/r3tmp/tmpvhorQa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6090, node 1 TClient is connected to server localhost:65080 TClient is connected to server localhost:65080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] >> KqpDecimalColumnShard::TestJoinByDecimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TierDraftsGC [GOOD] Test command err: Trying to start YDB, gRPC: 32452, MsgBus: 17963 2025-04-07T14:01:12.457416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571023372234042:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:12.457727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f15/r3tmp/tmpwuizGl/pdisk_1.dat 2025-04-07T14:01:13.005605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:13.008021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:13.008093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:13.015729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32452, node 1 2025-04-07T14:01:13.187509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:13.187541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:13.187551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:13.187648Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17963 TClient is connected to server localhost:17963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:13.791874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:13.827218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:13.928226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.928420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.928692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.928802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.928908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.929026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.929161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.929272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.929417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.929534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.929639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.929731Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571027667201873:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.963829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.963888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.964093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.964204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.964290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.964372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.964469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.964579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.964729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.964829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.964909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.965030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.996658Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.996740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.996923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.997035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.997147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.997236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.997345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.997433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.997516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905710276672018 ... 
1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.098355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:19.098408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:19.098940Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034479000 at tablet 72075186224037888 2025-04-07T14:01:19.098965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:19.098988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:01:19.099004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:01:19.099021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:19.099047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:19.099066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.099082Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.099100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:19.099137Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571027667201874:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:19.113359Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-07T14:01:19.113417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2025-04-07T14:01:19.113438Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:01:19.113471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:19.113517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:01:19.113535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:01:19.113563Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:19.113607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:19.113635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.113652Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.113678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:19.113736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:19.113869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-07T14:01:19.113905Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2025-04-07T14:01:19.113914Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:01:19.113933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:19.113955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2025-04-07T14:01:19.114458Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034479000 at tablet 72075186224037890 2025-04-07T14:01:19.114490Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:19.114520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:01:19.114538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:01:19.114562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:19.114595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:19.114618Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.114637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:19.114660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:19.114706Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490571027667201875:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:19.114769Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034479000 at tablet 72075186224037891 2025-04-07T14:01:19.114786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:19.114805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7490571027667201915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2025-04-07T14:01:19.426339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571027667201873:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:19.426390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571027667201915:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:19.426414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571027667201875:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:19.426455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571027667201874:2291];fline=actor.cpp:33;event=skip_flush_writing; >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling >> KqpOlap::SelectLimit1ManyShards >> KqpOlap::BulkUpsertUpdate [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |89.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 10838, MsgBus: 27343 2025-04-07T14:01:10.968476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571015621789796:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:10.968512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f46/r3tmp/tmpNgXqGF/pdisk_1.dat 2025-04-07T14:01:11.487356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:11.492155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:11.492257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:11.495786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10838, node 1 2025-04-07T14:01:11.617222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:11.617250Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-07T14:01:11.617257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:11.617357Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27343 TClient is connected to server localhost:27343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:12.626230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:12.658360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:12.753058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:12.753273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:12.753536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:12.753655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:12.753799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:12.753922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:12.754024Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:12.754127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:12.754238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:12.754361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:12.754501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:12.754622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024211724839:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:12.804004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:12.804102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:12.804340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:12.804477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:12.804606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:12.804744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:12.804838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:12.804960Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:12.805142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:12.805277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:12.805378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:12.805500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024211724840:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:12.851416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:12.851488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:12.851676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:12.851845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:12.851964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:12.852060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:12.852189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:12.852291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571024211724856:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:12.852405Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:74905710242117248 ... -07T14:01:12.910095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:12.910194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:12.910219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:12.910422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:12.910442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:12.910548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:12.910636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:12.910683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:12.910702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:12.910757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:12.910782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:12.911085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:12.911130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:12.911330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:12.911352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:12.911444Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:12.911464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:12.911637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:12.911661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:12.911767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:12.911785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2025-04-07T14:01:14.654822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571032801659725:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.654920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.655462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571032801659746:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.666791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:14.679991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571032801659748:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:19.479872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571015621789796:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:19.480724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:19.666003Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034475001, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '877) '('"_id" '"dd8c16b2-35f376f9-a8584498-945ff8c9") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate 
(AsList '($25 '((Just (Int32 '"5")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '936) '('"_id" '"2c8ec059-4b0138d9-702a0fd7-e063874e") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1359) '('"_id" '"2bd659b4-fe7914eb-86255deb-9de0b73c") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1371) '('"_id" '"db3f22ae-52f4dc12-302d999-6f99299b")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum [GOOD] Test command err: Trying to start YDB, gRPC: 5317, MsgBus: 14052 2025-04-07T14:00:50.316301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570926696597868:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:50.316343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa3/r3tmp/tmp3QM9cO/pdisk_1.dat 2025-04-07T14:00:50.906737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:50.906824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:50.913522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:50.913736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5317, node 1 2025-04-07T14:00:51.051181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:51.051202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:51.051209Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-07T14:00:51.051356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14052 TClient is connected to server localhost:14052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:51.750891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:51.763072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:51.779828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:51.884905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.885149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.885410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.885548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.885685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.885820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.885936Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.886033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:51.886147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:51.886268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:51.886374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:51.886514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570930991565838:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:51.922105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.922177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.922399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.922499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.922599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.922682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.922772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.922869Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:51.922989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:51.923080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:51.923200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:51.923303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570930991565840:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:51.957974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:51.958041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:51.958616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:51.958757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:51.958876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:51.958965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:51.959109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:51.959215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570930991565850:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:17.895675Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.072917Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.073001Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.169098Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.169170Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.264643Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.264731Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.358826Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.358924Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:01:18.461134Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:18.472405Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:18.527362Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.527433Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.612375Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.612444Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.713110Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.713184Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.803564Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.803615Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.895195Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:18.895280Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:18.949113Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:19.077159Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:19.077228Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:19.192700Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:19.192784Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:19.286670Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:19.286748Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:19.380691Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:19.380765Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . 
DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:19.469070Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:19.469155Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3007], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88w40xaat5fkzpvabhmhr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjI5MDgyMzUtYjEzYzE0YTctY2JlMjcwMzMtMzIzYWM1ZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:19.549200Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:19.560565Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] Test command err: Trying to start YDB, gRPC: 63334, MsgBus: 62172 2025-04-07T14:01:11.612621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571019007369835:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.624940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f18/r3tmp/tmpyOGAc4/pdisk_1.dat 2025-04-07T14:01:12.666885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:12.670729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:12.670804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:12.671954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:12.674252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63334, node 1 2025-04-07T14:01:12.833241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:12.833271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:12.833279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:12.833371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62172 TClient is connected to server localhost:62172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:13.766950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:13.785046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:13.803438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:13.992093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:14.177736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:14.268611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:16.438646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571040482207878:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.438741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.612995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571019007369835:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:16.613043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:16.731241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.796024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.841430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.879032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.910585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.948900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:17.041830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571044777175671:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.041930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.042144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571044777175677:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:17.045680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:01:17.061406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571044777175679:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } CREATE TABLE `/Root/ColumnTableTest` (time Timestamp NOT NULL, class Utf8 NOT NULL, uniq Utf8 NOT NULL, PRIMARY KEY (time, class, uniq)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:18.433164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:01:18.513131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:18.513351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:18.513653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:18.513785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:18.513898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:18.514018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:18.514130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:18.514241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:18.514358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:18.514461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:18.514574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:18.514686Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7490571049072143373:2470];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:18.517617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:18.517680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:18.517797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:18.517844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:18.518002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:18.518033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:18.518127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:18.518158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:18.518232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:18.518261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:18.518302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:18.518333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:18.518787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:18.518828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:18.518967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:18.518997Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:18.519107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:18.519140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:18.519301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:18.519335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:18.519460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:18.519482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow13TimestampTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=312;columns=3; 2025-04-07T14:01:19.444981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037919;tx_state=complete;fline=interaction.h:353;batch=time: [ 2025-04-07 14:01:18.560682 ] class: [ "test" ] uniq: [ "test" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710672}],"starts":[{"inc":{"count_not_include":1},"id":281474976710672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710672}]},"p":{"include":2147483647}}]}; 2025-04-07T14:01:19.972853Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034479502, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] >> KqpOlap::OlapDeleteImmediatePK [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |89.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> KqpOlapSysView::StatsSysViewEnumStringBytes >> KqpOlapAggregations::Aggregation_Count_GroupByNull [GOOD] >> KqpOlap::BlockChannelAuto |89.8%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BulkUpsertUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 17162, MsgBus: 27558 2025-04-07T14:01:15.030256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571031881137584:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:15.030521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f03/r3tmp/tmpc7mH73/pdisk_1.dat 2025-04-07T14:01:15.712227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:15.712312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:15.724340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:15.750537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17162, node 1 2025-04-07T14:01:15.993729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:15.993762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:15.993769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:15.993843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27558 TClient is connected to server localhost:27558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:16.854407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:16.873461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, value Int32 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:19.235834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571053355974563:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.242325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.261794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:19.349225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:19.349458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:19.349736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:19.349869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:19.350006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:19.350141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:19.350246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:19.350346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:19.350443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:19.350547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:19.350658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:19.350749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571053355974641:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:19.366417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:19.366486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:19.366597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:19.366642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:19.367007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:19.367029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:19.367111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:19.367167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:19.367235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:19.367257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:19.367297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:19.367329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:19.367910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:19.367971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:19.368189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:19.368226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:19.368375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:19.368417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:19.371860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:19.371910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:19.372070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:19.372105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2025-04-07T14:01:19.481869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571053355974776:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.481941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.482313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571053355974781:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.486357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:19.498129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571053355974783:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:20.066305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571031881137584:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:20.067224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:20.200316Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034479544, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2025-04-07T14:01:20.608979Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034480000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:21.136608Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034481000, txId: 18446744073709551615] shutting down >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapDeleteImmediatePK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 2440, MsgBus: 10369 2025-04-07T14:01:15.787554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571036211404146:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:15.787590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eff/r3tmp/tmpWwWqs6/pdisk_1.dat 2025-04-07T14:01:16.367980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:16.379337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:16.379433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:16.383379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2440, node 1 2025-04-07T14:01:16.593133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:16.593154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:16.593162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:16.593253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10369 TClient is connected to server localhost:10369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:17.642612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:17.663399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:17.671341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:17.814935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:17.815149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:17.815451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:17.815564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:17.815660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:17.815777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:17.815895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:17.815996Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:17.816095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:17.816179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:17.816280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:17.816391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571044801339406:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:17.918775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:17.918844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:17.919086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:17.919179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:17.919301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:17.919411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:17.919524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:17.919628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:17.919719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:17.919826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:17.919939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:17.920040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571044801339408:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:17.967127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:17.967341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:17.967580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:17.967713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:17.967836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:17.967930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:17.968017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:17.968099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571044801339412:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:18.038797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:18.038884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:18.038912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:18.039100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:18.039135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:18.039221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:18.039244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:18.039304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:18.039323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:18.039353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:18.039373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:18.039633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:18.039676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:18.039917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:18.039943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:18.040079Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:18.040115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:18.040271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:18.040296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:18.040408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:18.040441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:18.100752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:20.152383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571057686241752:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:20.152439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571057686241725:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:20.152707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:20.164249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:20.188866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571057686241763:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:20.830421Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571036211404146:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:20.830485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:21.355376Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034481000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"43675d3e-ce872dba-6733f4a8-850d25cf") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 
(ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"51838151-9f0797ae-be06f781-563176da")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"1fbcd6ef-e15e4bf2-bce00f-c2b40c5e") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupByNull [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> KqpOlapBlobsSharing::TableReshardingConsistency64 >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait [FAIL] >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey >> KqpOlapAggregations::Json_GetValue >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy >> KqpDecimalColumnShard::TestFilterEqual >> KqpOlapSysView::StatsSysViewTable [GOOD] >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] >> TPQTest::TestLowWatermark [GOOD] >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] >> TPQTest::TestGetTimestamps >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 31639, MsgBus: 28375 2025-04-07T14:01:21.645151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571062787250872:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:21.645271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ee8/r3tmp/tmpVPSAoa/pdisk_1.dat 2025-04-07T14:01:22.131895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:22.131991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:22.133843Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:22.165180Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31639, node 1 2025-04-07T14:01:22.273248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:22.273270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:22.273278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:22.273386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28375 TClient is connected to server localhost:28375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:23.007984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:23.053550Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:23.070109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:23.178928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:23.179152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:23.179424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:23.179563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:23.179688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:23.179821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:23.179914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:23.180053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:23.180193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:23.180308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.180452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:23.180575Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571071377186003:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:23.253215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:23.253316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:23.253532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:23.253634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:23.253729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:23.253849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:23.253941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:23.254051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:23.254145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:23.254229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.254311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:23.254430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571071377185997:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:23.259425Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:23.259488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:23.259587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:23.259619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:23.259778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:23.259799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:23.259939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:23.259970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:23.260024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:23.260042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... 
normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:23.348470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:23.348571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:23.348598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:23.350478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:23.350513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:23.350613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:23.350651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:23.350723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:23.350748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:23.350780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:23.350807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:23.351104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:23.351194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:23.351403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:23.351443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.351564Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:23.351589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:23.351772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:23.351801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:23.351898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:23.351918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:23.352341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:23.352371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:23.352461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:23.352483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:23.352696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:23.352724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:23.352805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:23.352829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:23.352885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:23.352907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:23.352939Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:23.352961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:23.353296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:23.353451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:23.353629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:23.353662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.353814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:23.353847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:23.354064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:23.354090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:23.354188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:23.354218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:23.408535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:25.531119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571079967121024:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.531281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.531521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571079967121051:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.534888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:25.544573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571079967121053:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait [FAIL] Test command err: Trying to start YDB, gRPC: 24146, MsgBus: 27148 2025-04-07T13:59:42.221796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570634539395570:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:42.221842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100c/r3tmp/tmpWSh3UQ/pdisk_1.dat 2025-04-07T13:59:42.732952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:42.733062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:42.735053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24146, node 1 2025-04-07T13:59:42.779454Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:42.781334Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:59:42.824336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:42.878402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:42.878426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:42.878438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:42.878558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27148 TClient is connected to server localhost:27148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:43.704293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:43.716547Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:44.161195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T13:59:47.228056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570634539395570:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:47.228303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:59:50.308981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:50.309234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:50.309537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:50.309666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:50.309787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:50.309906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:50.310035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:50.310146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:50.310262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:50.310372Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:50.310511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:50.310615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038872;self_id=[1:7490570664604171466:2351];tablet_id=72075186224038872;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:50.325844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:50.325925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:50.326166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:50.326268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:50.326370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:50.326477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:50.326575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:50.326704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:50.326840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:50.326968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:50.327092Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:50.327186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570664604171353:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T13:59:50.344385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:50.344594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:50.344915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:50.345028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:50.345137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:50.345243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490570664604171358:2316];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;ev ... 
Ttl;skip=not_ready_tiers; 2025-04-07T14:01:20.647790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570986726767423:8183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037951;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.647982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490570986726767423:8183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037951;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.650062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570986726766962:8135];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.650479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570986726767249:8167];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.650611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490570986726767249:8167];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.651578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490570986726766961:8134];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.651760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490570986726766961:8134];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.698791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038865;self_id=[1:7490571046856320432:10340];tablet_id=72075186224038865;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:20.703395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490570986726767230:8164];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.707709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7490570986726767287:8172];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.738930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038523;self_id=[1:7490571046856320189:10306];tablet_id=72075186224038523;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:20.742888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570986726766153:8127];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.743101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490570986726766153:8127];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037903;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 
2025-04-07T14:01:20.763514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038277;self_id=[1:7490571046856319963:10278];tablet_id=72075186224038277;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:20.764136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7490570986726767162:8154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.764367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[1:7490570986726767427:8185];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037945;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.790522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038494;self_id=[1:7490571046856320054:10290];tablet_id=72075186224038494;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:20.798195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490570986726767040:8138];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.798372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490570986726767040:8138];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.799212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7490570986726767216:8161];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.799605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7490570986726767146:8148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.800119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490570986726767031:8137];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.800485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[1:7490570986726767412:8181];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037943;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.800612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7490570986726767115:8143];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037906;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.802092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038526;self_id=[1:7490571046856320033:10287];tablet_id=72075186224038526;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:20.802550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[1:7490570986726767403:8178];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.802730Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037944;self_id=[1:7490570986726767403:8178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037944;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.803161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570986726767253:8169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.803792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[1:7490570986726767500:8190];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037942;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.804994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[1:7490570986726767156:8151];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.805317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7490570986726767131:8146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.805644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[1:7490570986726767132:8147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037912;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.806870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7490570986726767413:8182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037925;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.806991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7490570986726767413:8182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037925;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.807183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7490570986726767399:8176];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.810812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7490570986726767252:8168];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037904;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:01:20.848091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038528;self_id=[1:7490571046856320349:10321];tablet_id=72075186224038528;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24146 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:628: Execute_ @ 0x17AC3CB1 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FCE9C5A4D8F 16. ??:0: ?? @ 0x7FCE9C5A4E3F 17. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 25833, MsgBus: 65506 2025-04-07T14:01:21.069160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571061425281945:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:21.069346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef2/r3tmp/tmpjShkYi/pdisk_1.dat 2025-04-07T14:01:21.631392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:21.631463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:21.633592Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:21.649026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25833, node 1 2025-04-07T14:01:21.901255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:21.901277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:21.901286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:21.901390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65506 TClient is connected to server localhost:65506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:22.757655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:22.792440Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:22.815906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:22.941180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:22.941360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:22.941634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:22.941738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:22.941847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:22.941950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:22.942050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:22.942155Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:22.942261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:22.942377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:22.942472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:22.942578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571065720249722:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:23.025497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:23.025564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:23.025869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:23.025978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:23.026079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:23.026174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:23.026269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:23.026350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:23.026447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:23.026552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.026656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:23.026771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065720249725:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:23.081825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:23.081899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:23.082134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:23.082228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:23.082306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:23.082395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:23.082500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:23.082592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065720249741:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
tionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:23.188925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:23.188982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:23.189163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:23.189202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:23.189329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:23.189351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:23.189501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:23.189540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:23.189644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:23.189665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; 2025-04-07T14:01:25.261088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571078605151897:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.261293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.263369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571078605151924:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.267142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:25.279142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571078605151926:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:26.086162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571061425281945:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:26.086231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:26.497234Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034486000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node 
Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1334) '('"_id" '"abb25ef5-328745c3-5f7b1fa7-62919c3e") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '1)) (let $19 (Just $18)) (let $20 '($19 $18)) (let $21 (If (== $18 (Int32 '2147483647)) $20 '((+ $19 $18) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($20 $21)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $6)) (let $11 (DqPhyStage '() (lambda '() (block '( (let $22 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $23 '('"id" '"jsondoc" '"jsonval")) (let $24 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $25 (OptionalType (DataType 'JsonDocument))) (let $26 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) (OptionalType (DataType 'Double)))))) (let $27 (ResourceType '"JsonNode")) (let $28 (OptionalType $27)) (let $29 '((ResourceType '"JsonPath"))) (let $30 (DataType 'Utf8)) (let $31 (DictType $30 $27)) (let $32 '($31)) (let $33 (CallableType '() $26 '($28) $29 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueNumber" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($55) (block '( (let $56 '((DataType 'Json) '"" '1)) (let $57 (CallableType '() '($27) $56)) (let $58 (Udf '"Json2.Parse" (Void) (VoidType) '"" $57 (VoidType) '"" '())) (return (Just (Apply $58 $55))) )))) (let $37 (Nothing $28)) (let $38 (CallableType '() $29 '($30))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.obj.obj_col2_int"))) (let $41 (Dict $31)) (let $42 (lambda '($59) (block '( (let $60 (Nothing $7)) (return $60) )))) (let $43 (lambda '($61) (If (Exists $61) (SafeCast $61 $7) (Nothing $7)))) (let $44 (KqpWideReadOlapTableRanges $22 %kqp%tx_result_binding_0_0 $23 '() $24 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $10 '('"jsondoc" $25) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (let $54 (Nothing $7)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (Int32 '"16"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($62 $63 $64) (block '( (let $65 (IfPresent $64 $36 $37)) (let $66 (Apply $35 $65 $40 $41)) (let $67 (Visit $66 '0 $42 '1 $43)) (let $68 (CallableType '() $26 '($25) $29 $32)) (let $69 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $68 (VoidType) '"" $34)) (let $70 (Apply $69 $63 $40 $41)) (let $71 (Visit $70 '0 $42 '1 $43)) (return (AsStruct '('"column1" $67) '('"column2" $71) '('"id" $62))) )))))) ))) '('('"_logical_id" '1405) '('"_id" '"79159d9a-9002cb16-5a6b413f-7486e02b")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($72) $72) '('('"_logical_id" '2473) '('"_id" '"72297ed1-d9c9b0c0-a8d34e9-adbb4a8c")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $9 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $7) '('"column2" $7) $10)) '1 '0)) '('('"type" '"scan_query")))) ) >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 18217, MsgBus: 12821 2025-04-07T14:00:54.210676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570944345689607:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.210729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8e/r3tmp/tmpgwjAjs/pdisk_1.dat 2025-04-07T14:00:54.730856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.733401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.733492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:54.738057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18217, node 1 2025-04-07T14:00:54.882023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.882073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.882085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.882398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12821 TClient is connected to server localhost:12821 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:55.842569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:00:57.866431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570957230592138:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:57.866520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:58.094980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:00:58.185331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:58.185521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:58.185768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:58.185867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:58.185966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:58.186077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:58.186171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:58.186263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:58.186369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:58.186470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:58.186554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:58.186678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570961525559511:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:00:58.198866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:58.198947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:58.199040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:58.199065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:58.199214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:58.199237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:58.199323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:58.199350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:58.199405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:58.199426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:58.199461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:58.199488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:58.199949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:58.199977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:58.200135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:58.200158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:00:58.200277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:58.200303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:58.200490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:58.200514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:58.200640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:58.200689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FAL ... tablet_id=72075186224037888;self_id=[4:7490571030402997981:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:14.518609Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:14.518688Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:14.518797Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:14.518828Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:14.519033Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:14.519070Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:14.519220Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:14.519260Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:14.519337Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:14.519360Z node 4 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:14.519404Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:14.519434Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:14.519826Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:14.519871Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:14.520091Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:14.520129Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:14.520265Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:14.520289Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:14.520469Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:14.520514Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:14.520639Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:14.520847Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2025-04-07T14:01:14.667089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571031922942295:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.667190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.667622Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571031922942300:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:14.671285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:14.688596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571031922942302:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:01:15.181887Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490571013223128257:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:15.181991Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:21.007868Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034475001, txId: 18446744073709551615] shutting down 2025-04-07T14:01:21.025380Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571060467769236:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:21.025463Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:21.025837Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571060467769241:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:21.030441Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:21.046062Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490571060467769243:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:21.196989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:21.197026Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:25.581270Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:25.581300Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:26.603800Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034481091, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] Test command err: Trying to start YDB, gRPC: 1553, MsgBus: 3437 2025-04-07T14:01:11.936093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571016054271786:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.936133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f17/r3tmp/tmpsJqy9M/pdisk_1.dat 2025-04-07T14:01:12.655417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:12.686328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:12.686427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:12.688217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1553, node 1 2025-04-07T14:01:12.812778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:12.812799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:12.812806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:12.812893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3437 TClient is connected to server localhost:3437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:13.427251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:13.445740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/cnt` (key Int32 NOT NULL, c Int32, PRIMARY KEY (key)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:15.984736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571033234141602:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.984852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.246862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:16.349222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:16.349415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:16.349638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:16.349728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:16.349808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:16.349909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:16.349992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:16.350082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:16.350164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:16.350273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:16.350373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:16.350452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571037529108974:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:16.353519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:16.353571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:16.353647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:16.353672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:16.353811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:16.353832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:16.353910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:16.353933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:16.353983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:16.354002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:16.354034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:16.354055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:16.354460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:16.354487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:16.354627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:16.354649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:16.354756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:16.354776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:16.354910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:16.354929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:16.355034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:16.355054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:16.940477Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571016054271786:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:16.963415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:26.488189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571080478782190:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:26.488277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:26.488535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571080478782195:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:26.495226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:26.506196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571080478782197:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:27.567546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=key: [ 1 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710661}],"starts":[{"inc":{"count_not_include":1},"id":281474976710661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":1},"id":281474976710661}],"starts":[{"inc":{"count_include":1},"id":281474976710661}],"finishes":[{"inc":{"count_include":1},"id":281474976710661}]},"p":{"include":0,"pk":"1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710661}]},"p":{"include":2147483647}}]}; 2025-04-07T14:01:27.657847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:27.657881Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 22470, MsgBus: 4349 2025-04-07T14:01:19.099007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571051349335875:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:19.099202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000efd/r3tmp/tmpszswAo/pdisk_1.dat 2025-04-07T14:01:19.781906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:19.804205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:19.804272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:19.809505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22470, node 1 2025-04-07T14:01:20.001310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:20.001333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:20.001340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:20.001423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4349 TClient is connected to server localhost:4349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:20.922625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:20.952879Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:20.972397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:21.096977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.097200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.097494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.097627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.097775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.097897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.098027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.098171Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:21.098295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:21.098416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:21.098517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:21.098644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571059939270972:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:21.177267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.177338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.177580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.177709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.177818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.177923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.178032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.178135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:21.178264Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:21.178374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:21.178458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:21.178922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571059939270974:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:21.246319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.246440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.246689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.246819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.246938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.247089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.247195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.247346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571059939270973:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:21.314544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:21.314670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:21.314688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:21.315306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:21.315403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:21.315502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:21.315528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:21.315679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:21.315705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:21.315797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:21.315885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:21.315938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:21.315958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:21.315992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:21.316011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:21.316313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:21.316363Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:21.316533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:21.316554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:21.316733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:21.316776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:21.316948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:21.316982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:21.317114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:21.317139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; 2025-04-07T14:01:23.203770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571068529205874:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:23.203832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571068529205850:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:23.204484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:23.210331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:23.222988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571068529205879:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:24.085021Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571051349335875:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:24.085095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:24.703024Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034484003, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int64)))))) (let $1 (OptionalType (DataType 'Int64))) (let $2 '('('"_logical_id" '663) '('"_id" '"d074ad5e-eec4f076-94bdce8f-37464942") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'sum '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll 
(TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Sum0 $28)))) (Nothing (OptionalType (StructType '('Sum0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Member $33 'Sum0)))))))) ))) '('('"_logical_id" '1334) '('"_id" '"ec2c7bde-b25316c-db91d835-8b85e48f")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1430) '('"_id" '"da13530a-5bd4a56-4ee5e660-d40b9dd6") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 5949, MsgBus: 2815 2025-04-07T14:01:14.427538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571029735351723:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:14.427833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f0a/r3tmp/tmppcWhR2/pdisk_1.dat 2025-04-07T14:01:15.165023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:15.167478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:15.167566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:15.176097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5949, node 1 2025-04-07T14:01:15.421248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:15.421270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:15.421278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:15.421360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2815 TClient is connected to server localhost:2815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:16.420143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:18.998877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571046915221405:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:18.999007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:19.362901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:19.420739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571029735351723:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:19.420790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:19.435805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:19.435961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:19.436223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:19.436317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:19.436396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:19.436478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:19.436559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:19.436657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:19.441354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:19.441452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:19.442991Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:19.443132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571051210188777:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:19.451876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:19.451933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:19.452012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:19.452042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:19.452193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:19.452214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:19.452291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:19.452316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:19.452372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:19.452393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:19.452423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:19.452443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:19.452876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:19.452911Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:19.453073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:19.453109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:19.453229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:19.453253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:19.453387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:19.453408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV ... S_NAME=CleanGranuleId; 2025-04-07T14:01:24.589162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:24.589268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:24.589381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:24.589481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:24.589577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:24.589671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:24.589759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490571074230072566:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:24.599419Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:24.599475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:24.599569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:24.599591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:24.599728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:24.599750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:24.599829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:24.599854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:24.599942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:24.599971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:24.600006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:24.600028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:24.600317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:24.600343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:24.600510Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:24.600534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:24.600680Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:24.600712Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:24.600874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:24.600897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:24.600989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:24.601023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2025-04-07T14:01:24.710778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571072685025471:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:24.710875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:24.711281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571072685025476:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:24.716380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:24.731913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571072685025478:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:24.776126Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571052755235402:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:24.776215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:25.465057Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034485004, txId: 18446744073709551615] shutting down 2025-04-07T14:01:25.489184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571078525039961:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.489270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.489699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571078525039966:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.493559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:25.516410Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:01:25.520331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571078525039968:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:26.146287Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034485557, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] Test command err: Trying to start YDB, gRPC: 10935, MsgBus: 7387 2025-04-07T14:01:11.008567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571017708643128:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.014362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f48/r3tmp/tmpGu207i/pdisk_1.dat 2025-04-07T14:01:11.428489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:11.428602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:11.434290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:11.506407Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10935, node 1 2025-04-07T14:01:11.711966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:11.711986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:11.711995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:11.712067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7387 TClient is connected to server localhost:7387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:12.763602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:12.797799Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:12.806404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:12.916296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:12.918512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:12.918793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:12.918906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:12.919021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:12.919139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:12.919251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:12.919372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:12.919466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:12.919575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:12.919703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:12.919795Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:12.954602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:12.954658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:12.954847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:12.954969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:12.955069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:12.955161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:12.955256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:12.955363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:12.955467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:12.955584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:12.955683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:12.955783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.026620Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.026679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.026881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.027011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.027116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.027205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.027346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.027454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571022003610939:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... torTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:25.154260Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T14:01:25.154282Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:01:25.154303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:25.154333Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:25.154356Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:25.154384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:25.154404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.154419Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.154436Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:25.154478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:25.155184Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034485000 at tablet 72075186224037888 2025-04-07T14:01:25.155213Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:25.155240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:25.155266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:25.155292Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:25.155310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.155323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.155340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:25.155376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571022003610937:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:25.184810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-07T14:01:25.184883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2025-04-07T14:01:25.184904Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:01:25.184933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:25.184973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:25.185004Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:25.185047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:25.185084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.185100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.185117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:25.185163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:25.185550Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034485000 at tablet 72075186224037889 2025-04-07T14:01:25.185576Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:25.185603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:25.185625Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:25.185653Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:25.185673Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.185688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:25.185704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:25.185740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571022003610938:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:25.259948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571022003610937:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.260034Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571022003610943:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.260068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571022003610938:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.260093Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571022003610939:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.761744Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571022003610937:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.761803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571022003610943:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.761831Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571022003610938:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:25.761892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571022003610939:2292];fline=actor.cpp:33;event=skip_flush_writing; |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 61739, MsgBus: 8570 2025-04-07T14:01:04.576154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570987160942378:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:04.576275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/000f5b/r3tmp/tmpikJ1bM/pdisk_1.dat 2025-04-07T14:01:05.176555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:05.176620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:05.182205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:05.203706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61739, node 1 2025-04-07T14:01:05.501136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:05.501164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:05.501171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:05.501246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8570 TClient is connected to server localhost:8570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:06.212575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:06.246795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:06.331460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.331605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.331893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.332011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.332066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.332132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:06.332210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.332288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:06.332403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:06.332481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.332561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:06.332843Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570995750877499:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:06.370041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.370124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.370443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.370668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.370823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.370936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:06.371152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.371334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:06.371517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:06.371707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.371862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:06.372014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570995750877513:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:06.403393Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:06.403455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:06.403682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:06.403821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:06.403921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:06.404037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:06.404122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:06.404228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:06.404339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570995750877512: ... 
OLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:06.446036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:06.446087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:06.446119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:06.446392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:06.446475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:06.446649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:06.446747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:06.446905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:06.446957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:06.447126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:06.447155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:06.447283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:06.447310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2025-04-07T14:01:08.423897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571004340812392:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: 
{
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.424046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.425130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571004340812404:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:08.428917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:08.440309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571004340812406:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:09.565720Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570987160942378:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:09.565799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:20.133889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:20.133924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:22.582580Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88w2t579q80p1mppz1h8ge", SessionId: ydb://session/3?node_id=1&id=MWYwOTE3OTQtZGEwNzNjZTctOWRkMzI3OTMtZTQzM2VkMDY=, Slow query, duration: 14.161102s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n SELECT\n id, AVG(level)\n FROM `/Root/tableWithNulls`\n WHERE id BETWEEN 4 AND 5\n GROUP BY id\n ORDER BY id;\n \n", parameters: 0b 2025-04-07T14:01:22.585756Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034469002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node 
Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1773) '('"_id" '"55c40346-94ba6657-62b2d147-5b42c38a") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"5")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1832) '('"_id" '"6e4a35c6-725124ec-5dfc06ae-743f308") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3420) '('"_id" '"438f8969-9a01d7f5-4da684bb-7299a839") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3432) '('"_id" '"66f43212-7043a972-70d1eaf7-7f6b4c45")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |89.9%| [LD] {RESULT} 
$(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |89.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> KqpOlapBlobsSharing::BlobsSharingSplit1_1 >> KqpOlapSysView::StatsSysViewBytesDictStatActualization >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey [GOOD] >> KqpOlap::OlapLayout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewTable [GOOD] >> KqpOlapAggregations::Json_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 24754, MsgBus: 5989 2025-04-07T14:00:50.792073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570926792509526:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:50.792123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fa0/r3tmp/tmpLzP8Cj/pdisk_1.dat 2025-04-07T14:00:51.399825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:51.399915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:51.402489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:51.485519Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24754, node 1 2025-04-07T14:00:51.652753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:51.652783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:51.652791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:51.652896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5989 TClient is connected to server localhost:5989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:52.396656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:52.412153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:52.435155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:52.551842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:52.552147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:52.552364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:52.552464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:52.552537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:52.552641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:52.552732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:52.552802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:52.552884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:52.552949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:52.553023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:52.553091Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570935382444582:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:52.587254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:52.587365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:52.587654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:52.587772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:52.587867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:52.587956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:52.588099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:52.588229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:52.588354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:52.588482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:52.588586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:52.588683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570935382444584:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:52.624068Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:52.624146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:52.624410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:52.624528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:52.624645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:52.624760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:52.624886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:52.625003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570935382444583:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
6; 2025-04-07T14:00:52.676146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:52.676216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:52.676236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:52.676270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:52.676302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:52.676586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:52.676632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:52.677388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:52.677416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:52.677969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:52.678004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:52.684862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:52.684913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:52.685546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:52.685571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:52.747524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 Status: 48 TxId: 281474976710660 Issues { message: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges)" severity: 1 } SchemeShardStatus: 4 SchemeShardReason: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges)" SchemeShardTabletId: 72057594046644480 PathId: 2 PathCreateTxId: 281474976710658 2025-04-07T14:00:52.800023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:00:52.803249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710661 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 2025-04-07T14:00:55.800779Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570926792509526:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:55.800851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2025-04-07T14:00:59.856987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570965447216511:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.857082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.857330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570965447216523:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:59.862496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:00:59.872348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570965447216525:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:01:06.403586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:06.403615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:12.085001Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88vw60bwwrc5tqq5xrktpe", SessionId: ydb://session/3?node_id=1&id=OWQ5OTZmNTktZGZjZmU3MDgtMWE1NWM5ZWUtYTEwYTZhZjc=, Slow query, duration: 12.232738s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId\n FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats`\n WHERE Activity = 1\n GROUP BY PathId, TabletId, Kind\n ORDER BY PathId, TabletId, Kind\n ", parameters: 0b 2025-04-07T14:01:12.086980Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034460776, txId: 281474976710664] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_2/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2025-04-07T14:01:25.128225Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88w8ed49877g417xsperwd", SessionId: ydb://session/3?node_id=1&id=NzcyM2VkMzgtZDBjOWQyMzAtMjk3YzNjYzgtODU4MDBlMzQ=, Slow query, duration: 12.726366s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId\n FROM `/Root/olapStore/olapTable_2/.sys/primary_index_stats`\n WHERE Activity = 1\n GROUP BY PathId, TabletId, Kind\n ORDER BY PathId, TabletId, Kind\n ", parameters: 0b 2025-04-07T14:01:25.130598Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034472964, txId: 281474976710666] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE PathId > UInt64("3") ORDER BY PathId, Kind, TabletId RESULT: 2025-04-07T14:01:25.700112Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034485556, txId: 281474976710668] shutting down >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 >> KqpOlapAggregations::BlockGenericWithDistinct >> KqpOlapBlobsSharing::SplitEmpty >> KqpOlapAggregations::Aggregation_Some >> KqpOlap::BlockChannelAuto [GOOD] >> KqpDecimalColumnShard::TestJoinByDecimal [GOOD] >> KqpDecimalColumnShard::TestFilterEqual [GOOD] >> KqpDecimalColumnShard::TestAggregation >> KqpDecimalColumnShard::TestFilterCompare >> KqpOlap::OlapLayout [GOOD] >> KqpOlapAggregations::NoErrorOnLegacyPragma >> KqpOlapSysView::StatsSysViewColumns [GOOD] >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] >> KqpOlapAggregations::Aggregation_Some [GOOD] >> KqpOlap::SelectLimit1ManyShards [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestJoinByDecimal 
[GOOD] Test command err: Trying to start YDB, gRPC: 8454, MsgBus: 9977 2025-04-07T14:01:21.446089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571061011521021:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:21.446227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eee/r3tmp/tmpjLmU4Z/pdisk_1.dat 2025-04-07T14:01:22.164227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:22.164329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:22.169956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:22.209158Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8454, node 1 2025-04-07T14:01:22.481214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:22.481234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:22.481239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:22.481310Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9977 TClient is connected to server localhost:9977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:23.405777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:23.421457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:25.898284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571078191390694:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:25.898363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:26.170846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:26.226550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:26.226878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:26.227263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:26.227471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:26.227689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:26.227832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:26.227926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:26.228036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:26.228136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:26.228224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:26.228312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:26.228390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082486358067:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:26.231538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:26.231587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:26.234989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:26.235037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:26.235283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:26.235307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:26.235398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:26.235442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:26.235522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:26.235544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:26.235576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:26.235597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:26.248170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:26.248231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:26.248446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:26.248516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:26.249894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:26.249926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:26.250127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:26.250156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:26.250255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:2 ... LASS_NAME=TablesCleaner; 2025-04-07T14:01:30.341310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:30.341409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:30.341490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:30.341578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:30.341670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:30.342041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:30.342198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:30.342299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7490571101000712427:2331];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:30.347129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2025-04-07T14:01:30.347176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:30.347253Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:30.347275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:30.347393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:30.347414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:30.347479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:30.347500Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:30.347545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:30.347564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:30.347594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:30.347617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:30.347886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:30.347916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:30.348066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:30.348091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:30.348191Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:30.348225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:30.348403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:30.348427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:30.348508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:30.348529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=3; 2025-04-07T14:01:30.425951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571099666227529:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.426044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.426291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571099666227534:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.431049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:30.483657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571099666227536:2422], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:31.680817Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571083820842448:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:31.680896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:32.129774Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034491003, txId: 18446744073709551615] shutting down 2025-04-07T14:01:32.155380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571109590647089:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:32.155437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:32.155569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571109590647094:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:32.160844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:32.184997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571109590647096:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:33.257641Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034492228, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts Test command err: Trying to start YDB, gRPC: 7406, MsgBus: 13976 2025-04-07T14:01:23.873270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571071856961303:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:23.873525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed6/r3tmp/tmpOhZPyo/pdisk_1.dat 2025-04-07T14:01:24.344910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:24.364441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:24.364536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:24.367546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7406, node 1 2025-04-07T14:01:24.503771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:24.503799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:24.503807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:24.503916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13976 TClient is connected to server localhost:13976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:25.199491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:25.215990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:25.229382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:25.275525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:25.307778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:25.307936Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:01:25.318667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:25.318891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:25.319200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:25.319332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:25.319488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:25.319622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:25.319742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:25.319867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:25.321036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:25.321248Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:25.321397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:25.321514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571080446896403:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:25.325398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:25.352838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:25.352962Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:01:25.359466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:25.359535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:25.359760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:25.359883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:25.359987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:25.360094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:25.360310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:25.360422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:25.360526Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:25.360646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:25.361108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:25.361216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571080446896416:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:25.363847Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2025-04-07T14:01:25.363962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:01:25.363984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:01:25.364162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:01:25.364273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:25.364319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::E ... 
ic=0; 2025-04-07T14:01:35.516184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:35.516244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:35.516295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:35.516339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.516361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.516390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:35.516452Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:35.521808Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034495000 at tablet 72075186224037888 2025-04-07T14:01:35.521861Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:35.521913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:35.521969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:35.522024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:35.522051Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.522068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.522100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:35.522157Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571110511668128:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:35.522252Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571110511668128:2523];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:35.527748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-07T14:01:35.536833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2025-04-07T14:01:35.536882Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:01:35.536918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:35.536975Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:35.537036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:35.537094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:35.537124Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.537143Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.537170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:35.537231Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:35.537806Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034495000 at tablet 72075186224037889 2025-04-07T14:01:35.537846Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:35.537885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:35.537937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:35.537975Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:35.537998Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.538015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:35.538042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:35.538087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571110511668124:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 2025-04-07T14:01:36.004989Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571110511668124:2522];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:36.024888Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571110511668128:2523];fline=actor.cpp:33;event=skip_flush_writing; 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:229: Execute_ @ 0x17AA848D 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F7337A31D8F 18. ??:0: ?? @ 0x7F7337A31E3F 19. ??:0: ?? 
@ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewColumns [GOOD] Test command err: Trying to start YDB, gRPC: 16893, MsgBus: 7548 2025-04-07T14:01:13.469190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571026243220294:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:13.469225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f0c/r3tmp/tmpOXpRBA/pdisk_1.dat 2025-04-07T14:01:14.060107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:14.060191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:14.066442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:14.073051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16893, node 1 2025-04-07T14:01:14.397693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:14.397717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:14.397724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:14.397807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7548 TClient is connected to server localhost:7548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:15.388064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:15.432343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:15.444411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:15.561303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.561515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.561775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.561880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.562011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.562133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.562240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.562380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.562504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.562596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.562707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.562813Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571034833155586:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.615183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.615257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.615484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.615589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.615700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.615843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.615959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.616071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.616158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.616272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.616372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.616481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571034833155573:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.640784Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:15.640853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:15.640983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:15.641020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:15.641239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:15.641285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:15.641399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:15.641449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:15.645044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:15.645138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_swi ... 
chema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:15.798727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.798837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:15.798859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:15.798992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:15.799015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:15.799144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:15.799162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:15.811316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:15.812814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:15.812924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:15.812948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:15.813202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:15.813231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:15.813335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:15.813368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:15.813419Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:15.813439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:15.813489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:15.813517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:15.813794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:15.813844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:15.814012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:15.814034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.814155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:15.814178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:15.814329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:15.814349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:15.814446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:15.814466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:15.855317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:01:18.474335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571026243220294:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:18.474440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT TabletId, PathId, Kind FROM `/Root/olapStore/.sys/store_primary_index_stats` ORDER BY PathId, Kind, TabletId LIMIT 4; RESULT: 2025-04-07T14:01:27.486544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571086372763779:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:27.486646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:27.487005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571086372763791:2578], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:27.497577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:27.507807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571086372763793:2579], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:28.431070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034488185, txId: 281474976710662] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT SUM(BlobRangeSize) as Bytes, SUM(Rows) as Rows, PathId, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, TabletId ORDER BY Bytes RESULT: 2025-04-07T14:01:29.065036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:29.065088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:33.974851Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034488962, txId: 281474976710664] shutting down TabletId: 72075186224037889 Rows: 33005 PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Rows: 33490 PathId: 3 Bytes: 249560 TabletId: 72075186224037888 Rows: 33505 PathId: 3 Bytes: 250032 ==================================== QUERY: SELECT Sum(Rows) as Rows, Kind, Sum(ColumnRawBytes) as RawBytes, PathId FROM `/Root/olapStore/.sys/store_primary_index_portion_stats` WHERE Activity == 1 GROUP BY Kind, PathId ORDER BY PathId, Kind, Rows RESULT: 2025-04-07T14:01:40.371915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034494840, txId: 281474976710666] shutting down Rows: 20000 PathId: 3 Kind: INSERTED RawBytes: 23478900 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some [GOOD] Test command err: Trying to start YDB, gRPC: 30049, MsgBus: 30236 2025-04-07T14:01:34.141431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571116618485188:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:34.141871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb1/r3tmp/tmpHdLAcN/pdisk_1.dat 2025-04-07T14:01:34.834976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:34.839252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:34.839355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:34.845067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30049, node 1 2025-04-07T14:01:34.978682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:34.978702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:34.978708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:34.978812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30236 TClient is connected to server localhost:30236 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:35.813969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:35.844631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:35.946686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.946883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.947157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.947285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.947409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.947526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.947618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.947727Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.947844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.947949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.948066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.948204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120913453022:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.996368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.996451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.996681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.996794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.996911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.997036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.997129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.997244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.997340Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.997420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.997581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.997667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571120913453023:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:36.035194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:36.035278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:36.035513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:36.035632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:36.035736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:36.035835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:36.035936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:36.036139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120913453024:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:36.036262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:74905711209134530 ... 
07T14:01:36.089236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:36.089288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:36.089422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:36.089444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:36.089545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:36.089620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:36.089665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:36.089681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:36.089720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:36.089749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:36.090058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:36.090950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:36.091180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:36.091207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:36.091319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:36.091351Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:36.091528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:36.091551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:36.091661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:36.091681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id=1 2025-04-07T14:01:38.048967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571133798355206:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.049074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.049552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571133798355218:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.054326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:38.065630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571133798355220:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:39.185348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571116618485188:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:39.186348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:39.369289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034498115, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id=1 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 '('"_partition_mode" '"single")) (let $2 '('('"_logical_id" '1076) '('"_id" '"27ea449-f876fbdf-b0a55df8-d7aa1d55") $1)) (let $3 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 
'1)) (let $28 (Just $27)) (let $29 '($28 $27)) (let $30 (If (== $27 (Int32 '2147483647)) $29 '((+ $28 $27) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($29 $30)))))))))) ))) $2)) (let $4 (DqCnValue (TDqOutput $3 '0))) (let $5 (KqpPhysicalTx '($3) '($4) '() '('('"type" '"compute")))) (let $6 '"%kqp%tx_result_binding_0_0") (let $7 (DataType 'Int32)) (let $8 (OptionalType $7)) (let $9 (TupleType $8 $7)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '1134) '('"_id" '"4f023895-81185a16-845be98b-388cc18b") '('"_wide_channels" (StructType '('_yql_agg_0 $8))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $31 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $32 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $33 (KqpWideReadOlapTableRanges $31 %kqp%tx_result_binding_0_0 '('"level") '() $32 (lambda '($34) (TKqpOlapAgg $34 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $33)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($35) (block '( (let $36 (Bool 'false)) (let $37 (WideCondense1 (ToFlow $35) (lambda '($39) $39) (lambda '($40 $41) $36) (lambda '($42 $43) (Coalesce $43 $42)))) (let $38 (Condense (NarrowMap (Take $37 (Uint64 '1)) (lambda '($44) (AsStruct '('Some0 $44)))) (Nothing (OptionalType (StructType '('Some0 $8)))) (lambda '($45 $46) $36) (lambda '($47 $48) (Just $47)))) (return (FromFlow (Map $38 (lambda '($49) (AsList (AsStruct '('"column0" (Member $49 'Some0)))))))) ))) '('('"_logical_id" '1668) '('"_id" '"a055deb9-28de90dc-e3953cc0-94ad5720")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($6 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $8)))) (let $21 '('('"_logical_id" '1764) '('"_id" '"7321d271-76bbc168-4de26f38-c0be109c") $1)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($5 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SelectLimit1ManyShards [GOOD] Test command err: 2025-04-07T14:01:28.431011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:01:28.449292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:01:28.449812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:01:28.450000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:28.450818Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:28.450860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eeb/r3tmp/tmpU4rwPl/pdisk_1.dat 2025-04-07T14:01:28.873306Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:29.057747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:01:29.188448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:29.188593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:29.192160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:29.192263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:29.214073Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:01:29.214601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:29.214978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:29.656784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:29.896173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1282:2802];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:29.922953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1282:2802];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:29.923282Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:01:29.931153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:29.931387Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:29.931625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:29.931728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:29.931815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:29.931924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:29.932052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:29.932156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:29.932260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:29.932360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:29.932465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:29.932577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:29.968193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1290:2807];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:29.990335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1290:2807];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:29.990553Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037895 2025-04-07T14:01:29.996194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:29.996287Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:29.996503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:29.996612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:29.996739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:29.996846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:29.996940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:29.997094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:29.997208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:29.997337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:29.997431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:29.997551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:30.004026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1295:2809];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:30.022619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1295:2809];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:30.022829Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037896 2025-04-07T14:01:30.048693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:30.048774Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:30.048982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:30.049105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:30.049204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:30.049305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... "StartTimeMs\":1744034499930,\"TaskId\":122,\"WaitInputTimeUs\":135085}]},{\"CpuTimeUs\":918,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499930,\"TaskId\":123,\"WaitInputTimeUs\":135630}]},{\"CpuTimeUs\":1005,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499931,\"TaskId\":124,\"WaitInputTimeUs\":136094}]},{\"CpuTimeUs\":967,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499932,\"TaskId\":125,\"WaitInputTimeUs\":136514}]},{\"CpuTimeUs\":915,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499932,\"TaskId\":126,\"WaitInputTimeUs\":137059}]},{\"CpuTimeUs\":1097,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499933,\"TaskId\":127,\"WaitInputTimeUs\":137669}]},{\"CpuTimeUs\":1000,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499933,\"TaskId\":128,\"WaitInputTimeUs\":138126}]},{\"CpuTimeUs\":1262,\"Tasks\":[{\"ComputeTimeUs\":18,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499900,\"TaskId\":67,\"WaitInputTimeUs\":158022}]},{\"CpuTimeUs\":976,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499901,\"TaskId\":68,\"WaitInputTimeUs\":158398}]},{\"CpuTimeUs\":1031,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499902,\"TaskId\":69,\"WaitInputTimeUs\":158813}]},{\"CpuTimeUs\":951,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499902,\"TaskId\":70,\"WaitInputTimeUs\":159189}]},{\"CpuTimeUs\":887,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499903,\"TaskId\":71,\"WaitInputTimeUs\":159614}]},{\"CpuTimeUs\":990,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499903,\"TaskId\":72,\"WaitInputTimeUs\":160012}]},{\"CpuTimeUs\":1017,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499904,\"TaskId\":73,\"WaitInputTimeUs\":160450}]},{\"CpuTimeUs\":1011,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTime
Ms\":1744034499904,\"TaskId\":74,\"WaitInputTimeUs\":160938}]},{\"CpuTimeUs\":960,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499905,\"TaskId\":75,\"WaitInputTimeUs\":161310}]},{\"CpuTimeUs\":985,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499906,\"TaskId\":76,\"WaitInputTimeUs\":161665}]},{\"CpuTimeUs\":892,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499906,\"TaskId\":77,\"WaitInputTimeUs\":162078}]},{\"CpuTimeUs\":904,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499907,\"TaskId\":78,\"WaitInputTimeUs\":162481}]},{\"CpuTimeUs\":889,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499907,\"TaskId\":79,\"WaitInputTimeUs\":162861}]},{\"CpuTimeUs\":922,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499908,\"TaskId\":80,\"WaitInputTimeUs\":163317}]},{\"CpuTimeUs\":984,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499908,\"TaskId\":81,\"WaitInputTimeUs\":163765}]},{\"CpuTimeUs\":1024,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499909,\"TaskId\":82,\"WaitInputTimeUs\":164131}]},{\"CpuTimeUs\":962,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499909,\"TaskId\":83,\"WaitInputTimeUs\":164522}]},{\"CpuTimeUs\":1025,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499910,\"TaskId\":84,\"WaitInputTimeUs\":164954}]},{\"CpuTimeUs\":974,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499911,\"TaskId\":85,\"WaitInputTimeUs\":165330}]},{\"CpuTimeUs\":924,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499911,\"TaskId\":86,\"WaitInputTimeUs\":165787}]},{\"CpuTimeUs\":922,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499912,\"TaskId\":87,\"WaitInputTimeUs\":166250}]},{\"CpuTimeUs\":987,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499912,\"TaskId\":88,\"WaitInputTimeUs\":166622}]},{\"CpuTimeUs\":1011,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499913,\"TaskId\":89,\"WaitInputTimeUs\":167045}]},{\"CpuTimeUs\":1039,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499913,\"TaskId\":90,\"WaitInputTimeUs\":167398}]},{\"CpuTimeUs\":1002,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499914,\"TaskId\":91,\"WaitInputTimeUs\":167730}]},{\"CpuTimeUs\":1057,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499914,\"TaskId\":92,\"WaitInputTimeUs\":168092}]},{\"CpuTimeUs\":1028,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499915,\"TaskId\":93,\"WaitInputTimeUs\":168453}]},{\"CpuTimeUs\":973,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499915,\"TaskId\":94,\"WaitInputTimeUs\":169011}]},{\"CpuTimeUs\":1099,\"Tasks\":[{\"ComputeTimeUs\":11,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":17440344999
16,\"TaskId\":95,\"WaitInputTimeUs\":169379}]},{\"CpuTimeUs\":1140,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499916,\"TaskId\":96,\"WaitInputTimeUs\":169766}]},{\"CpuTimeUs\":1057,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499917,\"TaskId\":97,\"WaitInputTimeUs\":170102}]},{\"CpuTimeUs\":1003,\"Tasks\":[{\"ComputeTimeUs\":11,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499917,\"TaskId\":98,\"WaitInputTimeUs\":170379}]},{\"CpuTimeUs\":969,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499918,\"TaskId\":99,\"WaitInputTimeUs\":170766}]},{\"CpuTimeUs\":1034,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499919,\"TaskId\":100,\"WaitInputTimeUs\":171130}]},{\"CpuTimeUs\":1037,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499919,\"TaskId\":101,\"WaitInputTimeUs\":171384}]},{\"CpuTimeUs\":1040,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499920,\"TaskId\":102,\"WaitInputTimeUs\":171717}]},{\"CpuTimeUs\":1036,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499920,\"TaskId\":103,\"WaitInputTimeUs\":171998}]},{\"CpuTimeUs\":1019,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499921,\"TaskId\":104,\"WaitInputTimeUs\":172442}]},{\"CpuTimeUs\":1085,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499921,\"TaskId\":105,\"WaitInputTimeUs\":172817}]},{\"CpuTimeUs\":1126,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499922,\"TaskId\":106,\"WaitInputTimeUs\":173210}]},{\"CpuTimeUs\":1035,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499922,\"TaskId\":107,\"WaitInputTimeUs\":173445}]},{\"CpuTimeUs\":943,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499923,\"TaskId\":108,\"WaitInputTimeUs\":173803}]},{\"CpuTimeUs\":1145,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499923,\"TaskId\":109,\"WaitInputTimeUs\":174126}]},{\"CpuTimeUs\":983,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499924,\"TaskId\":110,\"WaitInputTimeUs\":174345}]},{\"CpuTimeUs\":1125,\"Tasks\":[{\"ComputeTimeUs\":9,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499924,\"TaskId\":111,\"WaitInputTimeUs\":174559}]},{\"CpuTimeUs\":1129,\"Tasks\":[{\"ComputeTimeUs\":10,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499925,\"TaskId\":112,\"WaitInputTimeUs\":174781}]},{\"CpuTimeUs\":1011,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499926,\"TaskId\":113,\"WaitInputTimeUs\":175004}]},{\"CpuTimeUs\":1030,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499926,\"TaskId\":114,\"WaitInputTimeUs\":175318}]},{\"CpuTimeUs\":857,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499927,\"TaskId\":115,\"WaitInputTimeUs\":175575}]},{\"CpuTimeUs\":871,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":17
44034499927,\"TaskId\":116,\"WaitInputTimeUs\":175936}]},{\"CpuTimeUs\":779,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499928,\"TaskId\":117,\"WaitInputTimeUs\":176190}]},{\"CpuTimeUs\":854,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499928,\"TaskId\":118,\"WaitInputTimeUs\":176436}]},{\"CpuTimeUs\":891,\"Tasks\":[{\"ComputeTimeUs\":7,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499929,\"TaskId\":119,\"WaitInputTimeUs\":176769}]},{\"CpuTimeUs\":1030,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499929,\"TaskId\":120,\"WaitInputTimeUs\":177262}]},{\"CpuTimeUs\":822,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034499929,\"TaskId\":121,\"WaitInputTimeUs\":177730}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"selectStore\\/selectTable\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"45ac59b0-c7a10eed-c3b966ec-5e0ceb22\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\n\022\016\010\272\005\020\303\370\001\030\371\224\n \201\001" } } 2025-04-07T14:01:40.677242Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3295:3899] TxId: 281474976715662. Ctx: { TraceId: 01jr88x1rb79mactky2jc720t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBjZDA1MjEtYzhiMjIwNGQtYjIyZWNmODYtZGMxZGY1ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:01:40.677358Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3295:3899] TxId: 281474976715662. Ctx: { TraceId: 01jr88x1rb79mactky2jc720t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBjZDA1MjEtYzhiMjIwNGQtYjIyZWNmODYtZGMxZGY1ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.166521s ReadRows: 4 ReadBytes: 4318 ru: 111 rate limiter was not found force flag: 1 2025-04-07T14:01:40.682032Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=28;stage=FO::MERGE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 18248, MsgBus: 3969 2025-04-07T14:01:13.460869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571025170527943:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:13.463180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f0d/r3tmp/tmpZxI4mT/pdisk_1.dat 2025-04-07T14:01:14.064870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:14.081685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:14.081769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:14.100806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18248, node 1 2025-04-07T14:01:14.161267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:14.161299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:14.161319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:14.161435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3969 TClient is connected to server localhost:3969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:14.947502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:14.991139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:15.019800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:15.195050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.195738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.200591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.200948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.201063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.201219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.201339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.201456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.201581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.201671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.201762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.201862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571033760463072:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.254435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.254497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.254733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.254874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.255007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.255113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.255207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.255301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.255388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.255489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.255612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.255734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033760463073:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.307786Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.307876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.308136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.308285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.308393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.308532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.308641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.308757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033760463074:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.041049Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.188321Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.188386Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.255016Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. 
Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.255073Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.323810Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.323879Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.388984Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.389068Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.466069Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:40.477506Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:40.519836Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.519898Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.584438Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:01:40.584563Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.649692Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.649763Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.715913Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.715999Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.783083Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.783165Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:40.827778Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:40.947519Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:40.947585Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:01:41.040165Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:41.040242Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:41.109555Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:41.109635Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:41.181660Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:41.181733Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:41.250945Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:41.251023Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3005], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jr88wtq2enw9g0137t19b471. SessionId : ydb://session/3?node_id=2&id=MWY1MTY3NmUtZjIxNzQ5OTItMjBhNGVkMWQtZGU5MmM4M2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:01:41.307544Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:41.333384Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] Test command err: Trying to start YDB, gRPC: 12772, MsgBus: 15543 2025-04-07T14:01:34.080541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571115430341408:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:34.082626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb6/r3tmp/tmpifY171/pdisk_1.dat 2025-04-07T14:01:35.080759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:35.083656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:35.083752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:35.087299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:35.095699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12772, node 1 2025-04-07T14:01:35.265400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:35.265421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:35.265427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:35.265507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15543 TClient is connected to server localhost:15543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:35.988940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:36.019944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:36.121557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:36.131991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:36.132301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:36.132401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:36.132483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:36.132550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:36.132830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:36.133026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:36.133167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:36.136417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:36.136726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:36.136843Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571124020276531:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:36.189656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:36.189727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:36.189992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:36.190151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:36.190257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:36.190358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:36.190469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:36.190573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:36.190680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:36.190783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:36.190883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:36.191012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571124020276532:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:36.228504Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:36.228572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:36.228827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:36.228934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:36.229039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:36.229208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:36.229302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:36.229424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571124020276533:2292];tablet_id=72075186224037891;process=TTxInitSch ... 
ma::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:36.277281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:36.277333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:36.277353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:36.277404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:36.277431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:36.277813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:36.277868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:36.278047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:36.278097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:36.278236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:36.278258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:36.278466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:36.278492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:36.278584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:36.278601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 
REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; 2025-04-07T14:01:38.223459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571132610211424:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.223556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571132610211410:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.223685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.233516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:38.245398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571132610211439:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:39.073371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571115430341408:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:39.073436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (Bool 'false)) (let $2 (lambda '($20) $20)) (let $3 '('('"_logical_id" '1098) '('"_id" '"76f98c08-948b09a2-50bffc59-49057ee6") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $14 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $15 (KqpBlockReadOlapTableRanges $14 (Void) '('"id" '"level") '() '() (lambda '($16) (block '( (let $17 '('eq '"level" (Int32 '"5"))) (let $18 '('?? 
$17 $1)) (return (TKqpOlapExtractMembers (KqpOlapFilter $16 $18) '('"id"))) ))))) (return (FromFlow (WideCombiner (WideFilter (WideFromBlocks $15) (lambda '($19) (== (SafeCast $19 (DataType 'String)) (String '"5")))) '-1073741824 $2 (lambda '($21 $22) $21) (lambda '($23 $24 $25) $25) (lambda '($26 $27) $27)))) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('0))) (let $6 (Uint64 '1)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1803) '('"_id" '"b3b607be-1fed5e41-3d40b9f1-cd89145e") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($5) (lambda '($28) (block '( (let $29 (lambda '($32 $33))) (let $30 (WideCombiner (ToFlow $28) '"" $2 $29 $29 $2)) (let $31 (Condense1 (NarrowMap $30 (lambda '($34) (AsStruct '('"id" $34)))) (lambda '($35) $6) (lambda '($36 $37) $1) (lambda '($38 $39) (Inc $39)))) (return (FromFlow (ExpandMap $31 (lambda '($40) (Just $40))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($41) (block '( (let $42 (WideCondense1 (ToFlow $41) (lambda '($44) $44) (lambda '($45 $46) $1) (lambda '($47 $48) (IfPresent $47 (lambda '($49) (IfPresent $48 (lambda '($50) (Just (AggrAdd $49 $50))) $47)) $48)))) (let $43 (Condense (NarrowMap (Take $42 $6) (lambda '($51) (AsStruct '('Count0 (Unwrap $51))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($52 $53) $1) (lambda '($54 $55) (Just $54)))) (return (FromFlow (Map $43 (lambda '($56) (AsStruct '('"column0" (Coalesce (Member $56 'Count0) (Uint64 '0)))))))) ))) '('('"_logical_id" '2538) '('"_id" '"654904da-1b8026d2-e1dbd279-2cb7ad5e")))) (let $12 '($4 $9 $11)) (let $13 (DqCnResult (TDqOutput $11 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $12 '($13) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $7))) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 63885, MsgBus: 10319 2025-04-07T14:01:26.178235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571081508388729:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:26.178291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec8/r3tmp/tmpnWTpMV/pdisk_1.dat 2025-04-07T14:01:26.724754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:26.734309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:26.734383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:26.737672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63885, node 1 2025-04-07T14:01:26.887312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:26.887333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:26.887341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:26.887428Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:10319 TClient is connected to server localhost:10319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:27.474639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:27.497717Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:27.509607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:27.592872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.593041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.593375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.593482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.593569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.593668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.593778Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.593898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:27.594027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:27.594178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.594288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:27.594392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085803356570:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:27.630802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.630880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.631087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.631189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.631295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.631411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.631504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.631639Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:27.631752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:27.631891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.631999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:27.632098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085803356572:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:27.661741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.661811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.662011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.662175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.662275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.662365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.662478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.662585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571085803356573:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
LUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:27.705517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:27.705547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:27.705565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:27.705808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:27.705853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:27.706055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:27.706085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.706225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:27.706246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:27.706412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:27.706434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:27.706535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:27.706564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-04-07T14:01:29.410219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571094393291459:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.410267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571094393291471:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.410318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.413976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:29.422254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571094393291473:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:41.697541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:41.697575Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:42.279767Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571081508388729:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:42.406414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:42.635993Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88wqhe02qgs2x05jzfddeh", SessionId: ydb://session/3?node_id=1&id=MjI0Y2E5YzItMWVhNmYyZWQtM2JiMzJjZTEtOWNjNzUyZDc=, Slow query, duration: 13.227321s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n SELECT\n id, AVG(level)\n FROM `/Root/tableWithNulls`\n WHERE id BETWEEN 6 AND 7\n GROUP BY id\n ORDER BY id;\n \n", parameters: 0b 2025-04-07T14:01:42.638507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034490000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node 
Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1773) '('"_id" '"8f09b61c-2edb8feb-27960aa7-6dbfdcba") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1832) '('"_id" '"55ada5eb-3ca23b08-7cfed78b-9176507e") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3420) '('"_id" '"64ba7934-67952705-e4185b4d-b2af934a") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3432) '('"_id" '"b25f57b5-9073b827-32a603eb-f7778796")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue [GOOD] Test command 
err: Trying to start YDB, gRPC: 3680, MsgBus: 17696 2025-04-07T14:01:26.196982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571083737179360:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:26.197026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ecb/r3tmp/tmp0xogQi/pdisk_1.dat 2025-04-07T14:01:26.835797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:26.884543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:26.884630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:26.891142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3680, node 1 2025-04-07T14:01:27.114067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:27.114089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:27.114098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:27.114175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17696 TClient is connected to server localhost:17696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:27.891476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:27.930382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:27.959134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:28.055992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:28.056172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:28.056442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:28.056612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:28.056770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:28.056888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:28.057006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:28.057164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:28.057292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:28.057395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:28.057487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:28.057589Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571092327114566:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:28.090062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:28.090112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:28.090310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:28.090413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:28.090499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:28.090624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:28.090734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:28.090843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:28.090940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:28.091056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:28.091173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:28.091285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571088032147268:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:28.122551Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:28.122608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:28.122784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:28.122889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:28.122982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:28.123077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:28.123174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:28.123265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571088032147269:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
inished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:28.166777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:28.167031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:28.167059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:28.167212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:28.167233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:28.167336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:28.167356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:28.167514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:28.167544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:28.167624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:28.167695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1") = "val1" AND id = 1; 2025-04-07T14:01:29.948933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571096622082139:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.949200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.949645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571096622082166:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.954253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:29.965961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571096622082168:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:31.282741Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571083737179360:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:31.293118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:31.415053Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034490016, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1") = "val1" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No 
estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1328) '('"_id" '"73b43aca-62eaca45-758aa8c5-b31bd40b") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '1)) (let $20 (Just $19)) (let $21 '($20 $19)) (let $22 (If (== $19 (Int32 '2147483647)) $21 '((+ $20 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $28 (ResourceType '"JsonNode")) (let $29 (OptionalType $28)) (let $30 '((ResourceType '"JsonPath"))) (let $31 (DictType $10 $28)) (let $32 '($31)) (let $33 (CallableType '() $27 '($29) $30 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($54) (block '( (let $55 '((DataType 'Json) '"" '1)) (let $56 (CallableType '() '($28) $55)) (let $57 (Udf '"Json2.Parse" (Void) (VoidType) '"" $56 (VoidType) '"" '())) (return (Just (Apply $57 $54))) )))) (let $37 (Nothing $29)) (let $38 (CallableType '() $30 '($10))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.col1"))) (let $41 (Dict $31)) (let $42 (lambda '($58) (Nothing $11))) (let $43 (lambda '($59) $59)) (let $44 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (String '"val1"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($60 $61 $62) (block '( (let $63 (IfPresent $62 $36 $37)) (let $64 (Apply $35 $63 $40 $41)) (let $65 (Visit $64 '0 $42 '1 $43)) (let $66 (CallableType '() $27 '($26) $30 $32)) (let $67 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $66 (VoidType) '"" $34)) (let $68 (Apply $67 $61 $40 $41)) (let $69 (Visit $68 '0 $42 '1 $43)) (return (AsStruct '('"column1" $65) '('"column2" $69) '('"id" $60))) )))))) ))) '('('"_logical_id" '1399) '('"_id" '"756d43c2-1317120e-d71c8b17-c0fb041f")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($70) $70) '('('"_logical_id" '2259) '('"_id" '"600947d5-266c6afa-c04c206c-2fa478a8")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 Test command err: Trying to start YDB, gRPC: 10980, MsgBus: 16557 2025-04-07T14:01:20.531089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571057016289636:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:20.592801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef4/r3tmp/tmpbzcxLD/pdisk_1.dat 2025-04-07T14:01:21.239587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:21.239678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:21.251034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10980, node 1 2025-04-07T14:01:21.307485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:21.323856Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:21.323927Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:21.401245Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:21.401270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:21.401283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:21.401407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16557 TClient is connected to server localhost:16557 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:22.188345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:22.198303Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:22.209228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:22.268740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:22.294377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:22.294523Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:01:22.305162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:22.305607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:22.305895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:22.306039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:22.306152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:22.306293Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:22.306412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:22.306549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:22.306706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:22.306826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:22.306915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:22.307035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:22.311048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:22.341529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:22.341680Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-04-07T14:01:22.347751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:22.347820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:22.348015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:22.348102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:22.348216Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:22.348298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:22.348392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:22.348505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:22.348606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:22.349012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:22.349167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:22.349277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:22.352539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571065606224883:2292];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:22.374003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571065606224883:2292];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:22.374108Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-07T14:01:22.380683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065606224883:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:22.380741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571065606224883:2292];tablet_id=72075186224 ... 
2075186224037889;self_id=[1:7490571065606224881:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:32.405540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.405554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.405571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:32.405603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571065606224881:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:32.405661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-07T14:01:32.405695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2025-04-07T14:01:32.405717Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:01:32.405735Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:32.405761Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:32.405787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:32.405811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:32.405826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.405839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.405855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:32.405892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:32.405975Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034492000 at tablet 72075186224037888 2025-04-07T14:01:32.405992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:32.406012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:32.406030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:32.406050Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:32.406064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.406077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.406091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:32.406120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571065606224915:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:32.411122Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034492000 at tablet 72075186224037891 2025-04-07T14:01:32.411163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:32.411207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:32.411239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:32.411271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:32.411299Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.411333Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:32.411358Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:32.411402Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571065606224882:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:32.423679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571065606224881:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:32.423726Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571065606224882:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:32.423749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571065606224915:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:32.664769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571065606224883:2292];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:248: Execute_ @ 0x17AA9926 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 2025-04-07T14:01:32.921969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571065606224882:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:32.922012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571065606224915:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:32.922034Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571065606224881:2290];fline=actor.cpp:33;event=skip_flush_writing; 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F5420C12D8F 18. ??:0: ?? @ 0x7F5420C12E3F 19. ??:0: ?? 
@ 0x15851028 >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> KqpOlapAggregations::NoErrorOnLegacyPragma [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix >> KqpOlap::ExtractRanges >> KqpOlap::PredicatePushdownPartial >> KqpOlap::CompactionPlanner [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy >> KqpOlapCompression::OffCompression >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling >> KqpOlapBlobsSharing::MultipleMerge >> KqpOlapAggregations::CountAllNoPushdown >> KqpOlap::SimpleQueryOlapDiagnostics >> KqpOlapAggregations::Aggregation_Count_NullGroupBy >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.9%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |89.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompactionPlanner [GOOD] Test command err: Trying to start YDB, gRPC: 27223, MsgBus: 7440 2025-04-07T14:01:19.217153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571052236452610:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:19.217410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ef7/r3tmp/tmp49uItw/pdisk_1.dat 2025-04-07T14:01:19.833103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:19.833188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:19.871905Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:19.891981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27223, node 1 2025-04-07T14:01:20.069116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:20.069139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:20.069146Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:20.069234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7440 TClient is connected to server localhost:7440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:21.022736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:21.042735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:21.217993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.218210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.218435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.218537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.218621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.218698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.218801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.218913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:21.219016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:21.219113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:21.219207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:21.219289Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571060826387728:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:21.278823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.278875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.279093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.279208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.279316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.279398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.279490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.279590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:21.279692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:21.279789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:21.279889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:21.280017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571060826387729:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:21.322011Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:21.322092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:21.322297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:21.322409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:21.322525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:21.322640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:21.322745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:21.322866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:21.322978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571060826387756: ... 
-07T14:01:21.416565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:21.416615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:21.416724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:21.416749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:21.417134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:21.417164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:21.417260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:21.417284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:21.417338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:21.417362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:21.417395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:21.417435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:21.417766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:21.417797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:21.417984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:21.418021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:21.418133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:21.418180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:21.418345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:21.418369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:21.418450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:21.418467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:21.472254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:01:24.181431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571069416322633:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:24.191546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571052236452610:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:24.191625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:24.191664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:24.194155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:01:30.321048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571099481093912:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.321129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.328494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 2025-04-07T14:01:34.852002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:34.852027Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_COMPACTION: 3 2025-04-07T14:01:36.622416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571125250897866:2542], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:36.622481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:36.627507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710662:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=82565408;columns=5; WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 2025-04-07T14:01:45.067733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571163905603758:2637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.067830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.113019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571163905603767:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.113135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.115554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:45.146553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571163905603858:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.146619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.146657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571163905603863:2693], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:45.150126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-07T14:01:45.161234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571163905603865:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-07T14:01:46.045770Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034505206, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 3696, MsgBus: 6815 2025-04-07T14:01:11.438775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571017031241841:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.438813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f3e/r3tmp/tmp6xd090/pdisk_1.dat 2025-04-07T14:01:11.941989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:11.943448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:11.943547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:11.949227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3696, node 1 2025-04-07T14:01:12.071798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:12.071828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:12.071845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:12.071953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6815 TClient is connected to server localhost:6815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:12.940690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:12.956479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:12.972233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:13.063060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.063281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.063541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.063681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.063785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.063916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.064014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.064124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.064252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.064364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.064453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.064562Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571025621177098:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.094459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.094525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.094748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.094866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.094970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.095083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.095164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.095275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.095385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.095503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.095744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.095828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571025621177102:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.126054Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.126121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.126315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.126424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.126544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.126650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.126766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.126878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571025621177099:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:13.170308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:13.170451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:13.170474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:13.170574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:13.170602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:13.170670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:13.170690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:13.170722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:13.170742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:13.171044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:13.171084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:13.171236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:13.171259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.171391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:13.171413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:13.171563Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:13.171584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:13.171702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:13.171723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2025-04-07T14:01:15.189116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571034211111967:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.189283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.191098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571034211112002:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:15.195951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:15.217414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571034211112004:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:16.440775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571017031241841:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:16.440842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:21.618348Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034476002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1237) '('"_id" '"19e48b6-e321b051-faf73842-a33cecf6") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) 
(RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Int64)) (let $11 (OptionalType $10)) (let $12 '('"level" $2)) (let $13 (StructType '('_yql_agg_0 $10) '('_yql_agg_1 $11) $12)) (let $14 '('('"_logical_id" '1296) '('"_id" '"6f432bd-ae7c3844-ef840b03-92b9b1c7") '('"_wide_channels" $13))) (let $15 (DqPhyStage '() (lambda '() (block '( (let $27 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $27 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (block '( (let $31 '('_yql_agg_0 'sum '"id")) (let $32 '('_yql_agg_1 'sum '"level")) (return (TKqpOlapAgg $30 '($31 $32) '('"level"))) ))))) (return (FromFlow $29)) ))) $14)) (let $16 (DqCnHashShuffle (TDqOutput $15 '0) '('2))) (let $17 (StructType '('"column1" $10) '('"column2" $11) $12)) (let $18 '('('"_logical_id" '2136) '('"_id" '"f78dcd3c-711308b1-b3d247b5-c6195d3e") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($16) (lambda '($33) (block '( (let $34 (lambda '($41 $42 $43 $44) $42 $43)) (let $35 (lambda '($45 $46 $47 $48 $49 $50) (AggrAdd $46 $49) (AggrAdd $47 $50))) (let $36 (lambda '($51 $52 $53) $52 $53 $51)) (let $37 (WideCombiner (ToFlow $33) '"" (lambda '($38 $39 $40) $40) $34 $35 $36)) (return (FromFlow (WideSort $37 '('('2 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('2 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($54) (FromFlow (NarrowMap (ToFlow $54) (lambda '($55 $56 $57) (AsStruct '('"column1" $55) '('"column2" $56) '('"level" $57)))))) '('('"_logical_id" '2148) '('"_id" '"2d6fe7f8-3368bad8-1127d7e2-673fa50d")))) (let $22 '($15 $19 $21)) (let $23 '('"level" '"column1" '"column2")) (let $24 (DqCnResult (TDqOutput $21 '0) $23)) (let $25 (KqpTxResultBinding $9 '0 '0)) (let $26 (KqpPhysicalTx $22 '($24) '('($7 $25)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $26) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1 Test command err: Trying to start YDB, gRPC: 14335, MsgBus: 24536 2025-04-07T14:01:33.237660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571110583105598:2232];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:33.237717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb9/r3tmp/tmpJCVEiy/pdisk_1.dat 2025-04-07T14:01:33.613142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:33.613229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:33.614844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:33.617017Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
14335, node 1 2025-04-07T14:01:33.780554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:33.780573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:33.780584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:33.780684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24536 TClient is connected to server localhost:24536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:34.426110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:34.461616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:34.805857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:34.847651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:34.847843Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:01:34.857644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.857848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.858135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:34.858285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:34.858403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:34.858523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:34.858638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:34.858748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:34.858856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:34.858955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:34.859069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:34.859182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571114878073393:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:34.864525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:34.898455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:34.898601Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-04-07T14:01:34.905232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.905303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.905545Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:34.905643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:34.905767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:34.905867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:34.905983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:34.906081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:34.906161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:34.906277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:34.906398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:34.906484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:34.910089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571114878073409:2293];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:34.935805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571114878073409:2293];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:34.935926Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-07T14:01:34.949895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571114878073409:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.949965Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571114878073409:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.950201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571114878073409:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normal ... olumnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:43.472854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.472858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T14:01:43.472873Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:01:43.472902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:01:43.472913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.472956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.472958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.472987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.473005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.473007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.473043Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.473062Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.473090Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.473098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.473107Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.473132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.473191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.475093Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034503001 at tablet 72075186224037891 2025-04-07T14:01:43.475141Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.475201Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.475245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.475286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.475312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.475332Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.475359Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.475422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571114878073395:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.475508Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034503001 at tablet 72075186224037888 2025-04-07T14:01:43.475527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.475557Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.475587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.475615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.475631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.475646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.475661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.475701Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571114878073394:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.503715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571114878073394:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:43.503753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571114878073409:2293];fline=actor.cpp:33;event=skip_flush_writing; 
2025-04-07T14:01:43.504025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571114878073393:2290];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:208: Execute_ @ 0x17A9E31C 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 2025-04-07T14:01:43.957263Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571114878073395:2292];fline=actor.cpp:33;event=skip_flush_writing; 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F68C934FD8F 18. ??:0: ?? @ 0x7F68C934FE3F 19. ??:0: ?? 
@ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 Test command err: Trying to start YDB, gRPC: 9950, MsgBus: 26989 2025-04-07T14:01:33.498111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571111933762314:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:33.498156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec0/r3tmp/tmpuJ2kXl/pdisk_1.dat 2025-04-07T14:01:34.033157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:34.040360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:34.040444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:34.042681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9950, node 1 2025-04-07T14:01:34.168807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:34.168829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:34.168841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:34.168969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26989 TClient is connected to server localhost:26989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:35.141186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:35.164393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:35.188907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:35.295302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:35.339204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:35.343232Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-07T14:01:35.361221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.361501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.361795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.361913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.362049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.362167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.362277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.362382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.362526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.362655Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.362783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.362912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.371913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:35.413186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:35.413314Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:01:35.419180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.419248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.419482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.419823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.419924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.420016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.420099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.420399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.420536Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.420675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.420791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.421093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.424268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:01:35.453296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:01:35.453424Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:01:35.468902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.468971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.469358Z node 1 :TX_COLUMNSHARD WARN: ... 
eueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.656018Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.656050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.656081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.656104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.656122Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.656144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.656184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7490571120523697597:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.656289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571120523697585:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:43.656312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571120523697597:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:43.656958Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034503001 at tablet 72075186224037890 2025-04-07T14:01:43.657001Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.657074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.657119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.657182Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.657211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.657235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.657264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.657323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571120523697585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.657536Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034503001 at tablet 72075186224037888 2025-04-07T14:01:43.657563Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.657604Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.657641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.657680Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.657708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.657733Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.657764Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.657808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571120523697581:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:43.657892Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034503001 at tablet 72075186224037889 2025-04-07T14:01:43.657924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:01:43.657960Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:01:43.657992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:01:43.658026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:01:43.658053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.658073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:01:43.658094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:01:43.658143Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571120523697582:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:01:44.108498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571120523697582:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:44.108499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571120523697581:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:44.156634Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571120523697585:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:44.156705Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;parent=[1:7490571120523697597:2293];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:240: Execute_ @ 0x17AA9352 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FD64E9E4D8F 18. ??:0: ?? @ 0x7FD64E9E4E3F 19. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::NoErrorOnLegacyPragma [GOOD] Test command err: Trying to start YDB, gRPC: 26627, MsgBus: 25409 2025-04-07T14:01:41.308985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571147722714861:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:41.309051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eab/r3tmp/tmpv5lDC6/pdisk_1.dat 2025-04-07T14:01:41.800117Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26627, node 1 2025-04-07T14:01:41.836107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:41.836261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:41.837864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:41.878520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:41.878542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:41.878548Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:41.878655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25409 TClient is connected to server localhost:25409 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:42.407971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:42.421150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:42.428768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:42.513258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:42.513433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:42.513849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:42.514028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:42.514155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:42.514302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:42.514410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:42.514515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:42.514622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:42.514725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:42.514830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:42.514970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571152017682827:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:42.568586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:42.568799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:42.569120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:42.569304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:42.569424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:42.569554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:42.569660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:42.569786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:42.569885Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:42.569983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:42.570102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:42.570226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571152017682826:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:42.615139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:42.615221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:42.615463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:42.615555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:42.615644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:42.615727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:42.615830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:42.615950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571152017682842:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
mChunks; 2025-04-07T14:01:42.662138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:42.662161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:42.662319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:42.662348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:42.662438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:42.662460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:42.662864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:42.662897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:42.662983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:42.663006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:42.663164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:42.663204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:42.663284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:42.663330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:42.663401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:42.663427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:42.663457Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:42.663477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:42.663681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:42.663719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:42.663882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:42.663907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:42.664040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:42.664066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:42.664205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:42.664229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:42.664331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:42.664351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA Kikimr.KqpPushOlapProcess = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2025-04-07T14:01:44.526666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571160607617729:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:44.526665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571160607617704:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:44.526758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:44.530890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:44.540299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571160607617733:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:45.182025Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034505000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA Kikimr.KqpPushOlapProcess = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? 
$10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '572) '('"_id" '"9bed0bc8-a7b059da-3014834-8f8008eb")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '751) '('"_id" '"b5c3fb11-2f202eac-480eda-488a5804")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapLayout [GOOD] Test command err: Trying to start YDB, gRPC: 1133, MsgBus: 29796 2025-04-07T14:01:32.803162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571108599685780:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:32.803325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec1/r3tmp/tmpa0BqYS/pdisk_1.dat 2025-04-07T14:01:33.440400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:33.440472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:33.445371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:33.447905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1133, node 1 2025-04-07T14:01:33.647932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:33.647952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:33.647960Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:33.648066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29796 TClient is connected to server localhost:29796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:34.362037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:34.397767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:34.875402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.875590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.875873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:34.875963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:34.876123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:34.876213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:34.876309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:34.876422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:34.876529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:34.876617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:34.876747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:34.876855Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571117189621050:2299];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:34.926047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.926103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.926313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:34.926450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:34.926536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:34.926586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:34.926706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:34.926782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:34.926840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:34.926899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:34.926953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:34.927013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571117189621030:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:34.950325Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:34.950392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:34.950709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:34.950808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:34.950889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:34.951012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:34.951111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:34.951187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051:2300];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:34.951271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571117189621051 ... shards, code: 2003 } } } 2025-04-07T14:01:38.854997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571134369492281:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.855154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: Cannot create table with 17 column shards, only 16 are available, code: 2003 } } } 2025-04-07T14:01:38.885960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571134369492306:2614], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.886026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.905869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:01:38.936606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571134369492354:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.936681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:38.952859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.020800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459693:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.020877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.025265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.056952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459749:2640], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.057058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.065371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.102720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459811:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.102778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.111393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.148594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459855:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.148688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.154255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.202976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459894:2671], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.203048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.211657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.240993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459933:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.241132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.256197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.317231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664459991:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.317299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.330147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.343313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976710683 at schemeshard 72057594046644480 2025-04-07T14:01:39.379815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664460033:2696], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.379913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: cannot find appropriate group for 9 shards, code: 2003 } } } 2025-04-07T14:01:39.407209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664460051:2703], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.407270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.416092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.470657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664460118:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.470718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.473970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.506806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664460157:2721], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.506857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.514901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:01:39.552305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571138664460201:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.552352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:39.569293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelAuto [GOOD] Test command err: Trying to start YDB, gRPC: 31780, MsgBus: 1310 2025-04-07T14:01:23.693270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571071202635268:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:23.693319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ede/r3tmp/tmpzS8IOB/pdisk_1.dat 2025-04-07T14:01:24.258046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:24.258165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:24.267393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:24.268512Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31780, node 1 2025-04-07T14:01:24.461311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:24.461347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:24.461353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:24.461455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1310 TClient is connected to server localhost:1310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:25.260562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:25.279515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:25.298098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:25.462042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:25.621101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:25.713531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:27.532719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571088382505942:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:27.532829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:27.790977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:01:27.882102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:27.978009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:28.013854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:01:28.048687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:28.077077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:28.137794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571092677473736:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.137859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.138113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571092677473742:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.141152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:01:28.149887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571092677473744:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:01:28.690687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571071202635268:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:28.690750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:29.108256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:01:29.183278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:29.183277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:29.183459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:29.183750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:29.183872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:29.183973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:29.184025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:29.184077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:29.184189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:29.184197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:29.184266Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:29.184304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:29.184328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:29.184413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571096972441433:2470];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:29.184426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571096972441430:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 20 ... 29.224932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:29.224974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:29.225031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:29.225070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:29.225118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:29.225169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:29.225216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:29.225245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:29.225481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:29.225514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-04-07T14:01:29.225638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:29.225658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:29.225687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:29.225726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:29.225752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:29.225772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:29.225893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:29.225939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:29.226008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:29.226028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:29.226049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:29.226128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:29.226224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:29.226305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:29.226384Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:29.230966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:29.231088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:29.231166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:29.231233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:29.231294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571096972441432:2469];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:29.241231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:29.241281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:29.241370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:29.241396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:29.241516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:29.241544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:29.241611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:29.241639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:29.241687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:29.241710Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:29.241740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:29.241766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:29.241997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:29.242032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:29.242159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:29.242186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:29.242285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:29.242320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:29.242451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:29.242478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:29.242562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:29.242587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters [GOOD] Test command err: Trying to start YDB, gRPC: 23947, MsgBus: 27900 2025-04-07T14:01:25.034356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571073086868534:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:25.034394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/000ed2/r3tmp/tmpc5rpzQ/pdisk_1.dat 2025-04-07T14:01:25.754341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:25.774108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:25.774184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:25.782260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23947, node 1 2025-04-07T14:01:25.917170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:25.917192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:25.917209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:25.917284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27900 TClient is connected to server localhost:27900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:26.638829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:26.665387Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:26.694266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:26.874072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:26.874322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:26.874575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:26.874680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:26.874777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:26.874865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:26.874952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:26.875081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:26.875192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:26.875297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:26.875378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:26.875475Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571081676803783:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:26.978402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:26.978455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:26.978648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:26.978767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:26.978862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:26.978948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:26.979035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:26.979165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:26.979277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:26.979369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:26.979474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:26.979573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571081676803784:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:26.982900Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:26.982971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:26.983090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:26.983128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:26.983340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:26.983362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:26.983467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:26.983556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:26.983623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:26.983649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:27.091439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:27.091469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:27.091582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:27.091610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:27.091767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:27.091798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:27.091911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:27.091939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:27.091996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:27.092033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:27.092067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:27.092096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:27.092401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:27.092465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:27.092651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:27.092761Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.092917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:27.092940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:27.093125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:27.093150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:27.093273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:27.093292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:27.093674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:27.093710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:27.093792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:27.093812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:27.093961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:27.093985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:27.094064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:27.094089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:27.094194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:27.094218Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:27.094245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:27.094263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:27.094544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:27.094647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:27.094809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:27.094830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.094947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:27.094973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:27.095123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:27.095145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:27.095236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:27.095255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:27.135113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2025-04-07T14:01:28.907393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571090266738723:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.907458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571090266738697:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.907723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.911772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:28.929205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571090266738726:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:30.034884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571073086868534:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:30.034971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlapTiering::EvictionResetTiering [GOOD] >> KqpOlapSysView::StatsSysView >> KqpOlap::ManyColumnShards >> KqpOlapBlobsSharing::TableReshardingModuloN >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL >> KqpOlapAggregations::Aggregation >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::EvictionResetTiering [GOOD] Test command err: Trying to start YDB, gRPC: 4022, MsgBus: 13753 2025-04-07T14:00:41.629933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570891290611704:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:41.645604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fae/r3tmp/tmpPkVsUw/pdisk_1.dat 2025-04-07T14:00:42.344817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:42.348912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:42.348985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:42.353753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4022, node 1 2025-04-07T14:00:42.581478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:42.581500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:42.581507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:42.581588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13753 TClient is connected to server localhost:13753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:43.355900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:45.522732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:45.681112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.681326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.681594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.681721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.681840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.681963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.682094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.682223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.682341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.682446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.682561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.682681Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570908470481456:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.691261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.691328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.691541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.691665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.691762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.691883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.691992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.692109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.692220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:45.692326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:45.692435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:45.692539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570908470481457:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:45.730566Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:45.730643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:45.730902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:45.731022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:45.731112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:45.731191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:45.731300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:45.731398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455:2302];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:45.731523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570908470481455 ... 
ne=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 waiting actualization: 0/0.000020s waiting actualization: 0/1.005364s waiting actualization: 0/2.009346s waiting actualization: 0/3.010427s waiting actualization: 0/4.013331s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: TierName: __DEFAULT RawBytes: 119239000 2025-04-07T14:01:25.473125Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034479232, txId: 281474976710824] shutting down 2025-04-07T14:01:25.613638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710830:0, at schemeshard: 72057594046644480 waiting actualization: 0/0.000014s waiting actualization: 0/1.000577s waiting actualization: 0/2.000684s waiting actualization: 0/3.000868s waiting actualization: 0/4.001848s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2025-04-07T14:01:37.866642Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034491508, txId: 281474976710847] shutting down TierName: tier1 RawBytes: 119239000 2025-04-07T14:01:37.905267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710853:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: TierName: __DEFAULT RawBytes: 119239000 2025-04-07T14:01:49.313338Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034503518, txId: 281474976710872] shutting down FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 >> KqpOlapCompression::DisabledAlterCompression >> KqpOlapAggregations::Aggregation_Avg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestAggregation Test command err: Trying to start YDB, gRPC: 30462, MsgBus: 23637 2025-04-07T14:01:24.892620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571074600662229:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:24.892749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed5/r3tmp/tmpkBuP07/pdisk_1.dat 2025-04-07T14:01:25.463525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:25.463613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:25.485838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:25.487590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30462, node 1 2025-04-07T14:01:25.673600Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:25.673624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:25.673666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:25.673752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23637 TClient is connected to server localhost:23637 WaitRootIsUp 'Root'... 
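For reference, the tier-placement check that the EvictionResetTiering output above runs three times is the following statement, reproduced verbatim from the log (table path and column names unchanged). It sums the raw bytes of active portions in the column table's portion-stats system view, grouped by the tier each portion currently resides in:

SELECT TierName, SUM(ColumnRawBytes) As RawBytes
FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats`
WHERE Activity == 1
GROUP BY TierName

In the log this query returns TierName __DEFAULT (RawBytes 119239000) before the tiering rule is applied, tier1 after eviction, and __DEFAULT again once the tiering setting is reset, which is the sequence the test asserts.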
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:26.551202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (timestamp Timestamp64 NOT NULL, interval Interval64, PRIMARY KEY (timestamp)) PARTITION BY HASH(timestamp) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:28.717259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571091780531927:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.717354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:28.910421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:28.982225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:28.982403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:28.982675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:28.982798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:28.982910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:28.983006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:28.983116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:28.983224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:28.983327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:28.983430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:28.983530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:28.983616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571091780532005:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:28.990214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:28.990273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:28.990367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:28.990399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:28.990562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:28.990587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:28.990682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:28.990734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:28.990807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:28.990833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:28.990870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:28.990893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:28.991332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:28.991369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:28.991562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:28.991593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:28.991707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:28.991726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:28.991840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:28.991863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:28.991941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:28.991968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event= ... ::ExecContext*) const /-S/ydb/library/arrow_clickhouse/AggregateFunctions/AggregateFunctionWrapper.h:85:37 #3 0x19328570 in arrow::compute::CallFunction(std::__y1::basic_string, std::__y1::allocator> const&, std::__y1::vector> const&, arrow::compute::FunctionOptions const*, arrow::compute::ExecContext*) /-S/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec.cc:1041:16 #4 0x217064e7 in NKikimr::NSsa::TInternalFunction::Call(NKikimr::NSsa::TAggregateAssign const&, NKikimr::NSsa::TDatumBatch const&) const /-S/ydb/core/formats/arrow/program.cpp:78:26 #5 0x216eecfe in NKikimr::NSsa::TProgramStep::ApplyAggregates(NKikimr::NSsa::TDatumBatch&, arrow::compute::ExecContext*) const /-S/ydb/core/formats/arrow/program.cpp:759:56 #6 0x216f6cda in NKikimr::NSsa::TProgramStep::Apply(std::__y1::shared_ptr&, arrow::compute::ExecContext*) const /-S/ydb/core/formats/arrow/program.cpp:928:23 #7 0x173d1e49 in NKikimr::NSsa::TProgram::ApplyTo(std::__y1::shared_ptr&, arrow::compute::ExecContext*) const /-S/ydb/core/formats/arrow/program.h:479:37 #8 0x33b522de in NKikimr::NSsa::TProgram::ApplyTo(std::__y1::shared_ptr&, arrow::compute::ExecContext*) const /-S/ydb/core/formats/arrow/program.h:467:27 #9 0x33b438ff in arrow::Status NKikimr::NOlap::TProgramContainer::ApplyProgram(std::__y1::shared_ptr&) const /-S/ydb/core/tx/program/program.h:96:29 #10 0x33b4262f in NKikimr::NOlap::NReader::NPlain::TBaseMergeTask::PrepareResultBatch() /-S/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp:39:85 #11 0x33b47fc1 in NKikimr::NOlap::NReader::NPlain::TStartMergeTask::DoExecuteImpl() /-S/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp:101:13 #12 0x33c68d54 in NKikimr::NOlap::NReader::IDataTasksProcessor::ITask::DoExecute(std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp:8:19 #13 0x33acc802 in NKikimr::NConveyor::ITask::Execute(std::__y1::shared_ptr, std::__y1::shared_ptr const&) /-S/ydb/core/tx/conveyor/usage/abstract.cpp:13:36 #14 0x46014700 in NKikimr::NConveyor::TWorker::ExecuteTask(NKikimr::NConveyor::TWorkerTask const&) /-S/ydb/core/tx/conveyor/service/worker.cpp:10:36 #15 0x4600dd0f in NKikimr::NConveyor::TWorker::StateMain(TAutoPtr&) /-S/ydb/core/tx/conveyor/service/worker.h:93:13 #16 0x1aeb7a46 
in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #17 0x1aeb1818 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #18 0x1aebac74 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #19 0x1aeb9b7d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #20 0x1aebc56a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #21 0x180e8b44 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #22 0x17da03f8 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 #23 0x7f240305eac2 (/lib/x86_64-linux-gnu/libc.so.6+0x94ac2) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x7f24030f084f (/lib/x86_64-linux-gnu/libc.so.6+0x12684f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) AddressSanitizer can not provide additional info. warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0 warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0 warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00 warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30 warning: address range table at offset 0xd50 has a premature terminator entry at offset 0xd60 warning: address range table at offset 0xd80 has a premature terminator entry at offset 0xd90 warning: address range table at offset 0x1360 has a premature terminator entry at offset 0x1370 warning: address range table at 
offset 0x1390 has a premature terminator entry at offset 0x13a0 warning: address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 warning: address range table at offset 0x1510 has a premature terminator entry at offset 0x1520 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 SUMMARY: AddressSanitizer: SEGV /-S/ydb/library/arrow_clickhouse/Aggregator.cpp:104:74 in CH::Aggregator::Aggregator(CH::Aggregator::Params const&) Thread T95 (ydb-core.User) created by T0 here: 2025-04-07T14:01:48.032816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:01:48.032854Z node 2 :IMPORT WARN: Table profiles were not loaded #0 0x17d885d1 in pthread_create /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:250:3 #1 0x180e6f4f in Start /-S/util/system/thread.cpp:229:27 #2 0x180e6f4f in TThread::Start() /-S/util/system/thread.cpp:314:34 #3 0x1ae8d31c in NActors::TBasicExecutorPool::Start() /-S/ydb/library/actors/core/executor_pool_basic.cpp:479:32 #4 0x1ae6a01b in NActors::TCpuManager::Start() /-S/ydb/library/actors/core/cpu_manager.cpp:82:32 #5 0x1ae5f84d in NActors::TActorSystem::Start() /-S/ydb/library/actors/core/actorsystem.cpp:312:21 #6 0x387c66ef in NActors::TTestActorRuntime::Initialize(NActors::TTestActorRuntime::TEgg) /-S/ydb/core/testlib/actors/test_runtime.cpp:218:32 #7 0x392518d7 in NActors::TTestBasicRuntime::Initialize(NActors::TTestActorRuntime::TEgg) /-S/ydb/core/testlib/basics/runtime.cpp:22:28 #8 0x39352484 in NKikimr::SetupBasicServices(NActors::TTestActorRuntime&, NKikimr::TAppPrepare&, bool, NKikimr::NFake::INode*, NKikimr::NFake::TStorage, NKikimrSharedCache::TSharedCacheConfig const*, bool) /-S/ydb/core/testlib/basics/services.cpp:378:17 #9 0x392c13ce in NKikimr::SetupTabletServices(NActors::TTestActorRuntime&, NKikimr::TAppPrepare*, bool, NKikimr::NFake::TStorage, NKikimrSharedCache::TSharedCacheConfig const*, bool) /-S/ydb/core/testlib/tablet_helpers.cpp:649:9 #10 0x3915959a in NKikimr::Tests::TServer::Initialize() /-S/ydb/core/testlib/test_client.cpp:286:9 #11 0x3915777f in NKikimr::Tests::TServer::TServer(TIntrusiveConstPtr>, bool) /-S/ydb/core/testlib/test_client.cpp:181:13 #12 0x3915a13b in NKikimr::Tests::TServer::TServer(NKikimr::Tests::TServerSettings const&, bool) /-S/ydb/core/testlib/test_client.cpp:186:11 #13 0x463a5ddb in MakeHolder /-S/util/generic/ptr.h:370:27 #14 0x463a5ddb in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:156:18 #15 0x4651e94d in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:686:30 #16 0x4651e94d in NKikimr::NKqp::TTestHelper::TTestHelper(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/columnshard.cpp:46:18 #17 0x17ce4ba3 in TDecimalTestCase /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:30:15 #18 0x17ce4ba3 in NKikimr::NKqp::NTestSuiteKqpDecimalColumnShard::TTestCaseTestAggregation::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:224:26 #19 0x17cf05a7 in operator() /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1 #20 0x17cf05a7 
in __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #21 0x17cf05a7 in __call<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #22 0x17cf05a7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #23 0x17cf05a7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #24 0x185a0428 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #25 0x185a0428 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #26 0x185a0428 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #27 0x18568098 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #28 0x17cef773 in NKikimr::NKqp::NTestSuiteKqpDecimalColumnShard::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1 #29 0x18569965 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #30 0x1859a06c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #31 0x7f2402ff3d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) ==295148==ABORTING |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |90.0%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |90.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.0%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> KqpDecimalColumnShard::TestFilterCompare [FAIL] >> KqpOlap::ExtractRanges [GOOD] >> KqpOlapCompression::OffCompression [GOOD] >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] >> KqpOlap::SimpleQueryOlapDiagnostics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ExtractRanges [GOOD] Test command err: Trying to start YDB, gRPC: 22806, MsgBus: 13047 2025-04-07T14:01:47.378086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571171172540267:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:47.378536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ea8/r3tmp/tmpuLKuhE/pdisk_1.dat 2025-04-07T14:01:47.986514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:47.986587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-07T14:01:47.992044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:48.049436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22806, node 1 2025-04-07T14:01:48.162543Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:48.172500Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:48.208377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:48.208395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:48.208401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:48.208485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13047 TClient is connected to server localhost:13047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:48.761371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:48.775117Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:48.783862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:48.899181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:48.899403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:48.899662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:48.899784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:48.899896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:48.899993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:48.900102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:48.900243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:48.900362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:48.900481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:48.900597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:48.901136Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571175467508251:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:48.938092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:48.938146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:48.938336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:48.938454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:48.938561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:48.938644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:48.938750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:48.938849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:48.939019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:48.939123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:48.939206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:48.939302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571175467508252:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:48.970110Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:48.970166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:48.970390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:48.970504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:48.970594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:48.970712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:48.970803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571175467508253:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:48.970906Z node 1 :TX_COLUMNSHARD WARN: ... 
a::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:49.011821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:49.011887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:49.011906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:49.011950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.011966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.011993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.012021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.012337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.012408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.012562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.012587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.013390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.013421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.013570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.013594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.013677Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.013694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:49.014008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:49.014034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:49.014111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:49.014129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:49.014254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:49.014276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:49.014346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:49.014369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:49.014473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.014505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.014538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.014555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.014780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.014817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.014958Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.014993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.015093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.015110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.015291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.016063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.016181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.016199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:49.082826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; ==================================== QUERY: SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp` < CAST(1000100 AS Timestamp) AND `timestamp` > CAST(1000095 AS Timestamp)) OR (`timestamp` <= CAST(1001000 AS Timestamp) AND `timestamp` >= CAST(1000999 AS Timestamp)) OR (`timestamp` > CAST(1002000 AS Timestamp)) ORDER BY `timestamp` LIMIT 1000; RESULT: 2025-04-07T14:01:51.088890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188352410463:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.089001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.092944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188352410490:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.097214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:51.109498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:01:51.112853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571188352410492:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:52.188435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034511156, txId: 18446744073709551615] shutting down timestamp: 1970-01-01T00:00:01.000096Z timestamp: 1970-01-01T00:00:01.000097Z timestamp: 1970-01-01T00:00:01.000098Z timestamp: 1970-01-01T00:00:01.000099Z timestamp: 1970-01-01T00:00:01.000999Z timestamp: 1970-01-01T00:00:01.001000Z 2025-04-07T14:01:52.380805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571171172540267:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.380890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::OffCompression [GOOD] Test command err: Trying to start YDB, gRPC: 7205, MsgBus: 31438 2025-04-07T14:01:48.234113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571175545264844:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.234252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e9d/r3tmp/tmpyzITgi/pdisk_1.dat 2025-04-07T14:01:48.709700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7205, node 1 2025-04-07T14:01:48.738353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:48.738436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:48.763962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:48.872884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:48.872903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:48.872920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:48.873008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31438 TClient is connected to server localhost:31438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.506143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:49.549573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/StandaloneTable` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:51.777923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188430167241:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.778056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.145792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:52.212428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:52.212896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:52.213129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:52.213251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:52.213340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:52.213432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:52.213523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:52.213634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:52.213811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:52.213976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:52.214082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:52.214176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571192725134614:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:52.218026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:52.218074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:52.218153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:52.218176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:52.218315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:52.218365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:52.218452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:52.218477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:52.218528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:52.218551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:52.218584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:52.218618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:52.219053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:52.219079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:52.219227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:52.219246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:52.219359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:52.219379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:52.219509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:52.219528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:52.219665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:52.219693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:52.296917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571192725134738:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.296995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.302574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:52.329069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571192725134765:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.329135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.338701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:01:52.378815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:52.378877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:52.379095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:52.379198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:52.379287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:52.379372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:52.379451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:52.379539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:52.379629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:52.379721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:52.379838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:52.379952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571192725134791:2375];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:52.383786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:52.383838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:52.383911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:52.383932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:52.384094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:52.384118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:52.384187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:52.384207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:52.384262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:52.384281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:52.384309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:52.384328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:52.384572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:52.384607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:52.384757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:52.384780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:52.384874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:52.384895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:52.385043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:52.385082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:52.385167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:52.385186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:52.431803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571192725134830:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.431862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.436839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpYql::UuidPrimaryKeyDisabled |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 26367, MsgBus: 20930 2025-04-07T14:01:48.426261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571177055186557:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.609799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e90/r3tmp/tmpfU8r0l/pdisk_1.dat 2025-04-07T14:01:48.902965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:48.903056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:48.905790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:48.925717Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26367, node 1 2025-04-07T14:01:49.022568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.022593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.022606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.022709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20930 TClient is connected to server localhost:20930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.571179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:49.585786Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:49.602660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:49.716063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.716229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.716496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.716624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.716946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.717097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.717203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.717334Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.717435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.717543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.717646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.717780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571181350154376:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.759467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.759528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.759747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.759862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.759982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.760105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.760227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.760344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.760469Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.760568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.760713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.760822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571181350154382:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.793889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.793947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.794164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.794280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.794376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.794486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.794583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.794733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571181350154375:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:49.839973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:49.840066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:49.840087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:49.840247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:49.840272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:49.840397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:49.840440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:49.840499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.840524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.840576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.840602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.840919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.840954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.841112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.841134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.841304Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.841328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.841485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.841508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.841594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.841613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:49.842170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:49.842199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:49.842269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:49.842289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:49.842450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:49.842494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:49.842676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:49.842706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:49.842760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.842795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.842833Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.842853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.844797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.844858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.845087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.845129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.845705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.845747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.845920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.845952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.846065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.846084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:49.904142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:52.017173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571194235056708:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.017286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.020802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571194235056728:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.026755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:52.040962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571194235056730:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } >> KqpOlapAggregations::CountAllNoPushdown [GOOD] >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapDiagnostics [GOOD] Test command err: Trying to start YDB, gRPC: 62203, MsgBus: 28989 2025-04-07T14:01:48.595079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571178439673769:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.595348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e75/r3tmp/tmpGjPvhH/pdisk_1.dat 2025-04-07T14:01:49.060658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:49.060831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:49.061176Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:49.063102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62203, node 1 2025-04-07T14:01:49.217213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.217237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.217258Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.217360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28989 TClient is connected to server localhost:28989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.967494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:49.984822Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:50.004937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:50.109356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.109626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.109912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.110014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.110158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.110279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.110374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.110509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.110650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.110751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.110899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.111020Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571187029608881:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.148599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.148792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.149065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.149197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.149288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.149400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.149519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.149648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.149745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.149866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.149969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.150087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187029608882:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.185399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.185495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.185742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.185880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.185974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.186090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.186200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.186289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187029608883:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
;id=16; 2025-04-07T14:01:50.234156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.234551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:50.234576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:50.234676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:50.234696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:50.234859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.234883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.234966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.234993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.235038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.235065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.235120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.235160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.235435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.235495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.235657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.235687Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.235830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:50.235852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.235996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.236015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:50.236114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.236133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.236786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:50.236815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:50.236903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:50.236923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:50.237118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.237145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.237270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.237357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.237403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.237423Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.237468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.237491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.237744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.240237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.240457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.240485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.240629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:50.240650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.240870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.240905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:50.241033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.241056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.295336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:01:51.928803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571191324576469:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.928864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571191324576497:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.928955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.932966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:51.944231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571191324576507:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:52.946474Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512000, txId: 18446744073709551615] shutting down 2025-04-07T14:01:53.409990Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513004, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterCompare [FAIL] Test command err: Trying to start YDB, gRPC: 13554, MsgBus: 22318 2025-04-07T14:01:26.355145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571080806648775:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:26.355370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec3/r3tmp/tmpMr8TtE/pdisk_1.dat 2025-04-07T14:01:26.979820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:26.985605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:26.985684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:26.988575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13554, node 1 2025-04-07T14:01:27.215648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:27.215672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:27.215695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:27.215781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22318 TClient is connected to server localhost:22318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:27.967638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:27.993182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:29.942695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571093691551165:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:29.942842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:30.184389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:30.237314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:30.237578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:30.237884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:30.238014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:30.238113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:30.238208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:30.238324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:30.238429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:30.238527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:30.238587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:30.238652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:30.238706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571097986518538:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:30.244502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:30.244579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:30.244725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:30.244765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:30.245119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:30.245148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:30.245259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:30.245308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:30.245383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:30.245409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:30.245448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:30.245474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:30.245997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:30.246076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:30.246267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:30.246311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:30.246482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:30.246512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:30.246694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:30.246716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:30.246820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14 ... abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:46.675422Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[4:7490571169179591960:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:46.675519Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[4:7490571169179591960:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:46.675616Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[4:7490571169179591960:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:46.689243Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:46.689301Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:46.689404Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:46.689436Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:46.689616Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:46.689645Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:46.689734Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:46.689788Z node 4 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:46.689869Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:46.689898Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:46.689940Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:46.689970Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:46.690336Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:46.690374Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:46.690564Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:46.690592Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:46.690722Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:46.690748Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:46.690921Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:46.690946Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:46.691045Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:46.691076Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2025-04-07T14:01:46.779822Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571168961067467:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:46.779931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:46.780168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571168961067472:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:46.787762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:46.819899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571168961067474:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:01:47.275936Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571168961067535:2358], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn(): requirement false failed, message: Unsupported content: Decimal, code: 1 2025-04-07T14:01:47.277045Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzFjYmY2NmItMjA2NzBiYjUtNjQwNDI2ZjItMzQ1NWJlOTQ=, ActorId: [3:7490571168961067465:2349], ActorState: ExecuteState, TraceId: 01jr88xa0qa2w4bv3e9tv4tjx0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-04-07T14:01:47.894763Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490571151999722094:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:47.894831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:806, TString NKikimr::NKqp::StreamResultToYsonImpl(TIterator &, TVector *, bool, const NYdb::EStatus &, const TString &) [TIterator = NYdb::NTable::TScanQueryPartIterator]: (streamPart.EOS())
: Fatal: Execution, code: 1060
: Fatal: ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn(): requirement false failed, message: Unsupported content: Decimal, code: 1 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:806: StreamResultToYsonImpl @ 0x463C0D75 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:829: ?? @ 0x463BFD07 3. /-S/ydb/core/kqp/ut/common/columnshard.cpp:107: ReadData @ 0x4652AEF5 4. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:44: CheckQuery @ 0x17CE1BBE 5. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:166: operator() @ 0x17CE1BBE 6. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:179: Execute_ @ 0x17CE16E4 7. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: operator() @ 0x17CF05A7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> @ 0x17CF05A7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> @ 0x17CF05A7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17CF05A7 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17CF05A7 12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 13. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 14. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 15. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 16. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: Execute @ 0x17CEF773 17. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 18. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 19. ??:0: ?? @ 0x7FA30AC02D8F 20. ??:0: ?? @ 0x7FA30AC02E3F 21. ??:0: ?? 
@ 0x15851028 >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllNoPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22519, MsgBus: 12696 2025-04-07T14:01:48.696097Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571177742420688:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.696136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dde/r3tmp/tmpUBkwBz/pdisk_1.dat 2025-04-07T14:01:49.241650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:49.241782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:49.243693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22519, node 1 2025-04-07T14:01:49.339382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:49.428916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.428936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.428943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.429063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12696 TClient is connected to server localhost:12696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:50.124676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:50.144256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:50.154319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:50.248124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.248357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.248611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.249438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.249573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.249684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.249792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.249905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.250010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.250089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.250191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.250307Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571186332355965:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.284934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.285005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.285364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.285517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.285615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.285706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.285793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.285892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.286004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.286114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.286198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.286296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571186332355997:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.320333Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.320396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.320576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.320848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.321003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.321137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.321252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.321361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571186332355963:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
RN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.368837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.368932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.368953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.369013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.369055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.369087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.369112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.369368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.369420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.369591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.369627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.369737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:50.369759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.369910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.369930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2025-04-07T14:01:50.370047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.370072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.378702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:50.378749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:50.378841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:50.378864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:50.379037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.379062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.379149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.379179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.379257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.379294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.379327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.379346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.379602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.379651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.379817Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.379838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.379935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:50.379967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.380148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.380177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:50.380265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.380284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.436925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:52.938000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571194922291007:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.938060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.938411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571194922291019:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.944535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:52.955191Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:01:52.956867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571194922291021:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:53.713164Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571177742420688:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:53.713215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:54.235951Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513000, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] >> KqpOlapCompression::DisabledAlterCompression [GOOD] >> KqpPragma::OrderedColumns >> KqpYql::InsertIgnore |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 12297, MsgBus: 63692 2025-04-07T14:01:26.181586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571080779338732:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:26.181862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec9/r3tmp/tmp93TKhe/pdisk_1.dat 2025-04-07T14:01:26.703529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:26.728294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:26.728575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:26.733242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12297, node 1 2025-04-07T14:01:26.840548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:26.840566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:26.840576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:26.840691Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63692 TClient is connected to server localhost:63692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:27.443126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:27.455133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:27.471059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:27.581742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.581944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.582180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.582276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.582365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.582439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.582553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.582662Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:27.582769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:27.582873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.582957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:27.583033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571085074306560:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:27.623498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.623751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.623957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.624102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.624216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.624316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.624405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.624515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:27.624618Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:27.624748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:27.624987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:27.625084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571085074306561:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:27.654981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.655033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.655274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.655378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.655486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.655593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.655676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.655786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571085074306568:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:52.437177Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. 
Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:52.615574Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:52.615642Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:52.717239Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:52.717319Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:52.817473Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:52.817534Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:52.911790Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:52.911847Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:01:53.010233Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:53.025007Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:53.080644Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.080759Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.185039Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.185109Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.289146Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.289210Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.384823Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.384909Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.483273Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. 
SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.483339Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.539631Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:53.671242Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.671325Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.794736Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.794797Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.889503Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.889549Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:53.998104Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:53.998168Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:54.097273Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:01:54.097356Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1880:3010], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88x6p8f5a0qrbeetzhkhts. SessionId : ydb://session/3?node_id=2&id=NTA5NjE5ODktNTlhMjYwYWItMjE4ZDI1MTMtM2EzODIwNjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:01:54.173103Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:54.197228Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:00:23.594188Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:23.594272Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.625625Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:23.646793Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 
2025-04-07T14:00:23.647967Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-07T14:00:23.650627Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:00:23.653098Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T14:00:23.654945Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.677943Z node 1 :PERSQUEUE INFO: new Cookie default|bba94c56-8a1b4c7d-1ee14944-a48efb39_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.695000Z node 1 :PERSQUEUE INFO: new Cookie default|5246a729-599cfb75-1c178128-7595f0e9_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.722404Z node 1 :PERSQUEUE INFO: new Cookie default|c463fc6d-8dd7a9e4-e97331e4-7997910e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.731303Z node 1 :PERSQUEUE INFO: new Cookie default|2ca0523c-20985f5b-1c3bf3f4-ac0261cb_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.747609Z node 1 :PERSQUEUE INFO: new Cookie default|303eed35-4ce6507a-d02a36d4-d7483eb8_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:23.755772Z node 1 :PERSQUEUE INFO: new Cookie default|73d6b5f5-71190dcb-16ca999-902bd57b_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T14:00:24.369558Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:24.369655Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: 
[2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:180:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:181:2057] recipient: [2:179:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:182:2192] sender: [2:183:2057] recipient: [2:179:2191] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:24.477003Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:24.477091Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:182:2192] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:182:2192] sender: [2:259:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:00:26.174648Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:00:26.175773Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-07T14:00:26.185284 ... 
PQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_GET_THROTTLING Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [47:290:2283] sender: [47:390:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] 2025-04-07T14:01:55.079838Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:55.079903Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] Leader for TabletID 72057594037927938 is [48:151:2172] sender: [48:152:2057] recipient: [48:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:177:2057] recipient: [48:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.126744Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:55.127771Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 
10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-04-07T14:01:55.132869Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2197] 2025-04-07T14:01:55.135802Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:01:55.141471Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2198] 2025-04-07T14:01:55.143650Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.155884Z node 48 :PERSQUEUE INFO: new Cookie default|95089f9e-2b04552c-19c12a09-cdcd042f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.162466Z node 48 :PERSQUEUE INFO: new Cookie default|e04fcf27-5263f88b-8b805970-eb6daf42_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.199455Z node 48 :PERSQUEUE INFO: new Cookie default|6a23fb42-5d500d5c-3406d9ea-603581d0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.210414Z node 48 :PERSQUEUE INFO: new Cookie default|8c47b1e0-f334ce76-70c25c7b-d6e50806_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.221065Z node 48 :PERSQUEUE INFO: new Cookie default|ba35dfb-64a57c4b-4e28a2b4-644bd747_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:55.231870Z node 48 :PERSQUEUE INFO: new Cookie default|6fe5d06b-6834da7f-b7d330fe-2b1af6fe_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] 
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] 2025-04-07T14:01:55.943086Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:55.943163Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] Leader for TabletID 72057594037927938 is [49:151:2172] sender: [49:152:2057] recipient: [49:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:175:2057] recipient: [49:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.010275Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:01:56.011242Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-04-07T14:01:56.012245Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:182:2195] 2025-04-07T14:01:56.015304Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:01:56.024073Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:183:2196] 2025-04-07T14:01:56.026601Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.039177Z node 49 :PERSQUEUE INFO: new Cookie default|22912e6-273e588-eb50913d-65cf7799_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.046432Z node 49 :PERSQUEUE INFO: new Cookie default|4ecb84f0-ad55a724-3f588251-dde1c7b8_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.093102Z node 49 :PERSQUEUE INFO: new Cookie default|8e5c943d-1d69b341-ff0e21cf-bbc68122_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.103952Z node 49 :PERSQUEUE INFO: new Cookie default|974fcd70-767ed93f-93d9c0ef-3244cde7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.115254Z node 49 :PERSQUEUE INFO: new Cookie default|a9318e24-cb0d3780-f6ee0567-958c0d75_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:01:56.126683Z node 49 :PERSQUEUE INFO: new Cookie default|5d70ca70-9e08dabb-6927c75f-78c822b6_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::DisabledAlterCompression [GOOD] Test command err: Trying to start YDB, gRPC: 23269, MsgBus: 27490 2025-04-07T14:01:52.220490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571193690589438:2109];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.223688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d25/r3tmp/tmp51JlgK/pdisk_1.dat 2025-04-07T14:01:52.611669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.621625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.621724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.623746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23269, node 1 2025-04-07T14:01:52.769312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.769332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.769350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.769476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27490 TClient is connected to server localhost:27490 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:53.385806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:53.407327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/StandaloneTable` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:55.522825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571206575491899:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:55.522925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:55.826082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:55.916462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:55.916625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:55.916969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:55.917100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:55.917194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:55.917295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:55.917397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:55.917500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:55.917604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:55.917732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:55.917835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:55.917925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571206575491975:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:55.923813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:55.923878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:55.923977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:55.924006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:55.924148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:55.924171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:55.924255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:55.924280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:55.924332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:55.924355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:55.924387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:55.924409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:55.924846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:55.924878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:55.925068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:55.925106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:55.925253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:55.925280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:55.925432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:55.925455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:55.925557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:55.925582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:56.009960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571206575492097:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.010051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:56.032845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571210870459404:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.032940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.042350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:01:56.090119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:56.090190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:56.090409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:56.090539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:56.090631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:56.090727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:56.090841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:56.090955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:56.091053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:56.091146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:56.091235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:56.091323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571210870459429:2374];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:01:56.095420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:56.095469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:56.095540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:56.095562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:56.095682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:56.095705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:56.095772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:56.095795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:56.095838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:56.095858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:56.095888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:56.095908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:56.096251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:56.096301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:56.096447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:56.096472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:01:56.096583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:56.096605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:56.096779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:56.096803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:56.096897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:56.096917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:56.136321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571210870459468:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.136404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableStoreTest/ColumnTableTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:01:56.165075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571210870459486:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.165157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.174985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:01:56.259319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571210870459529:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.259386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] Test command err: Trying to start YDB, gRPC: 9007, MsgBus: 61372 2025-04-07T14:01:06.510900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570995577021945:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:06.515526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f57/r3tmp/tmpN6k0k0/pdisk_1.dat 2025-04-07T14:01:06.933917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:06.934949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:06.934990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:06.938130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9007, node 1 2025-04-07T14:01:07.047315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:07.047336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:07.047353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:07.047567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61372 TClient is connected to server localhost:61372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:07.627653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:07.643168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:07.659335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:07.812523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:07.813759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:07.814039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:07.814216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:07.814331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:07.814426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:07.814504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:07.814625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:07.814747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:07.814876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:07.814978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:07.815051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570999871989781:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:07.859292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:07.859347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:07.859547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:07.859655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:07.859746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:07.859832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:07.859927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:07.860017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:07.860136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:07.860244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:07.860347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:07.860449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570999871989783:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:07.896976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:07.897039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:07.897254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:07.897365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:07.897477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:07.897577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:07.897676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:07.897785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570999871989784:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:53.597178Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:53.777116Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:53.777201Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:53.869112Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. 
Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:53.869195Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:53.965123Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:53.965200Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.065098Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.065181Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.162368Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:54.173940Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:54.233108Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.233187Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.327625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:01:54.327700Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.425103Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.425181Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.521088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.521162Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.617109Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.617184Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:54.672186Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:54.808318Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.808406Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:01:54.929173Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:54.929244Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:55.025157Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:55.025241Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:55.123489Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:55.123570Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:55.216527Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:55.216616Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88wvzacgzx3m5x7fgtb1kd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmQxOGQ0MWEtOWM4ODg5MWQtZWJkMjYzNzktOGY2MTczNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:01:55.286452Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:55.312536Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 21529, MsgBus: 2770 2025-04-07T14:01:48.568347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571179042259881:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.568397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dda/r3tmp/tmpeuUma1/pdisk_1.dat 2025-04-07T14:01:49.067285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:49.072514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:49.072610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:49.078089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21529, node 1 2025-04-07T14:01:49.281046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.281066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.281074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.281162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2770 TClient is connected to server localhost:2770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.948298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:49.964680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:49.987016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:50.124811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.125046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.125353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.125481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.125596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.125712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.125844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.125950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.126072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.126181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.126312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.126449Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571187632195060:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.178618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.178852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.179078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.179209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.179300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.179397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.179495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.179577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.179653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.179851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.179952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.180027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571187632195070:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.211889Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.211952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.212126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.212219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.212311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.212402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.212520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.212609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187632195071:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
NAME=Granules;id=1; 2025-04-07T14:01:50.274914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:50.274998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:50.275019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:50.275138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.275158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.275246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.275276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.275367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.275386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.275413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.275431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.275662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.275776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.275933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.275956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.276045Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:50.276073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.276229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.276250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:50.276346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.276365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:50.347619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:52.880701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571196222130117:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.880811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.881065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571196222130129:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.885525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:52.894831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:01:52.895558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571196222130131:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:53.615995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571179042259881:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:53.616240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:56.085945Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"b3dc19d1-8144c41b-44d8e0a2-6f079ea2") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 
(ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"c1992172-2cd63c47-b1e16b1d-6c793cf5")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"2d334c01-8aa4f48a-4d177167-35dfe5e") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlap::MetadataMemoryManager >> KqpOlapAggregations::Json_Exists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 7573, MsgBus: 23520 2025-04-07T14:01:48.535091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571179182368060:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.535124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e1a/r3tmp/tmpkuqRrc/pdisk_1.dat 2025-04-07T14:01:49.083570Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:49.126425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:49.126513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:49.129273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7573, node 1 2025-04-07T14:01:49.278009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.278033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.278040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.278130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23520 TClient is connected to server localhost:23520 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:50.015002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:50.048974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:50.067866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:50.161003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.161182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.161412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.161539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.161655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.161769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.161859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.162054Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.162207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.162291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.162409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.162542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571187772303336:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.208046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.208111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.208301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.208405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.208511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.208620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.208745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.208850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:50.208944Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:50.209042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.209147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:50.209233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571187772303337:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:50.242669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:50.242744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:50.242961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:50.243064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:50.243140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:50.243254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:50.243348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:50.243444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571187772303338:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
-07T14:01:50.293505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:50.293589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:50.293608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:50.293749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:50.293776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:50.293942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:50.293984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:50.294027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:50.294049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:50.294091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:50.294122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:50.294342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:50.294438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:50.294631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:50.294672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:50.294807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
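The TX_COLUMNSHARD WARN lines above trace the ColumnShard start-up normalization chain: each normalizer (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks) is first registered during TTxInitSchema and then switched to and finished one by one during TTxUpdateSchema; the remaining steps and the final normalization_finished event follow below. The sketch that follows only illustrates that register/switch/finish pattern so the log is easier to read; it is not the YDB implementation, and the printed strings merely mimic the events seen in this log.

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Illustrative only: an ordered chain of one-shot normalization steps,
    // registered up front and then executed sequentially, echoing the
    // normalizer_register / normalizer_switched / normalizer_finished events above.
    struct TNormalizerStep {
        std::string Name;
        std::function<void()> Apply;  // the actual migration work would go here
    };

    int main() {
        std::vector<TNormalizerStep> chain;
        for (const char* name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
                                 "CleanInsertionDedup", "GCCountersNormalizer",
                                 "RestorePortionFromChunks", "SyncPortionFromChunks",
                                 "SyncMinSnapshotFromChunks", "RestoreV1Chunks_V2",
                                 "RestoreV2Chunks"}) {
            std::cout << "event=normalizer_register;description=CLASS_NAME=" << name << "\n";
            chain.push_back({name, [] { /* no-op in this sketch */ }});
        }
        for (const auto& step : chain) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME=" << step.Name << "\n";
            step.Apply();
            std::cout << "event=normalizer_finished;description=CLASS_NAME=" << step.Name << "\n";
        }
        std::cout << "event=normalization_finished\n";
        return 0;
    }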
2025-04-07T14:01:50.294834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:50.295016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:50.295042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:50.295156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:50.295185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-04-07T14:01:52.301199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571196362238198:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.301367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.301613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571196362238234:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:52.307363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:52.319211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571196362238236:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:01:57.014610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571179182368060:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:57.263794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:57.557714Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '877) '('"_id" '"804ef66c-a8fef73b-f5c8debc-4716a324") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate 
(AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '936) '('"_id" '"301323ab-c3e385af-e894be76-a18a40a5") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1359) '('"_id" '"8acff5d9-3a5f0fc1-77342ded-43700d8") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1371) '('"_id" '"eb2ce025-582e9b84-3b3db702-5f33db20")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpOlapIndexes::SchemeActualizationOnceOnStart >> KqpOlap::PredicateDoNotPushdown >> KqpOlap::SimpleRangeOlap |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpScripting::UnsafeTimestampCast >> KqpOlapAggregations::Aggregation_ResultCountExpr [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 13099, MsgBus: 14039 2025-04-07T14:01:54.978511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571201467680225:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:54.978783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e4e/r3tmp/tmpDrD10p/pdisk_1.dat 2025-04-07T14:01:55.624142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:55.624234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:55.638303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:55.664952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 13099, node 1 2025-04-07T14:01:55.921146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:55.921170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:55.921175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:55.921270Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14039 TClient is connected to server localhost:14039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:56.679001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:59.126962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571222942517210:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.127092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.785091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571222942517239:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.785176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.856319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571222942517254:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.869607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.964905Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571201467680225:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:59.965021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:59.996988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571222942517272:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.997094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:00.014768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:02:00.264124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571227237484657:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:00.264229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC >> KqpOlap::PredicatePushdownPartial [GOOD] >> KqpDataIntegrityTrails::UpsertEvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountExpr [GOOD] Test command err: Trying to start YDB, gRPC: 63156, MsgBus: 18915 2025-04-07T14:01:11.393391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571016202975046:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:11.405157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f37/r3tmp/tmprgK20X/pdisk_1.dat 2025-04-07T14:01:12.358180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:12.388364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:12.388457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:12.437839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63156, node 1 2025-04-07T14:01:12.617318Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:12.617352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:12.617365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:12.617473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18915 TClient is connected to server localhost:18915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:13.231805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
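The REQUEST, JSON Plan and AST dumps earlier in this log show the aggregation these OLAP tests issue as a scan query (SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id), with the COUNT pushed into the TableRangeScan through SsaProgram/GroupBy. Below is a minimal sketch of issuing the same query through the YDB C++ SDK scan-query interface; the endpoint, database path and row handling are assumptions for illustration and are not taken from the test code.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    #include <iostream>

    int main() {
        // Endpoint and database are assumptions for a local single-node setup.
        NYdb::TDriver driver(NYdb::TDriverConfig()
            .SetEndpoint("localhost:2136")
            .SetDatabase("/Root"));
        NYdb::NTable::TTableClient client(driver);

        const TString query = R"(
            --!syntax_v1
            PRAGMA Kikimr.OptUseFinalizeByKey;
            SELECT id, COUNT(level)
            FROM `/Root/tableWithNulls`
            WHERE id BETWEEN 6 AND 7
            GROUP BY id
            ORDER BY id;
        )";

        // Scan queries stream result parts back to the client.
        auto it = client.StreamExecuteScanQuery(query).GetValueSync();
        while (true) {
            auto part = it.ReadNext().GetValueSync();
            if (part.EOS()) {
                break;  // end of stream
            }
            if (!part.IsSuccess()) {
                std::cerr << part.GetIssues().ToString() << std::endl;
                break;
            }
            if (part.HasResultSet()) {
                NYdb::TResultSetParser parser(part.GetResultSet());
                while (parser.TryNextRow()) {
                    // Columns follow the SELECT list: id, COUNT(level).
                }
            }
        }
        driver.Stop(true);
        return 0;
    }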
2025-04-07T14:01:13.253592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:13.266981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:13.417194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.417390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.417663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.417785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.417899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.417994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.418098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.418201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.418305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.418404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.418508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.418595Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571024792910124:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.503023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.503126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.503481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.503594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.503681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.503777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.503897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.503988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:13.504110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:13.504191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:13.504305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:13.504406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571024792910125:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:13.556270Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:13.556341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:13.556563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:13.561029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:13.561840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:13.561976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:13.562068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:13.562175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571024792910126:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.317233Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:59.502929Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.503004Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:59.612315Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. 
Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.612393Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:59.717226Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.717286Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:59.811592Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.811662Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:01:59.941199Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:01:59.952537Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:01:59.992008Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:01:59.992088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.088865Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:02:00.088949Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.185146Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.185220Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.289144Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.289223Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.389100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.389171Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.469126Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:00.577662Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.577725Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:02:00.701140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.701213Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.811466Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.811531Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:00.909177Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:00.909256Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:01.004377Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:01.004450Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3012], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr88x1gcfa4er23e39fakxt7. SessionId : ydb://session/3?node_id=2&id=NzhlOTU4NmYtN2Q3YWEyMzktMmJjMWZkY2ItODM0Y2E0ZjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:02:01.094645Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:01.117547Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 25403, MsgBus: 17162 2025-04-07T14:01:33.714871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571110861451251:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:33.715099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb8/r3tmp/tmpqbbPC4/pdisk_1.dat 2025-04-07T14:01:34.203955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:34.251912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:34.251993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:34.255955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25403, node 1 2025-04-07T14:01:34.344510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:34.344541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:34.344549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:34.344631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17162 TClient is connected to server localhost:17162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:35.192837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:35.211577Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:35.226896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:35.412882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.413163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.413485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.413735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.413989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.414168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.414300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.414406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.414522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.414627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.414734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.414863Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571119451386374:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.454661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.454744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.454997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.455103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.455222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.455320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.455415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.455540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.455668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.455758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.455883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.455988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571119451386377:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.489273Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.489334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.489562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.489676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.489765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.489874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.489969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.490080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571119451386373:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.080139Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.305455Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.305529Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.409140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.409215Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.515678Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.515754Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.616100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.616183Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.721486Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:00.721578Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:00.797062Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.797135Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.897100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:02:00.897230Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:00.988221Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:00.988311Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.078305Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.078386Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.174905Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.175007Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.223061Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:01.336943Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.337039Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:01.465638Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.465726Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.561094Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.561181Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.650933Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.651006Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:01.743214Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:01.743295Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1735:3002], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGQ3MjczMGEtYTAxNWYxZmQtZDk3MzAyNDEtOTkwZjY2OTU=. TraceId : 01jr88xdw44q0m3rpd0b6tnr82. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:01.842983Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:01.843140Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownPartial [GOOD] Test command err: Trying to start YDB, gRPC: 24184, MsgBus: 14979 2025-04-07T14:01:48.032984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571176282081911:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.034665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ea7/r3tmp/tmpTl6pZQ/pdisk_1.dat 2025-04-07T14:01:48.603882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:48.612621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:48.612788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:48.615510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24184, node 1 2025-04-07T14:01:48.793255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:48.793278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:48.793288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:48.793402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14979 TClient is connected to server localhost:14979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.388486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:49.401666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:49.424740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:49.556485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.556792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.557052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.557176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.557284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.557375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.557480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.557574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.557683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.557791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.557919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.558028Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571180577049753:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.596184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.596276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.596484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.596598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.597029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.597141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.597224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.597356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.597486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.597575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.597690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.597810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180577049748:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.627152Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.627197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.627336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.627408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.627475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.627534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.627583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.627661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180577049749:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
tionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:49.674904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.674932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.674962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.674978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.675237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.675252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.675358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.675376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.675455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.675467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.675606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.675618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.675668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.675677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:49.727635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5;
--- Run normal query ---
--!syntax_v1
PRAGMA Kikimr.OptEnableOlapPushdown = 'false';
SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp`
2025-04-07T14:01:51.613796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571189166984684:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:01:51.613850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571189166984673:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:01:51.614275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:01:51.619887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-04-07T14:01:51.630350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571189166984687:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:52.501937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512003, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp` 2025-04-07T14:01:52.871595Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512290, txId: 18446744073709551615] shutting down 2025-04-07T14:01:53.019509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571176282081911:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:53.019612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2025-04-07T14:01:53.557291Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513000, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2025-04-07T14:01:54.110276Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034513487, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` 2025-04-07T14:01:54.726591Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034514005, txId: 18446744073709551615] shutting down 2025-04-07T14:01:55.087568Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034515000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` 2025-04-07T14:01:55.813423Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034515027, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` 2025-04-07T14:01:56.209245Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034516000, txId: 
18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2025-04-07T14:01:56.968649Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034516147, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2025-04-07T14:01:57.338359Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034517001, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2025-04-07T14:01:58.198157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034518002, txId: 18446744073709551615] shutting down 2025-04-07T14:01:58.607104Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034518149, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` 2025-04-07T14:01:59.856312Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034519000, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` 2025-04-07T14:02:00.750831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034520004, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` 2025-04-07T14:02:01.960013Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034521005, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` 2025-04-07T14:02:02.658731Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034522000, txId: 18446744073709551615] shutting down >> KqpOlapSysView::StatsSysViewRanges >> KqpOlapAggregations::AggregationCountPushdown >> KqpOlapBlobsSharing::ChangeSchemaAndSplit >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 |90.1%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> KqpOlapBlobsSharing::TableReshardingConsistency64 [FAIL] >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |90.1%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut >> KqpOlap::MetadataMemoryManager [GOOD] >> KqpOlap::PredicatePushdownNulls >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] >> KqpOlapIndexes::IndexesInBS >> KqpOlap::SimpleRangeOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::MetadataMemoryManager [GOOD] Test command err: Trying to start YDB, gRPC: 18435, MsgBus: 15868 2025-04-07T14:02:00.305133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571230029904691:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:00.305276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ce7/r3tmp/tmpqJPUz3/pdisk_1.dat 2025-04-07T14:02:00.923930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:00.924022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:00.930133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:00.966296Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18435, node 1 2025-04-07T14:02:01.105315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:01.105335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:01.105342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:01.105443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15868 TClient is connected to server localhost:15868 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:01.838329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:01.851797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:01.861624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:02.005975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:02.006186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:02.006443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:02.006556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:02.006706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:02.006797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:02.006909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:02.007048Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:02.007169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:02.007273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.007429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:02.007613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571234324872529:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:02.045569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:02.045628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:02.045825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:02.045923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:02.046020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:02.046127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:02.046223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:02.046334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:02.046418Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:02.046533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.046615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:02.046692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571234324872528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:02.096010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:02.096065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:02.096273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:02.096394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:02.096512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:02.096652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:02.096791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:02.096899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571234324872536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:02.139950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:02.140164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:02.140185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:02.140246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:02.140276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:02.140317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:02.140332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:02.140364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:02.140389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:02.140686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:02.140738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:02.140907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:02.140933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.141089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:02.141111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:02.141313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:02.141360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:02.141454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:02.141472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:02.141886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:02.141912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:02.141988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:02.142006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:02.142158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:02.142195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:02.142291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:02.142325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:02.142372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:02.142390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:02.142440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:02.142464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:02.142742Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:02.142774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:02.142945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:02.142966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.143104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:02.143129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:02.144562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:02.144597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:02.144737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:02.144757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:02.196348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2025-04-07T14:02:05.280789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571230029904691:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:05.280853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:05.388973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571251504742066:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:05.389041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571251504742073:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:05.389100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:05.396480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:05.423736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571251504742095:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:08.567781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034526003, txId: 18446744073709551615] shutting down 2025-04-07T14:02:08.630843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.251957Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034529000, txId: 18446744073709551615] shutting down >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> KqpOlapSysView::StatsSysViewBytesDictStatActualization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::TableReshardingConsistency64 [FAIL] Test command err: Trying to start YDB, gRPC: 7999, MsgBus: 62007 2025-04-07T14:01:25.028575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571077744600128:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:25.028614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed3/r3tmp/tmpfbepVe/pdisk_1.dat 2025-04-07T14:01:25.594093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:25.619663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:25.619809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:25.626435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7999, node 1 2025-04-07T14:01:25.837919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:25.837938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:25.837947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:25.838041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62007 TClient is connected to server localhost:62007 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:26.551313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:26.584997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:26.935002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:26.935318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:26.935641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:26.935829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:26.935956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:26.936098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:26.936239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:26.936378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:26.936527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:26.936697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:26.936823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:26.936917Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571082039568054:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:26.987583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:26.991158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:26.991458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:26.991650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:26.991761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:26.991888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:26.992013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:26.992129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:26.992222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:26.992337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:26.992486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:26.992593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571082039568056:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:27.027453Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:27.027518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:27.027761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:27.027888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:27.028015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:27.028157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:27.028243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:27.028365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052:2299];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:27.028472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571082039568052 ... ng down [[230000u]] 2025-04-07T14:01:42.769789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:01:42.771813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-07T14:01:42.800638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:42.809270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:0) 2025-04-07T14:01:44.715473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 2025-04-07T14:01:44.736475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-04-07T14:01:44.750185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) RESHARDING_WAIT_FINISHED... (SPLIT:0) 2025-04-07T14:01:46.723894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:46.753976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:46.770276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) RESHARDING_WAIT_FINISHED... (SPLIT:0) 2025-04-07T14:01:48.479159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:48.495669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:48.508461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720668:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) 2025-04-07T14:01:49.724391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720669:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:0) RESHARDING_FINISHED 2025-04-07T14:01:49.944357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:49.947968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725657:0, at schemeshard: 72057594046644480 2025-04-07T14:01:50.014200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725658:0, at schemeshard: 72057594046644480 2025-04-07T14:01:50.029310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725659:0, at schemeshard: 72057594046644480 2025-04-07T14:01:50.741411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725660:0, at schemeshard: 72057594046644480 2025-04-07T14:01:50.757931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725661:0, at schemeshard: 72057594046644480 2025-04-07T14:01:50.781287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725662:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:51.765157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725663:0, at schemeshard: 72057594046644480 2025-04-07T14:01:51.785042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725664:0, at schemeshard: 72057594046644480 2025-04-07T14:01:51.808376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725665:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:52.735648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725666:0, at schemeshard: 72057594046644480 2025-04-07T14:01:52.768614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725667:0, at schemeshard: 72057594046644480 2025-04-07T14:01:52.782524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725668:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:1) 2025-04-07T14:01:53.735312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725669:0, at schemeshard: 72057594046644480 2025-04-07T14:01:53.756198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725670:0, at schemeshard: 72057594046644480 2025-04-07T14:01:53.777294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725671:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:54.754238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725672:0, at schemeshard: 72057594046644480 2025-04-07T14:01:54.770833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725673:0, at schemeshard: 72057594046644480 2025-04-07T14:01:54.786568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725674:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:56.491093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725675:0, at schemeshard: 72057594046644480 2025-04-07T14:01:56.534678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725676:0, at schemeshard: 72057594046644480 2025-04-07T14:01:56.550325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725677:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:57.542352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725678:0, at schemeshard: 72057594046644480 2025-04-07T14:01:57.554201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725679:0, at schemeshard: 72057594046644480 2025-04-07T14:01:57.571590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725680:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:01:58.762331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725681:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:1) RESHARDING_FINISHED 2025-04-07T14:02:00.954766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034519024, txId: 18446744073709551615] shutting down [[57685u]]
strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57685u]]|[[230000u]])
0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F
1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800
2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2
3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x17AAF25E
4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:401: Execute_ @ 0x17AAABB0
5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428
12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428
13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098
14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23
15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965
16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C
17. ??:0: ?? @ 0x7FD681244D8F
18. ??:0: ?? @ 0x7FD681244E3F
19. ??:0: ??
@ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 64670, MsgBus: 14707 2025-04-07T14:01:48.138889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571175382457885:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.139184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ea6/r3tmp/tmpZ9WCPu/pdisk_1.dat 2025-04-07T14:01:48.661155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:48.684690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:48.684768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:48.689294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64670, node 1 2025-04-07T14:01:48.816814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:48.816834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:48.816845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:48.816947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14707 TClient is connected to server localhost:14707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.675776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:49.697389Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:49.706818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:49.829148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.829309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.829599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.829677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.829736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.829824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.829905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.830012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.830151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.830269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.830338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.830423Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571179677425709:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.871479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.871554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.871826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.871949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.872076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.872176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.872272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.872384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.872506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.872607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.872755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.872863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571179677425713:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.905199Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.905256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.905482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.905607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.905740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.905838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.905927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.906049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571179677425710:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
e 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:49.963168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:49.963199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:49.963234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.963506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.963538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.963713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.963736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.963870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.963892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.964674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.964716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.964818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.964837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2025-04-07T14:01:51.738038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188267360609:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.738105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188267360598:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.738248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.742739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:51.758847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571188267360612:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:03.660827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:03.660853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:07.601096Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571175382457885:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:07.601425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:08.758768Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xd8nfnrcepzyakckqvbq", SessionId: ydb://session/3?node_id=1&id=YmYyZTAxZDUtMzYxZTdiODctYTcwZTZmMmUtN2U0MTVlNjQ=, Slow query, duration: 17.025215s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n SELECT\n id, AVG(level)\n FROM `/Root/tableWithNulls`\n WHERE id > 4 AND id < 7\n GROUP BY id\n ORDER BY id;\n \n", parameters: 0b 2025-04-07T14:02:08.773310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node 
Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1773) '('"_id" '"21b21c6c-9ad52a3e-92d5a8ab-65c258a4") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '0)) (let $27 '((Nothing $2) $26)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1832) '('"_id" '"fb4f9378-e7e8726d-8bb97978-4fdf810e") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3420) '('"_id" '"da2c5a8b-d7bb8314-6885dd2d-23b64d1d") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3432) '('"_id" '"1bd837af-82d96069-36888fb8-56ad8542")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesDictStatActualization [GOOD] 
Test command err: Trying to start YDB, gRPC: 30135, MsgBus: 27334 2025-04-07T14:01:33.164239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571112630887473:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:33.164367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eba/r3tmp/tmpp62DiB/pdisk_1.dat 2025-04-07T14:01:33.736262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:33.736551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:33.738054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:33.742649Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30135, node 1 2025-04-07T14:01:33.916387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:33.916408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:33.916416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:33.916477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27334 TClient is connected to server localhost:27334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:34.890457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:34.907199Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:34.919146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:35.031382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.031605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.031880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.031965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.032040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.032109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.032188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.032269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.032350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.032428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.032509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.032583Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571116925855245:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.062881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.062937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.063140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.063212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.063270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.063344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.063398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.063831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:35.063939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:35.064065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:35.064162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:35.064292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571116925855246:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:35.094260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:35.094342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:35.094664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:35.094791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:35.094888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:35.095120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:35.095256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:35.095365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571116925855247:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
;};]; 2025-04-07T14:02:11.560476Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.562025Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.563409Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.564920Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.566431Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.567791Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.569331Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.570687Z node 1 :TX_COLUMNSHARD CRIT: 
fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.572028Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.573548Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.574896Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.576282Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.577837Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.579377Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.581346Z node 1 :TX_COLUMNSHARD CRIT: 
fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.582754Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.584075Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.585537Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.586906Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.588203Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.589700Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.591052Z node 1 :TX_COLUMNSHARD CRIT: 
fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2025-04-07T14:02:11.592361Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; Rows: 266613 TierName: __DEFAULT Activity:  ColumnRawBytes: 17063232 Details: {"primary_key_max":"799992;","primary_key_min":"0;","snapshot_min":{"plan_step":1744034496001,"tx_id":18446744073709551615},"snapshot_max":{"plan_step":1744034496001,"tx_id":18446744073709551615}} TabletId: 72075186224037888 Kind: INSERTED PortionId: 4 ColumnBlobBytes: 1214208 CompactionLevel: 0 IndexRawBytes: 8 IndexBlobBytes: 8 Optimized: Stats: PathId: 3 Rows: 267322 TierName: __DEFAULT Activity:  ColumnRawBytes: 17108608 Details: {"primary_key_max":"799997;","primary_key_min":"1;","snapshot_min":{"plan_step":1744034496001,"tx_id":18446744073709551615},"snapshot_max":{"plan_step":1744034496001,"tx_id":18446744073709551615}} TabletId: 72075186224037889 Kind: INSERTED PortionId: 4 ColumnBlobBytes: 1217248 CompactionLevel: 0 IndexRawBytes: 8 IndexBlobBytes: 8 Optimized: Stats: PathId: 3 Rows: 266065 TierName: __DEFAULT Activity:  ColumnRawBytes: 17028160 Details: {"primary_key_max":"799999;","primary_key_min":"2;","snapshot_min":{"plan_step":1744034496001,"tx_id":18446744073709551615},"snapshot_max":{"plan_step":1744034496001,"tx_id":18446744073709551615}} TabletId: 72075186224037890 Kind: INSERTED PortionId: 4 ColumnBlobBytes: 1211576 CompactionLevel: 0 IndexRawBytes: 8 IndexBlobBytes: 8 Optimized: Stats: PathId: 3 2025-04-07T14:02:11.729031Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034531474, txId: 281474976710681] shutting down >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 62865, MsgBus: 7162 2025-04-07T14:02:01.860081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571233178270096:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:01.860266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cd9/r3tmp/tmpeeolR8/pdisk_1.dat 2025-04-07T14:02:02.609928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:02.622347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:02.622422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:02.633768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 62865, node 1 2025-04-07T14:02:02.953372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:02.953392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:02.953407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:02.953489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7162 TClient is connected to server localhost:7162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:04.451878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:04.585403Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:04.598715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:04.854676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:04.854857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:04.855138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:04.855259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:04.855385Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:04.855482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:04.855563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:04.855660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:04.855760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:04.855865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:04.855962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:04.856046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571246063172506:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:04.950686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:04.950761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:04.950966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:04.951077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:04.951163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:04.951245Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:04.951329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:04.951411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:04.951513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:04.951620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:04.951712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:04.951800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571246063172507:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:05.073660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:05.073741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:05.073950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:05.074052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:05.074158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:05.074288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:05.074417Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:05.074501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571246063172547:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... d=1; 2025-04-07T14:02:05.205563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:05.205662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:05.205699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:05.205862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:05.205884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:05.205964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:05.205991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:05.206046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:05.206076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:05.206112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:05.206132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:05.206408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:05.206446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:05.206592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T14:02:05.206620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.206717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:05.206750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:05.206924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:05.206946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:05.207025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:05.207043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:05.208077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:05.208108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:05.208187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:05.208214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:05.208439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:05.208472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:05.208544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:05.208584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:05.208634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:05.212708Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:05.212828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:05.212864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:05.220435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:05.220503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:05.220697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:05.220731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.220863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:05.220892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:05.221081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:05.225098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:05.225288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:05.225324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:02:05.331233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:02:06.856882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571233178270096:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:06.856950Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:09.363299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571267538009321:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:09.363454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:09.363822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571267538009349:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:09.367869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:09.398447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:02:09.400960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571267538009351:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:11.418118Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034530000, txId: 18446744073709551615] shutting down >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] >> KqpOlapAggregations::Json_Exists [GOOD] >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL >> KqpOlapTiering::EvictionIncreaseDuration [GOOD] >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait [FAIL] |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> KqpOlap::PredicateDoNotPushdown [GOOD] >> KqpOlapAggregations::Aggregation [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |90.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] Test command err: Trying to start YDB, gRPC: 22309, MsgBus: 18134 2025-04-07T14:02:01.417831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571232934127566:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:01.417972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cc7/r3tmp/tmpuNd1aH/pdisk_1.dat 2025-04-07T14:02:01.925756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:01.925836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:01.927098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:01.953752Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22309, node 1 2025-04-07T14:02:02.029160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:02.029199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:02.029210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:02.029297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18134 TClient is connected to server localhost:18134 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:03.195039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:03.237247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:03.447775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.447957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.448275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.448406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.448502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.448612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.449155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.449327Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:03.449436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:03.449530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:03.449616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:03.449727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571241524062701:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:03.546204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.546282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.546531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.546633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.546754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.546859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.546967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.547061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:03.547164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:03.547251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:03.547351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:03.547466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571241524062704:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:03.736455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.736524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.736772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.736876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.736955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.737052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.737135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.737244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571241524062742:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:03.737345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905712415240627 ... 
883285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:03.883304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:04.003192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2025-04-07T14:02:06.977390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571232934127566:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:06.978387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:10.228992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571267293866893:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.258435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.260273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.407027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834252:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.420922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.429738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.546227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834301:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.546307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.549730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.637726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834348:2454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.637808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.640285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.709124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834395:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.709218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.714070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.783041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834442:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.783124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.803281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:10.878568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834489:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.878658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.889139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:11.000820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:11.003614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571271588834536:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.006250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.085534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571275883801879:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.085631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.089618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-07T14:02:11.173235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571275883801926:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.173325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.185912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-07T14:02:11.344840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571275883802033:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.353200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.415126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571275883801997:2487];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:11.416469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571275883802007:2494];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:11.457371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571275883801975:2485];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:11.458829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571275883802058:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.480079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480 2025-04-07T14:02:11.502485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571275883801986:2486];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:11.537069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-04-07T14:02:11.537506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571275883802075:2513], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-04-07T14:02:13.380441Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034532000, txId: 18446744073709551615] shutting down |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicateDoNotPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 6284, MsgBus: 30720 2025-04-07T14:02:01.901673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571231506406093:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:01.901723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cc6/r3tmp/tmpktCMG3/pdisk_1.dat 2025-04-07T14:02:02.712227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:02.717928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:02.718012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:02.721814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6284, node 1 2025-04-07T14:02:03.205109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:03.205130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:03.205136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:03.205251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30720 TClient is connected to server localhost:30720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:04.850888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:04.895713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:04.921211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:05.097522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:05.097732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:05.097959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:05.098056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:05.098168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:05.098255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:05.098333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:05.098420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:05.098540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:05.098646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.098739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:05.098846Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571248686275956:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:05.165178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:05.165233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:05.165449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:05.165544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:05.165640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:05.165738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:05.165821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:05.165906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:05.165997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:05.166098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.166211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:05.166297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571244391308643:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:05.249859Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:05.255199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:05.255457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:05.255574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:05.255657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:05.255812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:05.255910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:05.256022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571244391308644:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
05.358703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:05.358741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:05.358828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:05.358849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:05.359003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:05.359024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:05.359097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:05.359116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:05.359157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:05.359177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:05.359205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:05.359225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:05.359487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:05.359526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:05.362120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:05.362154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.362299Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:05.362325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:05.362507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:05.362527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:05.362612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:05.362629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:05.363193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:05.363220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:05.363298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:05.363330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:05.363488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:05.363511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:05.363582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:05.363604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:05.363694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:05.363714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:05.363754Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:05.363779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:05.364043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:05.364078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:05.364266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:05.364297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:05.364424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:05.364451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:05.364650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:05.364691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:05.364796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:05.364825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:05.504206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2025-04-07T14:02:06.904913Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571231506406093:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:06.904968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:11.240992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7490571274456080081:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.241123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.241593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571274456080108:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.245608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:11.263072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:02:11.263383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571274456080110:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation [GOOD] Test command err: Trying to start YDB, gRPC: 6547, MsgBus: 12476 2025-04-07T14:01:52.042557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571194307775462:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.042583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cf8/r3tmp/tmpnNeHEs/pdisk_1.dat 2025-04-07T14:01:52.704626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.704740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.709806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:52.729030Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6547, node 1 2025-04-07T14:01:52.888219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.888245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.888253Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.888378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12476 TClient is connected to server localhost:12476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:53.628022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:53.657056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:53.675186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:53.834229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.834430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.834706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.834835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.834960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.835050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.835126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.835216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.835334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.835461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.835564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.835648Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571198602743429:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.927216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.927286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.927558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.927674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.927785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.927895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.928005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.928118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.928278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.928431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.928540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.928655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571198602743443:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.970472Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.970566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.970806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.970908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.970992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.971091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.971172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.971337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571198602743446:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
hema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:54.019992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:54.020263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:54.020320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:54.020466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:54.020487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.020645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:54.020696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:54.020852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:54.020874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:54.020993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:54.021024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:54.021764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:54.021794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:54.021911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:54.021938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:54.022081Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:54.022101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:54.022199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:54.022284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:54.022354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:54.022377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:54.022411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:54.022438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:54.022686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:54.022735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:54.022934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:54.022961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.023068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:54.023088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:54.023244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:54.023266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2025-04-07T14:01:54.023363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:54.023381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:01:54.088540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:01:56.365179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571211487645611:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.365291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.365656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571211487645642:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:56.378998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:01:56.398379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571211487645644:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:01:57.042796Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571194307775462:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:57.042852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:58.180446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034517001, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:01:59.444906Z node 1 :KQP_YQL WARN: TraceId: 01jr88xp8rba0r5tpvb6j1pssm, SessionId: CompileActor 2025-04-07 14:01:59.444 WARN ydb-core-kqp-ut-olap(pid=299779, tid=0x00007F29DEFAB640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2025-04-07T14:02:07.700882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:07.700919Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:13.143180Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xp8rba0r5tpvb6j1pssm", SessionId: ydb://session/3?node_id=1&id=MThlMmMxZTQtZTI0Y2MzMmEtYjM3YWNhZTEtNWFhYTI0OTY=, Slow query, duration: 13.819387s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n SELECT\n COUNT(*), MAX(`resource_id`), MAX(`timestamp`), MIN(LENGTH(`message`))\n FROM `/Root/olapStore/olapTable`\n ", parameters: 0b 2025-04-07T14:02:13.143623Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034520000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:14.120421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034533000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::EvictionIncreaseDuration [GOOD] Test command err: Trying to start YDB, gRPC: 15050, MsgBus: 23675 2025-04-07T14:00:58.242067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570960260782418:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:58.242126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/u6pr/000f75/r3tmp/tmp7wIuqe/pdisk_1.dat 2025-04-07T14:00:58.646012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:58.669231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:58.669350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:58.672704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15050, node 1 2025-04-07T14:00:58.735807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:58.735832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:58.735849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:58.735963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23675 TClient is connected to server localhost:23675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:59.311379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:59.323244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:01.269095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:01.374531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.374533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.374724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.374801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.375086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.375093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.375208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.375230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.375436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.375440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.375562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.375581Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.375681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.375688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.376058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:01.376094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:01.376310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:01.376376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:01.376435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.376483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:01.376539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:01.376638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:01.376653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570973145684884:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:01.376801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570973145684883:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:01.379738Z node 1 
:TX_TIERING INFO: fline=manager.cpp:41;event=start_subscribing_metadata; 2025-04-07T14:01:01.410629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:01.410695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:01.410925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:01.411055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:01.411160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:01.411254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:01.411388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:01.411526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570973145684899:2304];t ... 
;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1176808;columns=5; WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 waiting actualization: 0/0.000027s waiting actualization: 0/1.000231s waiting actualization: 0/2.004009s waiting actualization: 0/3.007612s waiting actualization: 0/4.008059s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: TierName: __DEFAULT RawBytes: 119239000 2025-04-07T14:01:42.930470Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034497529, txId: 281474976710817] shutting down 2025-04-07T14:01:42.959639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710824:0, at schemeshard: 72057594046644480 waiting actualization: 0/0.000014s waiting actualization: 0/1.000122s waiting actualization: 0/2.000229s waiting actualization: 0/3.000342s waiting actualization: 0/4.002542s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2025-04-07T14:01:53.763428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034508538, txId: 281474976710841] shutting down TierName: tier1 RawBytes: 119239000 2025-04-07T14:01:53.892863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710843:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2025-04-07T14:02:14.331338Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xnx41zwmh7n6vp0a3ztt", SessionId: ydb://session/3?node_id=1&id=YzI1NThlMmItZjZiMDhiOGYtOGU1MmJmNzEtNTFjOTE1ZTk=, Slow query, duration: 15.380662s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT\n TierName, SUM(ColumnRawBytes) As RawBytes\n FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats`\n WHERE Activity == 1\n GROUP BY TierName\n ", 
parameters: 0b 2025-04-07T14:02:14.338747Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034519888, txId: 281474976710864] shutting down 2025-04-07T14:02:14.346833Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xpmwbrq50whzyxr2qjkf", SessionId: ydb://session/3?node_id=1&id=ZjNkNjUxYzEtYmQzNTE5NzgtYWMwNGVlNjUtZDJjNzUwYTU=, Slow query, duration: 14.638472s, status: STATUS_CODE_UNSPECIFIED, user: metadata@system, results: 0b, text: "SELECT * FROM `//Root/.metadata/tiering/tiers`;\n", parameters: 0b TierName: __DEFAULT RawBytes: 119239000 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 1828, MsgBus: 18585 2025-04-07T14:02:01.116748Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571233842495547:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:01.117052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ce0/r3tmp/tmpL5cJPY/pdisk_1.dat 2025-04-07T14:02:01.885257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:01.906365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:01.906480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:01.913813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1828, node 1 2025-04-07T14:02:02.054384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:02.054412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:02.054421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:02.054506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18585 TClient is connected to server localhost:18585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:03.395845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:03.426038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:03.441147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:03.573768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.672807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.673097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.673207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.673294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.673364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.673444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.673532Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:03.673609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:03.673696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:03.673788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:03.673851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571242432430654:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:03.726501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.726556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.726768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.726858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.726950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.727043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.727127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.727226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:03.727315Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:03.727395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:03.727500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:03.727582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571242432430657:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:03.862055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:03.872859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:03.889135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:03.889267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:03.889352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:03.889440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:03.889548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:03.889664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571242432430656:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:04.028641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:04.029195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:04.029258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:04.029602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:04.029629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:04.029786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:04.029807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:04.029894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:04.029913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:04.030080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:04.030104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:04.030180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:04.030197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; 2025-04-07T14:02:06.112782Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571233842495547:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:06.112863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:10.786823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571272497202056:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.786909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.790847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571272497202091:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:10.794856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:10.833024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571272497202093:2424], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:13.011974Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034532002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 1","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 1","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 '('"id" $1)) (let $3 (OptionalType (DataType 'Bool))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $10 '('"id" '"jsondoc" '"jsonval" '"level")) (let $11 (OptionalType (DataType 'JsonDocument))) (let $12 '($3)) (let $13 (ResourceType '"JsonNode")) (let $14 (OptionalType $13)) (let $15 '((ResourceType '"JsonPath"))) (let $16 (DataType 'Utf8)) (let $17 (DictType $16 $13)) (let $18 '($17)) (let $19 (CallableType '() $12 '($14) $15 $18 $12)) (let $20 '('('"strict"))) (let $21 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $19 (VoidType) '"" $20)) (let $22 (lambda '($41) (block '( (let $42 '((DataType 'Json) '"" '1)) (let $43 (CallableType '() '($13) $42)) (let $44 (Udf '"Json2.Parse" (Void) (VoidType) '"" $43 (VoidType) '"" '())) (return (Just (Apply $44 $41))) )))) (let $23 (Nothing $14)) (let $24 (CallableType '() $15 '($16))) (let $25 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $24 (VoidType) '"" '())) (let $26 (Apply $25 (Utf8 '"$.col1"))) (let $27 (Dict $17)) (let $28 (Bool 'false)) (let $29 (Just $28)) (let $30 (KqpWideReadOlapTableRanges $9 (Void) $10 '() '() (lambda '($31) (block '( (let $32 (DataType 'Json)) (let $33 (StructType $2 '('"jsondoc" $11) '('"jsonval" (OptionalType $32)) '('"level" (OptionalType $1)))) (let $34 (KqpOlapApply $33 '('"jsonval") (lambda '($39) (block '( (let $40 (IfPresent $39 $22 $23)) (return (Apply $21 $40 $26 $27 $29)) ))))) (let $35 '('?? $34 $28)) (let $36 '('eq '"level" (Int32 '1))) (let $37 '('?? 
$36 $28)) (let $38 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $31 (KqpOlapAnd $35 $37)) $38)) ))))) (return (FromFlow (NarrowMap $30 (lambda '($45 $46 $47) (block '( (let $48 (IfPresent $47 $22 $23)) (let $49 (Apply $21 $48 $26 $27 $29)) (let $50 (CallableType '() $12 '($11) $15 $18 $12)) (let $51 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $50 (VoidType) '"" $20)) (let $52 (Apply $51 $46 $26 $27 $29)) (return (AsStruct '('"column1" $49) '('"column2" $52) '('"id" $45))) )))))) ))) '('('"_logical_id" '929) '('"_id" '"26801097-bb9c1553-a3369706-61025812")))) (let $5 (DqCnUnionAll (TDqOutput $4 '"0"))) (let $6 (DqPhyStage '($5) (lambda '($53) $53) '('('"_logical_id" '1451) '('"_id" '"d0a876f5-49b0986e-e4b6184b-9a223f8a")))) (let $7 '('"id" '"column1" '"column2")) (let $8 (DqCnResult (TDqOutput $6 '"0") $7)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $6) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $3) '('"column2" $3) $2)) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a65/r3tmp/tmpOgRWjA/pdisk_1.dat 2025-04-07T13:59:18.627945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T13:59:18.927707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:18.969924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:18.970069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:18.979879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14142, node 1 2025-04-07T13:59:19.192372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:19.192396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:19.192403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:19.192505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:19.559890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.580428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T13:59:19.581890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.585502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T13:59:19.585705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T13:59:19.585728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T13:59:19.588310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T13:59:19.588347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T13:59:19.593340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:19.595346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T13:59:19.612048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034359648, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T13:59:19.612102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T13:59:19.612418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T13:59:19.619892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T13:59:19.620101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T13:59:19.620163Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T13:59:19.620299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T13:59:19.620347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T13:59:19.620432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T13:59:19.623803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T13:59:19.623860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T13:59:19.623878Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T13:59:19.623961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T13:59:33.888839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T13:59:33.888877Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:18.980949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-07T14:00:18.981031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-07T14:00:18.981088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2025-04-07T14:01:18.984884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-07T14:01:18.984972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-07T14:01:18.985024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 >> KqpOlap::ManyColumnShards [GOOD] >> KqpOlapSysView::StatsSysViewFilter >> KqpOlapAggregations::Json_GetValue_ToString >> KqpOlapAggregations::Aggregation_MinL >> KqpOlap::PredicatePushdownWithParameters >> KqpYql::JsonCast [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] |90.2%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix >> KqpOlap::CountWhereColumnIsNull >> KqpOlapWrite::TierDraftsGCWithRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShards [GOOD] Test command err: 2025-04-07T14:01:59.522242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:01:59.563906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:01:59.564373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:01:59.564524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:59.565345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:01:59.565390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d64/r3tmp/tmpy4aD3r/pdisk_1.dat 2025-04-07T14:02:00.387228Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:00.640500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:02:00.800728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:00.800847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:00.806550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:00.806651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:00.831887Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:02:00.832437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:00.832782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:01.347809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:01.892204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:01.892425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:01.895878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:01.896120Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:01.896229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:01.896367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:01.896488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:01.896603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:01.896732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:01.896836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:01.897013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:01.897147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1282:2802];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:02.007221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:02.007330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:02.007588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:02.007704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:02.007822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:02.007947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:02.008075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:02.008217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:02.008328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:02.008432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.008535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:02.008634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1290:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:02.140963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:02.141074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:02.141303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:02.141456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:02.141562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:02.141684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:02.141835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:02.142000Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:02.142130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:02.142242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.142369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1295:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:02.142544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:129 ... _COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:02.658128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:02.658190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:02.658227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:02.658512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:02.658565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:02.658737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:02.658777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.658904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:02.658947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:02.659195Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:02.659238Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:02.659344Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:02.659381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:02.663431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:02.663496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:02.663582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:02.663635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:02.663804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:02.663857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:02.663949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:02.663991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:02.664056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:02.664089Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:02.664132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:02.664222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:02.664493Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:02.664536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:02.664715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:02.664759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:02.664903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:02.664941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:02.665120Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:02.665171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:02.665287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:02.665336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:04.120002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 2025-04-07T14:02:05.287877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976715658 at schemeshard 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2025-04-07T14:02:14.331095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3091:3899], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:14.331207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3103:3905], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:14.331278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:14.357438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:15.190297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3108:3909], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 2025-04-07T14:02:16.265800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr88y4mrbzbdx11jyaaeen2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ3YTIzNDAtODc3ODc0OTEtYmQ5OWEyMWEtYzA2ZWI2NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:02:17.064094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88y4mrbzbdx11jyaaeen2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ3YTIzNDAtODc3ODc0OTEtYmQ5OWEyMWEtYzA2ZWI2NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"type_id":4}}],"rows":[{"items":[{"uint64_value":20000}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-07T14:02:17.067903Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait [FAIL] Test command err: Trying to start YDB, gRPC: 15122, MsgBus: 22042 2025-04-07T13:59:55.180223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570693140369553:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:55.180385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fe9/r3tmp/tmpm6IU4P/pdisk_1.dat 2025-04-07T13:59:55.703711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:55.703800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:55.711811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:55.747166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15122, node 1 2025-04-07T13:59:55.921173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:55.921207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:55.921214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:55.921295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22042 TClient is connected to server localhost:22042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:56.719173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:59:56.732515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:59:57.256059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:00.184805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570693140369553:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:00.184866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:03.323683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:03.323864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:03.324151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:03.324267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:03.324362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:03.324473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:03.324565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:03.324890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:03.325045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:03.325151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:03.325275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:03.325368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570723205145387:2324];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:03.328074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:03.328176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:03.328304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:03.328414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:03.328515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:03.328618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:03.328941Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:03.329103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:03.329237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:03.329343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:03.329494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:03.329589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570723205145363:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:03.385417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:03.385491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:03.385744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:03.385880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:03.385991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:03.386097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723205145359:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:03.386208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570723 ... 
7490571217126480701:6320];tablet_id=72075186224038233;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.659823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7490571212831513287:4904];tablet_id=72075186224038064;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.703776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7490571212831513214:2366];tablet_id=72075186224038079;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.714481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[1:7490571212831513182:4331];tablet_id=72075186224038050;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.785188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038621;self_id=[1:7490571217126480660:4549];tablet_id=72075186224038621;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.804514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[1:7490571212831513148:5032];tablet_id=72075186224038090;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.827570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038551;self_id=[1:7490571212831513250:7176];tablet_id=72075186224038551;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.856746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;self_id=[1:7490571217126480914:5640];tablet_id=72075186224038731;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.915382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038373;self_id=[1:7490571217126480975:3203];tablet_id=72075186224038373;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:01:59.934309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038441;self_id=[1:7490571217126480758:4291];tablet_id=72075186224038441;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.014671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038168;self_id=[1:7490571217126480629:5942];tablet_id=72075186224038168;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.047921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038488;self_id=[1:7490571217126480742:4382];tablet_id=72075186224038488;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.137550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038429;self_id=[1:7490571217126480820:4249];tablet_id=72075186224038429;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.146363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038291;self_id=[1:7490571217126480807:6137];tablet_id=72075186224038291;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.284848Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038183;self_id=[1:7490571217126481050:3823];tablet_id=72075186224038183;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.371864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038608;self_id=[1:7490571217126480853:5577];tablet_id=72075186224038608;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.439681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038554;self_id=[1:7490571217126480835:4419];tablet_id=72075186224038554;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.466799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038601;self_id=[1:7490571212831513239:3169];tablet_id=72075186224038601;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.501489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038831;self_id=[1:7490571217126480900:4025];tablet_id=72075186224038831;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.552477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[1:7490571217126480751:4540];tablet_id=72075186224037998;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.596913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038582;self_id=[1:7490571217126481017:2995];tablet_id=72075186224038582;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.615983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038384;self_id=[1:7490571217126481005:3297];tablet_id=72075186224038384;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.696273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038393;self_id=[1:7490571217126480956:3810];tablet_id=72075186224038393;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.706363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038222;self_id=[1:7490571217126480982:4587];tablet_id=72075186224038222;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.810216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038784;self_id=[1:7490571217126481080:6130];tablet_id=72075186224038784;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.879012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571217126480926:4015];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:00.901420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7490571217126481036:3462];tablet_id=72075186224038060;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:01.094872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.095279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976725663, at schemeshard: 72057594046644480 
2025-04-07T14:02:01.098828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.117491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:02.538862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:02.539214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976725666, at schemeshard: 72057594046644480 2025-04-07T14:02:02.544090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:02.562899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725668:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.914437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725669:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.915290Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976725669, at schemeshard: 72057594046644480 2025-04-07T14:02:03.932538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725670:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.989967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725671:0, at schemeshard: 72057594046644480 2025-04-07T14:02:05.530183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725672:0, at schemeshard: 72057594046644480 2025-04-07T14:02:05.530552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976725672, at schemeshard: 72057594046644480 2025-04-07T14:02:05.559777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725673:0, at schemeshard: 72057594046644480 2025-04-07T14:02:05.595608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725674:0, at schemeshard: 72057594046644480 assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15122 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:675: Execute_ @ 0x17AC64C8 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7F6BD5BFDD8F 16. ??:0: ?? @ 0x7F6BD5BFDE3F 17. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 32335, MsgBus: 16545 2025-04-07T14:01:57.557532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571216474529205:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:57.557985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7e/r3tmp/tmpegEBSq/pdisk_1.dat 2025-04-07T14:01:58.187306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:58.187395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:58.197961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:58.215565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32335, node 1 2025-04-07T14:01:58.461345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:58.461372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:58.461379Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:58.461459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16545 TClient is connected to server localhost:16545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:59.647080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:59.680264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:59.955242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:00.325499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:00.546630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:02.552841Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571216474529205:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:02.552920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:03.712613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571242244334535:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:03.713388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.122479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.202014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.299370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.369885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.454434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.621381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.805683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571246539302346:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.805795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.806088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571246539302352:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.809791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:04.872942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571246539302354:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 22306, MsgBus: 19097 2025-04-07T14:02:08.821013Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571264071544043:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e7e/r3tmp/tmp3oZEYE/pdisk_1.dat 2025-04-07T14:02:09.187655Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:02:09.526477Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:09.529450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:09.529550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:09.531202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22306, node 2 2025-04-07T14:02:09.817785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:09.817812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:09.817819Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:09.817913Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19097 TClient is connected to server localhost:19097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:11.207803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:11.218868Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:11.232377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:11.353812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:11.577035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:11.690076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:13.817729Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571264071544043:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:13.817823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:15.376912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571294136316641:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.377058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.432086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.505072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.550677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.652544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.731736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.870334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.973009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571294136317150:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.973170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.975765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571294136317155:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.984787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:16.015800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571294136317157:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } [[#]] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.2%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 29924, MsgBus: 10466 2025-04-07T14:01:57.432925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571215865979772:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:57.432968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e6b/r3tmp/tmpcSxTJQ/pdisk_1.dat 2025-04-07T14:01:58.136201Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:58.163795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:58.163889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:58.165819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29924, node 1 2025-04-07T14:01:58.321301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:58.321327Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:58.321335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:58.321465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10466 TClient is connected to server localhost:10466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:59.379776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:59.445833Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:59.475358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:59.788986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:00.183355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:00.334549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:02.436853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571215865979772:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:02.525940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:02.906231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571237340817824:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:02.906337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:03.775718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.823650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.867495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:03.943947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.062066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.167907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:04.292815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571245930752929:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.292921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.297184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571245930752934:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.301169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:04.320964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571245930752936:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:06.194260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:02:06.937310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034526955, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 18592, MsgBus: 27935 2025-04-07T14:02:09.309182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571268165218593:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e6b/r3tmp/tmpr8tQwF/pdisk_1.dat 2025-04-07T14:02:09.592385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:02:09.837077Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:09.861811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:09.861907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:09.872946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18592, node 2 2025-04-07T14:02:10.181372Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:10.181394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:10.181404Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:10.181511Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27935 TClient is connected to server localhost:27935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:11.476278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:11.523861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:11.685367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:02:12.001835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.108574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:14.274787Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571268165218593:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:14.274902Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:15.548828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571293935023922:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.548936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:15.652455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.737435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.820335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.931254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:15.984653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:16.086162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:16.185119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571298229991728:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:16.185237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:16.185517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571298229991733:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:16.189910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:16.209790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571298229991735:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:02:17.477909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:02:18.398610Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034538386, txId: 281474976715675] shutting down >> KqpOlapBlobsSharing::MultipleSchemaVersions >> KqpOlap::YqlScriptOltpAndOlap >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 >> KqpOlap::PredicatePushdownNulls [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 25069, MsgBus: 3741 2025-04-07T14:01:48.186505Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571175942028849:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.186543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e9f/r3tmp/tmppoaX9g/pdisk_1.dat 2025-04-07T14:01:48.725145Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:48.733150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:48.733226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:48.742412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25069, node 1 2025-04-07T14:01:48.810744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:48.810770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:48.810780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:48.810885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3741 TClient is connected to server localhost:3741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:49.437853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:49.460960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:49.559658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.559879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.560167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.560267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.560369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.560492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.560579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.561156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.561308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.561422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.561530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.561634Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571180236996825:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.609647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.609715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.609930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.610055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.610170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.610285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.610446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.610552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.610672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:49.610786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.610908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:49.611006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571180236996827:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:49.643992Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:49.644058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:49.644294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:49.644406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:49.644511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:49.644844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:49.644968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:49.645087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:49.645217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571180236996828: ... 
ARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:49.695115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:49.695216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:49.695475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:49.695514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:49.695669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:49.695700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:49.695908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:49.695939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:49.696108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:49.696140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2025-04-07T14:01:51.647041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188826931725:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.647909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571188826931714:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.648032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:51.651815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:01:51.666901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571188826931728:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:03.720488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:03.720519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:15.378019Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571175942028849:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:15.403065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:17.197029Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xd0pfwjt2s5v8qbh856a", SessionId: ydb://session/3?node_id=1&id=OWY3ZDExOTktMmQxMjZhMmEtMzQ2ZDA5ZmItMjYxNDc1NTI=, Slow query, duration: 25.534749s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "--!syntax_v1\nPRAGMA Kikimr.OptUseFinalizeByKey;\n\n SELECT\n level, AVG(id), AVG(level)\n FROM `/Root/tableWithNulls`\n WHERE id >= 5\n GROUP BY level\n ORDER BY level;\n \n", parameters: 0b 2025-04-07T14:02:17.199245Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2753) '('"_id" '"6570d84a-efbd74a5-c9af09e0-aba18090") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2812) '('"_id" '"f44451fe-adb338f5-54a37d87-51394e17") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"id")) (let $35 '('"_yql_agg_0_cnt" '"count" '"id")) (let $36 '('"_yql_agg_1_sum" '"sum" '"level")) (let $37 '('"_yql_agg_1_cnt" '"count" '"level")) (let $38 '($34 $35 $36 $37)) (return (TKqpOlapAgg $33 $38 '('"level"))) ))))) (let $32 (lambda '($39 $40 $41 $42 $43) (block '( (let $44 (IfPresent $42 (lambda '($45) (Just '((Convert $45 'Double) $41))) (Nothing $12))) (return '((Convert $40 'Double) $39) $44 $43) )))) (return (FromFlow (WideMap $31 $32))) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2))) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '5542) '('"_id" '"c972e46e-1230c90d-1504782-63b55034") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($46) (block '( (let $47 (lambda '($54 $55 $56 $57) (Nth $55 '0) (Nth $55 '1) $56)) (let $48 (lambda '($58 $59 $60 $61 $62 $63 $64) (block '( (let $65 (IfPresent $60 (lambda '($66) (IfPresent $64 (lambda '($67) (Just '((AggrAdd (Nth $66 '0) (Nth $67 '0)) (AggrAdd (Nth $66 '1) (Nth $67 '1))))) $60)) $64)) (return (AggrAdd (Nth $59 '0) $62) (AggrAdd (Nth $59 '1) $63) $65) )))) (let $49 (lambda '($68 $69 $70 $71) (block '( (let $72 (IfPresent $71 (lambda '($73) (Just (Div (Nth $73 '0) (Nth 
$73 '1)))) (Nothing $18))) (return (Div $69 $70) $72 $68) )))) (let $50 (WideCombiner (ToFlow $46) '"" (lambda '($51 $52 $53) $53) $47 $48 $49)) (return (FromFlow (WideSort $50 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($74) (FromFlow (NarrowMap (ToFlow $74) (lambda '($75 $76 $77) (AsStruct '('"column1" $75) '('"column2" $76) '('"level" $77)))))) '('('"_logical_id" '5554) '('"_id" '"9b30af87-94af6e99-2d9d5974-cad03289")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Sum_GroupByNull >> KqpOlapSysView::StatsSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownNulls [GOOD] Test command err: Trying to start YDB, gRPC: 7017, MsgBus: 10726 2025-04-07T14:02:11.674592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571276064939343:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:11.674807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ca0/r3tmp/tmpf4s19E/pdisk_1.dat 2025-04-07T14:02:12.765534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:12.841974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:12.842060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:12.844480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:02:12.849899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7017, node 1 2025-04-07T14:02:13.133160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:13.133178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:13.133187Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:13.133257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10726 TClient is connected to server localhost:10726 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:14.993181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:15.040257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:15.310149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.310335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.310543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.310653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.310772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.310863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.310952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:15.311036Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:15.311127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:15.311212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:15.311365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:15.311498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571293244809076:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:15.451215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.451272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.451473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.451561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.451648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.451743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.451844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:15.451946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:15.452034Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:15.452119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:15.452197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:15.452292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571293244809077:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:15.588088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.588151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.588356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.588473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.588571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.588719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.588845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:15.588954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571293244809078:2294];tablet_id=72075186224037890;process=TTxInitSchem ... 7], TxId: 281474976710663, task: 47. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.339980Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646076:2459], TxId: 281474976710663, task: 39. Ctx: { CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340087Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646085:2468], TxId: 281474976710663, task: 48. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340189Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646095:2478], TxId: 281474976710663, task: 58. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340293Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646070:2453], TxId: 281474976710663, task: 33. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340395Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646113:2481], TxId: 281474976710663, task: 61. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340582Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646087:2470], TxId: 281474976710663, task: 50. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.340614Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646079:2462], TxId: 281474976710663, task: 42. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.342369Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646089:2472], TxId: 281474976710663, task: 52. Ctx: { CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.342706Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646090:2473], TxId: 281474976710663, task: 53. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.342941Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646091:2474], TxId: 281474976710663, task: 54. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.343198Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646092:2475], TxId: 281474976710663, task: 55. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.343406Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646094:2477], TxId: 281474976710663, task: 57. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.343624Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646114:2482], TxId: 281474976710663, task: 62. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.343843Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646096:2479], TxId: 281474976710663, task: 59. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.344508Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:02:20.344857Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646097:2480], TxId: 281474976710663, task: 60. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.345197Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646117:2485], TxId: 281474976710663, task: 65. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.345651Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646082:2465], TxId: 281474976710663, task: 45. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.345923Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646083:2466], TxId: 281474976710663, task: 46. Ctx: { CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.346208Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646115:2483], TxId: 281474976710663, task: 63. Ctx: { TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.346430Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646116:2484], TxId: 281474976710663, task: 64. Ctx: { CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.346643Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646086:2469], TxId: 281474976710663, task: 49. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. TraceId : 01jr88y6nkd3f852m7q3a0vpck. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.346883Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571314719646088:2471], TxId: 281474976710663, task: 51. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=. CustomerSuppliedId : . TraceId : 01jr88y6nkd3f852m7q3a0vpck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571310424678726:2402], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:02:20.347301Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWY4MzhjN2UtYzFiNGViNWYtNmFhYmE1ZTAtZTY5NDk4YzM=, ActorId: [1:7490571306129711317:2402], ActorState: ExecuteState, TraceId: 01jr88y6nkd3f852m7q3a0vpck, Create QueryResponse for error on request, msg: 2025-04-07T14:02:20.348514Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034540003, txId: 18446744073709551615] shutting down >> KqpOlapBlobsSharing::UpsertWhileSplitTest >> KqpOlapSparsed::SwitchingStandalone >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysView [GOOD] Test command err: Trying to start YDB, gRPC: 21199, MsgBus: 27377 2025-04-07T14:01:51.953299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571191614658460:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:51.953352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d9a/r3tmp/tmp6wN1cF/pdisk_1.dat 2025-04-07T14:01:52.383223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.398378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.398434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.400691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21199, node 1 2025-04-07T14:01:52.472208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.472229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.472252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.472356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27377 TClient is connected to server localhost:27377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:01:53.030379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:53.068436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:53.176329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.176596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.176894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.177059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.177155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.177267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.177360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.177441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.177557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.177675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.177772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.177893Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571200204593707:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.212691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.212755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.212977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.213100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.213216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.213299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.213399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.213503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.213596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.213686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.213808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.213902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571200204593713:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.274669Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.274753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.274951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.275068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.275170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.275284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.275391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.275502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571200204593720:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.275614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74905712002045937 ... 
=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; 2025-04-07T14:01:56.954906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571191614658460:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:56.954985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:02:07.368349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:07.368384Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(Rows) as Rows FROM `/Root/olapStore/.sys/store_primary_index_portion_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY TabletId, Kind, PathId RESULT: 2025-04-07T14:02:11.664946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571277514007543:3801], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.665106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.669024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571277514007555:3804], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.673762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:11.698925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571277514007557:3805], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:21.742522Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88y2a5b8hr12pf22xf90sw", SessionId: ydb://session/3?node_id=1&id=NWFjZjUyOTAtMTdmNDdkMjctZDM0MTc0NS1hYWZiMzA2Nw==, Slow query, duration: 10.079951s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId, Sum(Rows) as Rows\n FROM `/Root/olapStore/.sys/store_primary_index_portion_stats`\n WHERE Activity == 1\n GROUP BY PathId, Kind, TabletId\n ORDER BY TabletId, Kind, PathId\n ", parameters: 0b TabletId: 72075186224037888 Rows: 33295 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Rows: 33320 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Rows: 33385 Kind: INSERTED PathId: 3 2025-04-07T14:02:21.747695Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034533024, txId: 281474976710662] shutting down >> KqpYql::FlexibleTypes >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> KqpYql::EvaluateExprPgNull >> TPQTest::TestManyConsumers [GOOD] >> KqpOlap::PredicatePushdownWithParameters [GOOD] |90.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownWithParameters [GOOD] Test command err: Trying to start YDB, gRPC: 3283, MsgBus: 29951 2025-04-07T14:02:18.797212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571304477250285:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:18.797432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c6c/r3tmp/tmpGiEOVC/pdisk_1.dat 2025-04-07T14:02:19.540119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:19.543790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:19.543855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:19.560906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3283, node 1 2025-04-07T14:02:19.817182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:19.817205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:19.817212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:19.817317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29951 TClient is connected to server localhost:29951 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:20.951433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:20.975384Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:20.992188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:21.146013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.146209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.146465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.146564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.146686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.146798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.146888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.146979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.147084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.147168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.147279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.147381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571317362152715:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.217307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.217369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.217568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.217713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.217807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.217952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.218068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.218188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.218280Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.218412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.218521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.218605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317362152717:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.268129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.268192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.268381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.268469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.268552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.268652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.269018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.269131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317362152749:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
t=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:21.329779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:21.329816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:21.329891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:21.329915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:21.329977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:21.330003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:21.330043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:21.330083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:21.330353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:21.330394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:21.330556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:21.330576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.330662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:21.330682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:21.330845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:21.330876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:21.330989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:21.331006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:21.333271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:21.333324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:21.333405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:21.333439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:21.333570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:21.333592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:21.333671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:21.333704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:21.333753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:21.333788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:21.333841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:21.333859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:21.334158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:21.334188Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:21.334353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:21.334373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.334464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:21.334494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:21.334659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:21.334692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:21.334782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:21.334800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:21.417336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 2025-04-07T14:02:23.330908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571325952087615:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.331009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.331423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571325952087650:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.334911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:23.359842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571325952087652:2396], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:23.853247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571304477250285:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.853332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:24.418241Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034544000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:25.079776Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034544168, txId: 18446744073709551615] shutting down Query: --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = "false"; DECLARE $in_uid AS Utf8; DECLARE $in_level AS Int32; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE uid > $in_uid AND level > $in_level ORDER BY `timestamp`; Expected: [[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] Received: [[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 3915, MsgBus: 24046 2025-04-07T14:02:18.715595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571306501041714:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:18.715691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c78/r3tmp/tmpQktz3Q/pdisk_1.dat 2025-04-07T14:02:19.559365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:19.559428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:19.560233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:19.575046Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3915, node 1 2025-04-07T14:02:19.862164Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:19.862181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:19.862200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:19.862274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24046 TClient is connected to server localhost:24046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:20.843583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:20.875280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:20.894719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:21.081789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.081976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.082194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.082308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.082402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.082513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.082621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.082712Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.082818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.082920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.083019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.083122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571315090976851:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.196117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.196175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.196383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.196498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.196602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.196707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.196822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.196925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.197051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.197145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.197246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.197360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571315090976843:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.293648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.293702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.293909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.294017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.294121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.294238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.294329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.294433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571315090976844:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
et_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:21.432859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:21.433050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:21.433075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.433191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:21.433213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:21.433362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:21.433383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:21.433507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:21.433526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; 2025-04-07T14:02:23.706178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571327975879014:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.706334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.706637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571327975879049:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.710162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:23.716647Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571306501041714:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.716720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:23.731286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571327975879051:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:25.260368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034544000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType 
(OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1332) '('"_id" '"bf1af240-e52bf158-11935e-74c05d0") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $21 (Int32 '1)) (let $22 (Just $21)) (let $23 '($22 $21)) (let $24 (If (== $21 (Int32 '2147483647)) $23 '((+ $22 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($23 $24)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'String)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (OptionalType $10)) (let $14 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id" '"jsondoc" '"jsonval")) (let $27 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $28 (OptionalType (DataType 'JsonDocument))) (let $29 '((VariantType (TupleType (TupleType (DataType 'Uint8) $10) $12)))) (let $30 (ResourceType '"JsonNode")) (let $31 (OptionalType $30)) (let $32 '((ResourceType '"JsonPath"))) (let $33 (DictType $11 $30)) (let $34 '($33)) (let $35 (CallableType '() $29 '($31) $32 $34)) (let $36 '('('"strict"))) (let $37 (Udf '"Json2.SqlValueUtf8" (Void) (VoidType) '"" $35 (VoidType) '"" $36)) (let $38 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($30) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) )))) (let $39 (Nothing $31)) (let $40 (CallableType '() $32 '($11))) (let $41 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $40 (VoidType) '"" '())) (let $42 (Apply $41 (Utf8 '"$.col1"))) (let $43 (Dict $33)) (let $44 (lambda '($61) (block '( (let $62 (Nothing $13)) (return $62) )))) (let $45 (lambda '($63) (block '( (let $64 (IfPresent $63 (lambda '($65) (Just (SafeCast $65 $10))) (Nothing $13))) (return (If (Exists $63) $64 (Nothing $13))) )))) (let $46 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 $26 '() $27 (lambda '($47) (block '( (let $48 (DataType 'Json)) (let $49 (StructType $9 '('"jsondoc" $28) '('"jsonval" (OptionalType $48)))) (let $50 (KqpOlapApply $49 '('"jsonval") (lambda '($53) (block '( (let $54 (IfPresent $53 $38 $39)) (let $55 (Apply $37 $54 $42 $43)) (let $56 (Nothing $13)) (return (Visit $55 '0 $44 '1 $45)) ))))) (let $51 '('eq $50 (String '"val1"))) (let $52 '('?? 
$51 (Bool 'false))) (return (KqpOlapFilter $47 $52)) ))))) (return (FromFlow (NarrowMap $46 (lambda '($66 $67 $68) (block '( (let $69 (IfPresent $68 $38 $39)) (let $70 (Apply $37 $69 $42 $43)) (let $71 (Visit $70 '0 $44 '1 $45)) (let $72 (CallableType '() $29 '($28) $32 $34)) (let $73 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $72 (VoidType) '"" $36)) (let $74 (Apply $73 $67 $42 $43)) (let $75 (Visit $74 '0 (lambda '($76) (Nothing $12)) '1 (lambda '($77) $77))) (return (AsStruct '('"column1" $71) '('"column2" $75) '('"id" $66))) )))))) ))) '('('"_logical_id" '1403) '('"_id" '"cd02b652-6d9269a2-c014f474-f7b017cd")))) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (DqPhyStage '($15) (lambda '($78) $78) '('('"_logical_id" '2458) '('"_id" '"6d646f12-d100a954-b0b896ee-7f75716")))) (let $17 '('"id" '"column1" '"column2")) (let $18 (DqCnResult (TDqOutput $16 '0) $17)) (let $19 (KqpTxResultBinding $8 '0 '0)) (let $20 (KqpPhysicalTx '($14 $16) '($18) '('($5 $19)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $20) '((KqpTxResultBinding (ListType (StructType '('"column1" $13) '('"column2" $12) $9)) '1 '0)) '('('"type" '"scan_query")))) ) |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlap::YqlScriptOltpAndOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean Test command err: Trying to start YDB, gRPC: 18526, MsgBus: 22820 2025-04-07T14:02:12.071283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571280822856536:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:12.071733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c93/r3tmp/tmpsFKMDo/pdisk_1.dat 2025-04-07T14:02:12.863364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:12.873572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:12.873655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:12.885979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18526, node 1 2025-04-07T14:02:13.065241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:13.065264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:13.065275Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:13.065403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22820 TClient is connected to server localhost:22820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:13.807487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:13.830314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:13.848885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:13.905756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:02:13.936051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:02:13.936217Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-07T14:02:13.965301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:13.965481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:13.965726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:13.965821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:13.965904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:13.965982Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:13.966078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:13.966163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:13.966299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:13.966392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:13.966481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:13.966557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:13.978071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:02:14.025491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:02:14.025629Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:02:14.037738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:14.044795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:14.045048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:14.045140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:14.045217Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:14.045312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:14.045390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:14.045468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:14.045569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:14.045664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:14.045801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:14.045895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:14.048832Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2025-04-07T14:02:14.048999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:02:14.049048Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:02:14.049191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:02:14.049303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:14.049341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema: ... 
t_indexation_tasks;insert_overload_size=0; 2025-04-07T14:02:25.111375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:25.111455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:25.111558Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.111594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.111625Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:25.111685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:25.117542Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034545000 at tablet 72075186224037889 2025-04-07T14:02:25.117594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:02:25.117661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:02:25.117730Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:25.117772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:25.117799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.117821Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.117847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:25.117912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571285117824362:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:25.125136Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:25.125210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2025-04-07T14:02:25.127485Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:02:25.127568Z node 1 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-04-07T14:02:25.127635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:02:25.127690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:02:25.127746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:25.127790Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:25.127833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.127854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.127879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:25.127974Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:25.128081Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034545000 at tablet 72075186224037888 2025-04-07T14:02:25.128113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:02:25.128139Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:02:25.128170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:25.128202Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:25.128224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.128238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:25.128259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:25.128296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571285117824373:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:25.337818Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571285117824373:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:25.337883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571285117824362:2290];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. 
/-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 2025-04-07T14:02:25.840575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571285117824373:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:25.840574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571285117824362:2290];fline=actor.cpp:33;event=skip_flush_writing; 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17AA48D6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:216: Execute_ @ 0x17AA6904 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F4533BECD8F 18. ??:0: ?? @ 0x7F4533BECE3F 19. ??:0: ?? 
@ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 8492, MsgBus: 24025 2025-04-07T14:02:02.424860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571235426767328:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:02.427854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e59/r3tmp/tmpXNlsC2/pdisk_1.dat 2025-04-07T14:02:03.420981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:03.424179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:03.424243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:03.441662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:03.442496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8492, node 1 2025-04-07T14:02:03.868318Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:03.868345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:03.868351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:03.868426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24025 TClient is connected to server localhost:24025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:05.499234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:05.567220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:05.888313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:06.314957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:06.435036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:07.465438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571235426767328:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:07.465511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:11.213271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571274081474664:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.213426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:11.976236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.024709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.075756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.110573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.173102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.238500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:12.311837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571278376442482:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:12.311889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:12.312202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571278376442487:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:12.316222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:12.338321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571278376442489:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:15.995161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18099, MsgBus: 18571 2025-04-07T14:02:18.222734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571307005773252:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:18.222772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e59/r3tmp/tmpCGQWmK/pdisk_1.dat 2025-04-07T14:02:18.741503Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:18.791560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:18.791638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:18.813443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18099, node 2 2025-04-07T14:02:19.024930Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:19.024972Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:19.024979Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:19.025071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18571 TClient is connected to server localhost:18571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:19.830325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:19.841850Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:19.851495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:02:19.947299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:20.152520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:20.279934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:23.229222Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571307005773252:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.229420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:23.253249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571328480611432:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.253399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.284084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.319782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.379607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.451483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.513477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.578557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:23.666891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571328480611931:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.666975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.667299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571328480611936:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:23.670955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:23.687490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571328480611938:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:25.458467Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034545412, txId: 281474976710671] shutting down 2025-04-07T14:02:25.673795Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034545643, txId: 281474976710673] shutting down 2025-04-07T14:02:27.280495Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034547185, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] Test command err: Trying to start YDB, gRPC: 30556, MsgBus: 7457 2025-04-07T14:00:08.231524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570746185038035:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:08.231822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fdb/r3tmp/tmps3RC1O/pdisk_1.dat 2025-04-07T14:00:08.613131Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30556, node 1 2025-04-07T14:00:08.643194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:08.643297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:08.649057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:08.675835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:08.675850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:08.675864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:08.675922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7457 TClient is connected to server localhost:7457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:09.335197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:09.782515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:13.220765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570746185038035:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:13.221005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:15.312723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.312986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.313222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.313331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.313450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.313568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.313702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:15.313807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:15.313944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:15.314048Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:15.314149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:15.314247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570776249812535:2306];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:15.327320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.328547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.328809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.328922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.329628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.329746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.329850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:15.329965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:15.330088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:15.330210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:15.330337Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:15.330445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570776249812635:2312];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:15.379221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:15.379288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:15.379535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:15.379667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:15.379796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:15.379911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:15.380025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7490570776249812536:2307];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormali ... 
=72075186224037899;self_id=[1:7490571210041588069:13435];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.646002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7490571210041586868:13380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.652089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7490571210041586666:13378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.653256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[1:7490571210041588106:13439];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037909;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.654978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7490571210041586990:13384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037901;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.663593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571210041587665:13395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.682183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038658;self_id=[1:7490571300235909729:5078];tablet_id=72075186224038658;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.683900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571210041586244:13374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.703407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038192;self_id=[1:7490571300235909493:4130];tablet_id=72075186224038192;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.711125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571210041586771:13379];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.725838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038159;self_id=[1:7490571304530877160:6059];tablet_id=72075186224038159;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.747813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7490571210041586122:13367];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037930;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.752452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571210041586088:13366];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.763233Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038532;self_id=[1:7490571304530877118:2684];tablet_id=72075186224038532;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.764327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7490571210041586982:13383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.790777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038189;self_id=[1:7490571300235909654:4055];tablet_id=72075186224038189;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.811372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038550;self_id=[1:7490571300235909642:4057];tablet_id=72075186224038550;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.812788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7490571210041586904:13381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.812983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7490571210041586904:13381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.849981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038576;self_id=[1:7490571304530877209:5395];tablet_id=72075186224038576;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.852315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7490571210041587425:13391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.884154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038911;self_id=[1:7490571304530877186:3405];tablet_id=72075186224038911;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.884785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[1:7490571210041586947:13382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.884906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[1:7490571210041586947:13382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.898955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571210041586037:13360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.899457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571210041586665:13377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.933589Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038853;self_id=[1:7490571300235909449:3699];tablet_id=72075186224038853;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.947872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038540;self_id=[1:7490571300235909615:3992];tablet_id=72075186224038540;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:19.950912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7490571210041586039:13361];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037928;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.951297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7490571210041587874:13426];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.953487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571210041587837:13410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.953676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7490571210041588116:13444];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.959091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[1:7490571210041587406:13389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:19.976279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038492;self_id=[1:7490571300235909775:3197];tablet_id=72075186224038492;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:20.010313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038290;self_id=[1:7490571304530877415:6444];tablet_id=72075186224038290;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:20.057549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7490571300235909471:3817];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:02:20.058226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7490571210041587387:13386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037902;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:20.069082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038476;self_id=[1:7490571304530877136:3702];tablet_id=72075186224038476;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30556 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:659: Execute_ @ 0x17AC593D 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FEDAC710D8F 16. ??:0: ?? @ 0x7FEDAC710E3F 17. ??:0: ?? @ 0x15851028 >> KqpOlap::CheckEarlyFilterOnEmptySelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::YqlScriptOltpAndOlap [GOOD] Test command err: Trying to start YDB, gRPC: 11374, MsgBus: 62936 2025-04-07T14:02:21.246598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571318919070396:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:21.246818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c43/r3tmp/tmpsXvGYG/pdisk_1.dat 2025-04-07T14:02:22.017012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:22.049786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:22.049849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:22.050989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11374, node 1 2025-04-07T14:02:22.253721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:22.253740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:22.253751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:22.253825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62936 TClient is connected to server localhost:62936 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:23.106061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:23.140623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:23.354190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.354444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.354734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.354851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.354944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.355058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.355168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.355279Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.355398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:23.355487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.355571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:23.355682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571327509005524:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:23.450987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.451044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.451228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.451307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.451402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.451481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.451555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.451654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.451739Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:23.451844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.451930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:23.452020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571327509005525:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:23.527663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.527721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.527892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.527986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.528081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.528173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.528258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.528337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571327509005528:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.528421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905713275090055 ... 
72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:23.580109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:23.580134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:23.580270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:23.580293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:23.580365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:23.580387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:23.580432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:23.580451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:23.580502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:23.580534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:23.580818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:23.580853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:23.581005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:23.581025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.581137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:23.581160Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:23.584996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:23.585031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:23.585117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:23.585135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:23.586646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:23.586680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:23.586762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:23.586783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:23.586931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:23.586957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:23.587050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:23.587080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:23.587142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:23.587162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:23.587197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:23.587217Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:23.587508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:23.587541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:23.587725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:23.587754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.587858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:23.587897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:23.588074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:23.588096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:23.588191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:23.588209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:23.638659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3632;columns=5; 2025-04-07T14:02:23.766786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:25.956580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571336098940524:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:25.957007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:25.959227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571336098940536:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:25.969818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:02:25.989671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571336098940539:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:02:26.209120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571318919070396:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:26.209172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:27.845935Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034547640, txId: 281474976710664] shutting down >> KqpOlapSparsed::Switching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T13:59:03.271500Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.271588Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:03.330093Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.366672Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T13:59:03.367812Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-07T13:59:03.374451Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:03.376769Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T13:59:03.378381Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:03.385491Z node 1 :PERSQUEUE INFO: new Cookie default|c2248deb-fc7d8039-91d0b800-a02909e7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T13:59:03.776399Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.776463Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:03.798586Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:03.799629Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-07T13:59:03.800363Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:182:2195] 2025-04-07T13:59:03.802842Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T13:59:03.804802Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:183:2196] 2025-04-07T13:59:03.806494Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 
[2:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:03.813253Z node 2 :PERSQUEUE INFO: new Cookie default|8c28f39e-b54e0e73-378e2cac-a90c810a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2025-04-07T13:59:04.235107Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:04.235171Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:175:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.252920Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:04.253649Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-07T13:59:04.254201Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:182:2195] 2025-04-07T13:59:04.256346Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T13:59:04.257879Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:183:2196] 2025-04-07T13:59:04.259562Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.266255Z node 3 :PERSQUEUE INFO: new Cookie default|55be8997-313efeb5-de31e1b8-1253c065_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2025-04-07T13:59:04.714122Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:04.714204Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] Leader for TabletID 72057594037927938 is [4:151:2172] sender: [4:152:2057] recipient: [4:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:177:2057] recipient: [4:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T13:59:04.803136Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:04.803982Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 2097152 SourceIdL ... 
Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.355811Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.374111Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.412364Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.412433Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1012:3008], now have 1 active actors on pipe 2025-04-07T14:02:24.412541Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.414119Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.430594Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.467981Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.468048Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1015:3011], now have 1 active actors on pipe 2025-04-07T14:02:24.468144Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.469678Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.479853Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.517540Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.517614Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1018:3014], now have 1 active actors on pipe 2025-04-07T14:02:24.517717Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.519310Z node 
87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.536520Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.576276Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.576351Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1021:3017], now have 1 active actors on pipe 2025-04-07T14:02:24.576447Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.578330Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.617361Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.660139Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.660226Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1024:3020], now have 1 active actors on pipe 2025-04-07T14:02:24.660323Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.666316Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.708552Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.787678Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.787752Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1027:3023], now have 1 active actors on pipe 2025-04-07T14:02:24.787860Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.789613Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.808485Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.852792Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.852861Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1030:3026], now have 1 active actors on pipe 2025-04-07T14:02:24.852973Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.854599Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.877319Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:24.934748Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:24.934831Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1033:3029], now have 1 active actors on pipe 2025-04-07T14:02:24.934939Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:24.936585Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:24.953679Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:25.021225Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:02:25.021297Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1036:3032], now have 1 active actors on pipe 2025-04-07T14:02:25.021406Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2025-04-07T14:02:25.023009Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 
UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:02:25.039921Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:02:25.078977Z node 87 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [87:1039:3035] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> KqpOlap::TableSinkWithOlapStore >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable >> KqpDataIntegrityTrails::Select |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapWrite::WriteFails >> KqpOlapAggregations::JsonDoc_Exists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CheckEarlyFilterOnEmptySelect [GOOD] Test command err: Trying to start YDB, gRPC: 31090, MsgBus: 28888 2025-04-07T14:00:12.581127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570763395568655:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:12.581568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd9/r3tmp/tmpQ5S3rI/pdisk_1.dat 2025-04-07T14:00:13.253011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:13.253095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:13.261288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:13.262649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31090, node 1 2025-04-07T14:00:13.434496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:13.434539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:13.434551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:13.434690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28888 TClient is connected to server localhost:28888 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:14.210823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:14.263234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:14.289830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:14.485329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.485718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.486111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.486270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.486396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.486507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.486630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.486761Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:14.486900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:14.487016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.487155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:14.487258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570771985503763:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:14.564858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.564927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.565207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.565336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.565450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.565536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.565617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.565719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:14.565823Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:14.565928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:14.566014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:14.566118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570771985503764:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:14.615469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:14.615524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:14.615723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:14.615809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:14.615865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:14.615928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:14.616001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:14.616076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570771985503765:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable` WHERE uid='dsfdfsd' LIMIT 10; RESULT: 2025-04-07T14:02:04.899490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571244431989709:9719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.899624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.899859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571244431989721:9676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:04.905694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:04.949582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571244431989723:9696], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:28.064824Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xvp12t3h759fe4rk5k3b", SessionId: ydb://session/3?node_id=1&id=N2I2M2U5YS0xNTA4ZjZhYy1mYjJjZGJhYy1mMjMwMjVhYQ==, Slow query, duration: 23.167183s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT * FROM `/Root/olapStore/olapTable`\n WHERE uid='dsfdfsd'\n LIMIT 10;\n ", parameters: 0b 2025-04-07T14:02:28.066984Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034526000, txId: 18446744073709551615] shutting down 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::Switching [GOOD] Test command err: Trying to start YDB, gRPC: 4464, MsgBus: 15322 2025-04-07T14:00:54.075491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570944136198813:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.075533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f93/r3tmp/tmpGDVOGt/pdisk_1.dat 2025-04-07T14:00:54.492098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.496786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.496888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:54.501858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4464, node 1 2025-04-07T14:00:54.683443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.683466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.683478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.683565Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15322 TClient is connected to server localhost:15322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:55.733467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:55.761163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:55.785690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:55.866797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.866991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.867254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.867408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.867502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.867617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.867722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.867829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.867946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.868077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.868255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.868376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.899714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.899773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.899999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.900103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.900195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.900298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.900382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.900468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.900550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.900616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.900710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.900840Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.926384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.926444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.926641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.926775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.926868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.926974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.927075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.927173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
rd::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.441568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.441764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.441898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.541949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.542183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.542380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.542526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.542703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.542866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.645785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.646012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.646214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.646345Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.646478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.646606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.749348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.749714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.749908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.750251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.750435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.750551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.929361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.929750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.929951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.933536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.933768Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:20.933901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.066153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.066398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.067642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.067880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.079077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.080299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.167289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.167686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.167894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.168246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570948431166585:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.168389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570948431166575:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.168541Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037890;self_id=[1:7490570948431166579:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:21.236554Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034540000, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 2 seconds WAIT_COMPACTION: 9 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` WHERE field == 'abcde' RESULT: 2025-04-07T14:02:28.650820Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034548000, txId: 18446744073709551615] shutting down count: 12689 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: 2025-04-07T14:02:29.885983Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034549000, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 2 seconds Timing: wait took 0 seconds >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix [GOOD] >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes >> KqpOlap::CountWhereColumnIsNull [GOOD] >> KqpOlap::SimpleCount >> KqpOlapIndexes::IndexesInLocalMetadata >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> KqpOlapAggregations::Aggregation_Sum_GroupByNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CountWhereColumnIsNull [GOOD] Test command err: Trying to start YDB, gRPC: 12987, MsgBus: 28031 2025-04-07T14:02:20.296347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571312936173381:2113];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:20.297046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c53/r3tmp/tmpA8ML5R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12987, node 1 2025-04-07T14:02:20.728656Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:20.728694Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:20.745369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:20.747447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:20.752304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:20.764999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:20.789000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:20.789020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:20.789030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:20.789113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28031 TClient is 
connected to server localhost:28031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:21.425058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:21.436047Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:21.452441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:21.617401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.617609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.617878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.618019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.618124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.618279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.618401Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.618511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.618625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.618745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.618861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.618998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571317231141276:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.658529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.658606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.658807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.658901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.659013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.659127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.659209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.659297Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:21.659400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:21.659494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:21.659601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:21.659696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571317231141275:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:21.699665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:21.699730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:21.699921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:21.700024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:21.700127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:21.700251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:21.700352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571317231141278:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:21.700444Z node 1 :TX_COLUMNSHARD WARN: ... 
lt"],"t":0.107}],"full":{"a":1744034551542977,"name":"_full_task","f":1744034551542977,"d_finished":0,"c":0,"l":1744034551650945,"d":107968},"events":[{"name":"bootstrap","f":1744034551590147,"d_finished":1669,"c":1,"l":1744034551591816,"d":1669},{"a":1744034551650650,"name":"ack","f":1744034551634379,"d_finished":173,"c":1,"l":1744034551634552,"d":468},{"a":1744034551650634,"name":"processing","f":1744034551634366,"d_finished":3982,"c":8,"l":1744034551650622,"d":4293},{"name":"ProduceResults","f":1744034551591189,"d_finished":3601,"c":11,"l":1744034551650864,"d":3601},{"a":1744034551650866,"name":"Finish","f":1744034551650866,"d_finished":0,"c":0,"l":1744034551650945,"d":79},{"name":"task_result","f":1744034551639609,"d_finished":3656,"c":7,"l":1744034551650619,"d":3656}],"id":"72075186224037889::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;) 2025-04-07T14:02:31.651393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815339:2715];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:02:31.542562Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=65368;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=65368;selected_rows=0; 2025-04-07T14:02:31.651413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815339:2715];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:02:31.651455Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571360180815339:2715];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:77;profile=;; 2025-04-07T14:02:31.651512Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571360180815339:2715];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:78;fetching=ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;; 2025-04-07T14:02:31.653249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:02:31.653278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:02:31.653298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=7; 2025-04-07T14:02:31.653330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=7; 2025-04-07T14:02:31.653353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:02:31.653437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.653470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-07T14:02:31.653543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;);columns=3;rows=1; 2025-04-07T14:02:31.653586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=21;num_rows=1;batch_columns=8,9,level; 2025-04-07T14:02:31.653684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571360180815338:2714] send ScanData to [1:7490571338705978764:2713] txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037890 bytes: 21 rows: 1 page faults: 0 finished: 0 pageFault: 0 arrow schema: 8: uint64 9: uint64 level: int32 2025-04-07T14:02:31.653778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.653865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.653943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.655790Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:02:31.655896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.655972Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2025-04-07T14:02:31.655991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571360180815338:2714] finished for tablet 72075186224037890 2025-04-07T14:02:31.656037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571360180815338:2714] send ScanData to [1:7490571338705978764:2713] txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037890 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:02:31.656442Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:7490571360180815338:2714] and sent to [1:7490571338705978764:2713] packs: 0 txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037890 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.046},{"events":["f_ProduceResults"],"t":0.049},{"events":["l_bootstrap"],"t":0.05},{"events":["f_ack","f_processing"],"t":0.094},{"events":["f_task_result"],"t":0.099},{"events":["l_task_result"],"t":0.114},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.116}],"full":{"a":1744034551539906,"name":"_full_task","f":1744034551539906,"d_finished":0,"c":0,"l":1744034551656059,"d":116153},"events":[{"name":"bootstrap","f":1744034551586578,"d_finished":3557,"c":1,"l":1744034551590135,"d":3557},{"a":1744034551655765,"name":"ack","f":1744034551634063,"d_finished":294,"c":1,"l":1744034551634357,"d":588},{"a":1744034551655740,"name":"processing","f":1744034551634030,"d_finished":2299,"c":8,"l":1744034551653980,"d":2618},{"name":"ProduceResults","f":1744034551589574,"d_finished":1836,"c":11,"l":1744034551655975,"d":1836},{"a":1744034551655976,"name":"Finish","f":1744034551655976,"d_finished":0,"c":0,"l":1744034551656059,"d":83},{"name":"task_result","f":1744034551639327,"d_finished":1837,"c":7,"l":1744034551653978,"d":1837}],"id":"72075186224037890::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;) 2025-04-07T14:02:31.656484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:02:31.539490Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4352;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4352;selected_rows=0; 2025-04-07T14:02:31.656498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:02:31.656542Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:77;profile=;; 2025-04-07T14:02:31.656616Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571360180815338:2714];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:78;fetching=ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;; 2025-04-07T14:02:31.806280Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034546002, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 4756, MsgBus: 11044 2025-04-07T14:02:20.520613Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571312466895500:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:20.540820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c63/r3tmp/tmpJYKOcs/pdisk_1.dat 2025-04-07T14:02:21.190938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:21.227152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:21.227249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:21.236719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4756, node 1 2025-04-07T14:02:21.469782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:21.469801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:21.469808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:21.469874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11044 TClient is connected to server localhost:11044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:22.432044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:22.449805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:22.465258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:22.579476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.579695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.579918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.580015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.580165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.580288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.580403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.580523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:22.580651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:22.581399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:22.581517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:22.581613Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571321056830624:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:22.637070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.637125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.637323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.637455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.637549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.637647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.637745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.637856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:22.637980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:22.638095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:22.638207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:22.638298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571321056830625:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:22.670635Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.670694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.670883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.670998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.671104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.671202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.671315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.671440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571321056830626:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
:02:22.724706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:22.724817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:22.724839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:22.724953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:22.725042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:22.725096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:22.725127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:22.725160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:22.725178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:22.725470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:22.725522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:22.725685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:22.725709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:22.726852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:22.726888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:22.727067Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:22.727089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:22.727193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:22.727214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 5 AND 6 GROUP BY level ORDER BY level; 2025-04-07T14:02:24.682518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571329646765492:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:24.689005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:24.689028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571329646765527:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:24.698427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:24.717081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571329646765529:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:25.509265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571312466895500:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:25.509334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:31.177318Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034546002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 5 AND 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1026) '('"_id" '"b495500a-50fb7404-17c2411b-2608c7bd") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $25 (Int32 '1)) (let $26 '((Nothing $2) (Int32 
'0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"5")) $25) $26))) (RangeCreate (AsList '($26 '((Just (Int32 '"6")) $25)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"level" $2)) (let $11 (StructType '('_yql_agg_0 $1) '('_yql_agg_1 $2) $10)) (let $12 '('('"_logical_id" '1085) '('"_id" '"a10ba371-ed00d41b-4073c60a-11df4bc8") '('"_wide_channels" $11))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $27 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $27 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (block '( (let $31 '('_yql_agg_0 'some '"id")) (let $32 '('_yql_agg_1 'some '"level")) (return (TKqpOlapAgg $30 '($31 $32) '('"level"))) ))))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('2))) (let $15 (StructType '('"column1" $1) '('"column2" $2) $10)) (let $16 '('('"_logical_id" '1623) '('"_id" '"b361b39b-695a9f3e-3c4dac9c-1b7cb113") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (lambda '($41 $42 $43 $44) $42 $43)) (let $35 (lambda '($45 $46 $47 $48 $49 $50) $49 (Coalesce $50 $47))) (let $36 (lambda '($51 $52 $53) $52 $53 $51)) (let $37 (WideCombiner (ToFlow $33) '"" (lambda '($38 $39 $40) $40) $34 $35 $36)) (return (FromFlow (WideSort $37 '('('2 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('2 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($54) (FromFlow (NarrowMap (ToFlow $54) (lambda '($55 $56 $57) (AsStruct '('"column1" $55) '('"column2" $56) '('"level" $57)))))) '('('"_logical_id" '1635) '('"_id" '"e8a46ddc-9665e3af-92c261f3-3623e66")))) (let $20 '($13 $17 $19)) (let $21 '('"level" '"column1" '"column2")) (let $22 (DqCnResult (TDqOutput $19 '0) $21)) (let $23 (KqpTxResultBinding $9 '0 '0)) (let $24 (KqpPhysicalTx $20 '($22) '('($7 $23)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $24) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 2070, MsgBus: 6031 2025-04-07T14:02:22.777710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571321755369793:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:22.777877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c35/r3tmp/tmprrrHNi/pdisk_1.dat 2025-04-07T14:02:23.296378Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:02:23.313892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:23.313964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:23.320388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2070, node 1 2025-04-07T14:02:23.416700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:23.416719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:23.416729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:23.416818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6031 TClient is connected to server localhost:6031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:24.127591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:24.157280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:24.172262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:24.307806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:24.308064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:24.308283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:24.308496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:24.308591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:24.308681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:24.308795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:24.308917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:24.309046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:24.309162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:24.309280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:24.309402Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571330345304913:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:24.362318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:24.362386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:24.362615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:24.362757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:24.362860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:24.362975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:24.363086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:24.363190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:24.363287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:24.363377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:24.363496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:24.363581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571330345304914:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:24.407760Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:24.407824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:24.408008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:24.408107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:24.408216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:24.408300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:24.408400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:24.408511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571330345304915:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
ription=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:24.478619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:24.478816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:24.478850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:24.478931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:24.478967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:24.479010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:24.479026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:24.479053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:24.479071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:24.479388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:24.479420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:24.479561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:24.479580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:24.479677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:24.479702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:24.479872Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:24.479893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:24.479991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:24.480029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; 2025-04-07T14:02:26.451436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571338935239796:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:26.451506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:26.451878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571338935239817:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:26.458083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:26.465624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571338935239819:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:27.758789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571321755369793:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:27.758852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:32.432987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034547000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1237) '('"_id" '"39655bae-5f2a04a1-1171f36f-34c55021") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize 
(RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Int64)) (let $11 (OptionalType $10)) (let $12 '('"level" $2)) (let $13 (StructType '('_yql_agg_0 $10) '('_yql_agg_1 $11) $12)) (let $14 '('('"_logical_id" '1296) '('"_id" '"78c7b376-bbaf514d-b7deba38-446d0a5") '('"_wide_channels" $13))) (let $15 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $29 (lambda '($31) (block '( (let $32 '('_yql_agg_0 'sum '"id")) (let $33 '('_yql_agg_1 'sum '"level")) (return (TKqpOlapAgg $31 '($32 $33) '('"level"))) ))))) (return (FromFlow $30)) ))) $14)) (let $16 (DqCnHashShuffle (TDqOutput $15 '0) '('2))) (let $17 (StructType '('"column1" $10) '('"column2" $11) $12)) (let $18 '('('"_logical_id" '2136) '('"_id" '"1ba3dea9-fa0d8edc-f071e34b-f25a8956") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($16) (lambda '($34) (block '( (let $35 (lambda '($42 $43 $44 $45) $43 $44)) (let $36 (lambda '($46 $47 $48 $49 $50 $51) (AggrAdd $47 $50) (AggrAdd $48 $51))) (let $37 (lambda '($52 $53 $54) $53 $54 $52)) (let $38 (WideCombiner (ToFlow $34) '"" (lambda '($39 $40 $41) $41) $35 $36 $37)) (return (FromFlow (WideSort $38 '('('2 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('2 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($55) (FromFlow (NarrowMap (ToFlow $55) (lambda '($56 $57 $58) (AsStruct '('"column1" $56) '('"column2" $57) '('"level" $58)))))) '('('"_logical_id" '2148) '('"_id" '"e337cae-9985a6c4-16d617b5-7793d73c")))) (let $22 '($15 $19 $21)) (let $23 '('"level" '"column1" '"column2")) (let $24 (DqCnResult (TDqOutput $21 '0) $23)) (let $25 (KqpTxResultBinding $9 '0 '0)) (let $26 (KqpPhysicalTx $22 '($24) '('($7 $25)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $26) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Avg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg [GOOD] Test command err: Trying to start YDB, gRPC: 19908, MsgBus: 14221 2025-04-07T14:01:52.256292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571195410877134:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.272845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d06/r3tmp/tmphmuabl/pdisk_1.dat 2025-04-07T14:01:52.900426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.916878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.916944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.929585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19908, node 1 2025-04-07T14:01:53.153227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:53.153257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:53.153267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:53.153355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14221 TClient is connected to server localhost:14221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:54.102906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:54.118069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:54.137637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:54.350660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.350857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.351119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.351267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.351381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.351491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.351594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.351693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.351822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.351924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.352038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.352159Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571204000812384:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.459032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.459103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.459318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.459416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.459496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.459573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.459645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.459739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.459853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.459955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.460047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.460129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571204000812391:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.551134Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.551204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.551380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.551475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.551574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.551692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.551773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.551853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571204000812396:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:32.506651Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:32.685358Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:32.685432Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:32.777171Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. 
Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:32.777267Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:32.863912Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:32.863994Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:32.954633Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:32.954713Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.053720Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:33.069351Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1215:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:33.132183Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.132259Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.226615Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:02:33.226690Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.330633Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.330716Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.437026Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.437089Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.537057Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.537119Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.593281Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:33.712606Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.712688Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:02:33.832994Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.833050Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:33.931035Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:33.931093Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:34.031563Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:34.031640Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:34.129053Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:34.129111Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1881:3011], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jr88yc3p1jwnkp5e872cnbxa. SessionId : ydb://session/3?node_id=2&id=NDMzNWNiYjktZWU5ZGQyOGMtYmRlZTQ5NTQtNzZjYTczN2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:02:34.205217Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:34.233177Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1215:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapWrite::WriteFails [GOOD] >> KqpOlap::EmptyRange >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] >> KqpOlapAggregations::JsonDoc_Exists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::WriteFails [GOOD] Test command err: Trying to start YDB, gRPC: 4025, MsgBus: 1370 2025-04-07T14:02:32.420577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571366743057478:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:32.421339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000bfd/r3tmp/tmpZpY4Rq/pdisk_1.dat 2025-04-07T14:02:33.108092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:33.108187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:33.139823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:33.149565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4025, node 1 2025-04-07T14:02:33.404880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:33.404905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:33.404913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:33.405026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1370 TClient is connected to server localhost:1370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:34.267841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:34.287056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:34.298985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:34.484003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.484247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.484547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.484654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.484777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.484900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.485045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.485140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:34.485272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:34.485376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.485477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:34.485565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:34.519815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.519882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.520087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.520221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.520311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.520392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.520478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.520552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:34.520651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:34.520786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.520862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:34.520969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:34.551101Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.551154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.551321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.551405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.551483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.551578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.551667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.551793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571375332992631:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
=SyncPortionFromChunks;id=11; 2025-04-07T14:02:34.598688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.598803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:34.598827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:34.599005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:34.599024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:34.599117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:34.599135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:34.599908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:34.599942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:34.600026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:34.600045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:34.600187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:34.600216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:34.600312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:34.600335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:34.600372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:34.600391Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:34.600415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:34.600432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:34.601417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:34.601477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:34.601663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:34.601703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.601801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:34.601819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:34.601978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:34.602008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:34.602097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:34.602116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:34.653729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:02:35.121424Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571375332992599:2291];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:35.121461Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;parent=[1:7490571375332992602:2292];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:35.121476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571375332992598:2290];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:35.128795Z node 1 :TX_COLUMNSHARD DEBUG: Server pipe connected at tablet 72075186224037889 2025-04-07T14:02:35.133362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.133404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.134025Z node 1 :TX_COLUMNSHARD DEBUG: Server pipe connected at tablet 72075186224037890 2025-04-07T14:02:35.138172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.138207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.138393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571375332992631:2293];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:35.142329Z node 1 :TX_COLUMNSHARD DEBUG: Server pipe connected at tablet 72075186224037888 2025-04-07T14:02:35.147184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571375332992598:2290];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.147234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7490571375332992598:2290];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:35.202915Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=3;fline=write_actor.cpp:40;event=TEvPutResult;blob_id=[72075186224037889:1:1:3:0:134848:0];status=BLOCKED;error=; 2025-04-07T14:02:35.202978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=3;fline=write_controller.h:65;event=IWriteController aborted;reason=cannot write blob [72075186224037889:1:1:3:0:134848:0], status: BLOCKED. reason: ; 2025-04-07T14:02:35.203112Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=3;fline=write_actor.cpp:40;event=TEvPutResult;blob_id=[72075186224037890:1:1:3:0:131944:0];status=BLOCKED;error=; 2025-04-07T14:02:35.203139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=3;fline=write_controller.h:65;event=IWriteController aborted;reason=cannot write blob [72075186224037890:1:1:3:0:131944:0], status: BLOCKED. 
reason: ; 2025-04-07T14:02:35.209208Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:7490571375332992599:2291];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037889;event=TEvWritePortionResult;tablet_id=72075186224037889;local_tx_no=6;tx_info=;fline=events.h:99;event=ev_write_error;status=STATUS_INTERNAL_ERROR;details=cannot write blob: BLOCKED;tx_id=140737488355329; 2025-04-07T14:02:35.209491Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:7490571375332992602:2292];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037890;event=TEvWritePortionResult;tablet_id=72075186224037890;local_tx_no=6;tx_info=;fline=events.h:99;event=ev_write_error;status=STATUS_INTERNAL_ERROR;details=cannot write blob: BLOCKED;tx_id=140737488355330; 2025-04-07T14:02:35.209544Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=3;fline=write_actor.cpp:40;event=TEvPutResult;blob_id=[72075186224037888:1:1:3:0:136168:0];status=BLOCKED;error=; 2025-04-07T14:02:35.209560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=3;fline=write_controller.h:65;event=IWriteController aborted;reason=cannot write blob [72075186224037888:1:1:3:0:136168:0], status: BLOCKED. reason: ; 2025-04-07T14:02:35.209665Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:7490571375332992598:2290];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;tablet_id=72075186224037888;local_tx_no=6;tx_info=;fline=events.h:99;event=ev_write_error;status=STATUS_INTERNAL_ERROR;details=cannot write blob: BLOCKED;tx_id=140737488355331; Cannot write data into shard(Request aborted) 72075186224037889 in longTx ydb://long-tx/01jr88ys3yce1z2ft51dch8zf8?node_id=1 >> KqpOlap::TableSinkWithOlapStore [GOOD] >> KqpOlap::SimpleCount [GOOD] >> KqpYql::FromBytes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 26192, MsgBus: 2433 2025-04-07T14:02:31.378394Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571362518254824:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:31.378530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c17/r3tmp/tmpLris9R/pdisk_1.dat 2025-04-07T14:02:32.101522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:32.101617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:32.109884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:32.120267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26192, node 1 2025-04-07T14:02:32.357210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:32.357246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:32.357271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:32.357376Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2433 TClient is connected to server localhost:2433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:33.024859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:33.062998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:33.173576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.173770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.174027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.174122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.174210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.174301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.174392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.174493Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.174610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.174700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.174792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.174883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571371108189956:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.239286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.239336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.239526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.239616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.239708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.239810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.239928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.240047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.240155Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.240268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.240369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.244000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571371108189955:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.284297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.284363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.284570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.284702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.284788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.284871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.284952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.285055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.285160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571371108189957: ... 
Schema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:33.722474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:33.722516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:33.722591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:33.722615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:33.722662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:33.722693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:33.722725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:33.722766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:33.723078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:33.723138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:33.723302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:33.723327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.723448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:33.723472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:33.724656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:33.724719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:33.724853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:33.724887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:33.725573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:33.725612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:33.725696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:33.725728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:33.725914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:33.725965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:33.726044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:33.726097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:33.726145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:33.726165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:33.726195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:33.726225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:33.726526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:33.726574Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:33.726747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:33.726783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.726973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:33.727005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:33.727150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:33.727194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:33.727317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:33.727343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 2025-04-07T14:02:33.786553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2025-04-07T14:02:35.725107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571379698125078:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.745598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.842918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571379698125102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.843018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.848906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571379698125107:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.859098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:02:35.870774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571379698125109:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:02:36.367353Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571362518254824:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:36.367416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:36.911099Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034556000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 21190, MsgBus: 21793 2025-04-07T14:02:32.475158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571368069823751:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:32.475196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000bed/r3tmp/tmpBhn0D1/pdisk_1.dat 2025-04-07T14:02:32.860344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21190, node 1 2025-04-07T14:02:32.907879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:32.907988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:32.917973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:32.994580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:32.994606Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:32.994617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:32.994714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21793 TClient is connected to server localhost:21793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:02:33.654309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:33.686624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:33.769600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.772645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.772979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.773113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.773225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.773346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.773440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.773540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.773647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.773767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.773881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.773966Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372364791711:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.833458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.833514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.833754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.833873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.833965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.834082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.834194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.834301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.834429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.834544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.834663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.834760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571372364791712:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.876010Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.876068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.876343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.876471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.876566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.876696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.876782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.876892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372364791713:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.876998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74905713723647917 ... 
ct.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:33.928015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:33.928052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:33.928146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:33.928176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:33.928208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:33.928226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:33.928553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:33.928637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:33.928847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:33.928882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.929009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:33.929038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:33.929212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:33.929249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:33.929336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:33.929361Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; 2025-04-07T14:02:35.717635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571380954726612:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.717696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571380954726601:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.717823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.721182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:02:35.739711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571380954726615:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:02:36.846728Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034556000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1296) '('"_id" '"acafb429-ee0f7532-1baa0277-ad60ffad") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '"6")) (let $19 (Just $18)) (let $20 (Int32 '1)) (let $21 '($19 $20)) (let $22 (If (== $18 (Int32 '2147483647)) $21 '((+ $19 $20) (Int32 '0)))) (return (ToStream (Just 
'((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (OptionalType (DataType 'Bool))) (let $11 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 (DataType 'Json)) (let $28 '($10)) (let $29 '((ResourceType '"JsonPath"))) (let $30 (DataType 'Utf8)) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $30 $31)) (let $33 '($32)) (let $34 (CallableType '() $28 '($26) $29 $33 $28)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $29 '($30))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (Bool 'false)) (let $42 (Just $41)) (let $43 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $27)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($48) (Apply $36 $48 $39 $40 $42)))) (let $47 '('?? $46 $41)) (return (KqpOlapFilter $44 $47)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($49 $50 $51) (block '( (let $52 (OptionalType $31)) (let $53 (CallableType '() $28 '($52) $29 $33 $28)) (let $54 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $53 (VoidType) '"" $35)) (let $55 (IfPresent $51 (lambda '($58) (block '( (let $59 '($27 '"" '1)) (let $60 (CallableType '() '($31) $59)) (let $61 (Udf '"Json2.Parse" (Void) (VoidType) '"" $60 (VoidType) '"" '())) (return (Just (Apply $61 $58))) ))) (Nothing $52))) (let $56 (Apply $54 $55 $39 $40 $42)) (let $57 (Apply $36 $50 $39 $40 $42)) (return (AsStruct '('"column1" $56) '('"column2" $57) '('"id" $49))) )))))) ))) '('('"_logical_id" '1366) '('"_id" '"6e1d1032-43be0f71-3457bad2-74cc1398")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($62) $62) '('('"_logical_id" '2056) '('"_id" '"d2ccab48-12a30ba9-a1c036e0-80eb9008")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $8 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $10) '('"column2" $10) $9)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::TableSinkWithOlapStore [GOOD] Test command err: Trying to start YDB, gRPC: 28758, MsgBus: 10394 2025-04-07T14:02:31.255982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571360428156071:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:31.268783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/000c10/r3tmp/tmp01wG4a/pdisk_1.dat 2025-04-07T14:02:31.816153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:31.816227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:31.818392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:31.821615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28758, node 1 2025-04-07T14:02:32.069110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:32.069134Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:32.069141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:32.069225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10394 TClient is connected to server localhost:10394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:33.042979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:33.080787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:33.285901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.286131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.286622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.286793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.286924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.287070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.287159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.287305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.287445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.287561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.287699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.287810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571369018091187:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.344011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.344068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.344473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.344568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.344681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.344787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.344894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.345004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.345089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:33.345182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.345329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:33.347267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571369018091188:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:33.394774Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:33.394840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:33.395024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:33.395131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:33.395210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:33.395315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:33.395699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:33.395831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571369018091227:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:33.395955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905713690180912 ... 
process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:33.445245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:33.445824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:33.445859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.445978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:33.446002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:33.446180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:33.446215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:33.446325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:33.446345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:33.456489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:33.456544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:33.456638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:33.456659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:33.456822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:33.456843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:33.456962Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:33.457002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:33.457088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:33.457112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:33.457161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:33.457195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:33.457493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:33.457575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:33.457774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:33.457802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:33.457915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:33.457938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:33.458129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:33.458162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:33.458273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:33.458293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:33.522170Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-07T14:02:33.563492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2616;columns=5; 2025-04-07T14:02:35.382784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571377608026135:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.382866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571377608026144:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.382914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:35.389059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:02:35.396575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571377608026149:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:02:36.252975Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571360428156071:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:36.253020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:36.969641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=9;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000002;uid_1000002;"}}]}; 2025-04-07T14:02:36.969992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=8;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000000;uid_1000000;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000001;uid_1000001;"}}]}; 2025-04-07T14:02:37.168141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=7;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976710663}],"starts":[{"inc":{"count_not_include":2},"id":281474976710663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976710663}]},"p":{"include":2147483647}}]}; 2025-04-07T14:02:37.168141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=10;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976710663}],"starts":[{"inc":{"count_not_include":2},"id":281474976710663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976710663}]},"p":{"include":2147483647}}]}; >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleCount [GOOD] Test command err: Trying to start YDB, gRPC: 5854, MsgBus: 23547 2025-04-07T14:02:33.003919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571370098315572:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:33.003961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000be9/r3tmp/tmpJf9FRP/pdisk_1.dat 2025-04-07T14:02:33.666131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:33.666246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:33.669178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:33.700105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5854, node 1 2025-04-07T14:02:33.897189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:33.897207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:33.897214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:33.897287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23547 TClient is connected to server localhost:23547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:34.607681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:34.645327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:34.662517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:34.758169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.758397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.758684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.758807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.758927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.759044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.759138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.759268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:34.759393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:34.759480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.759609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:34.759736Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571374393283531:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:34.851227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.851302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.851540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.851655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.851779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.851872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.851975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.852090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:34.852188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:34.852296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:34.852396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:34.852489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571374393283534:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:34.901306Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:34.901391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:34.901658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:34.901823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:34.901928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:34.902035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:34.902170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:34.902292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571374393283536:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
76,"d":89},{"name":"task_result","f":1744034557920553,"d_finished":2110,"c":9,"l":1744034557932370,"d":2110}],"id":"72075186224037888::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2025-04-07T14:02:37.934984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186011:2479];TabletId=72075186224037888;ScanId=1;TxId=281474976710663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:02:37.897195Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4552;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4552;selected_rows=0; 2025-04-07T14:02:37.935005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186011:2479];TabletId=72075186224037888;ScanId=1;TxId=281474976710663;ScanGen=1;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:02:37.935062Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186011:2479];TabletId=72075186224037888;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:77;profile=;; 2025-04-07T14:02:37.935150Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186011:2479];TabletId=72075186224037888;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:78;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2025-04-07T14:02:37.935730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:02:37.935819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2025-04-07T14:02:37.935887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2025-04-07T14:02:37.935919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571387278186012:2480] finished for tablet 72075186224037889 2025-04-07T14:02:37.935948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571387278186012:2480] send ScanData to [1:7490571387278185951:2472] txId: 281474976710663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:02:37.936263Z node 1 
:TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:7490571387278186012:2480] and sent to [1:7490571387278185951:2472] packs: 0 txId: 281474976710663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.013},{"events":["f_ProduceResults"],"t":0.014},{"events":["l_bootstrap"],"t":0.015},{"events":["f_ack","f_processing","f_task_result"],"t":0.019},{"events":["l_task_result"],"t":0.033},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.036}],"full":{"a":1744034557899426,"name":"_full_task","f":1744034557899426,"d_finished":0,"c":0,"l":1744034557935963,"d":36537},"events":[{"name":"bootstrap","f":1744034557913327,"d_finished":1491,"c":1,"l":1744034557914818,"d":1491},{"a":1744034557935711,"name":"ack","f":1744034557918523,"d_finished":137,"c":1,"l":1744034557918660,"d":389},{"a":1744034557935696,"name":"processing","f":1744034557918512,"d_finished":1960,"c":10,"l":1744034557932966,"d":2227},{"name":"ProduceResults","f":1744034557914377,"d_finished":1674,"c":13,"l":1744034557935903,"d":1674},{"a":1744034557935906,"name":"Finish","f":1744034557935906,"d_finished":0,"c":0,"l":1744034557935963,"d":57},{"name":"task_result","f":1744034557918675,"d_finished":1707,"c":9,"l":1744034557932964,"d":1707}],"id":"72075186224037889::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2025-04-07T14:02:37.936310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:02:37.899133Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=65368;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=65368;selected_rows=0; 2025-04-07T14:02:37.936325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:02:37.936358Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:77;profile=;; 2025-04-07T14:02:37.936403Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186012:2480];TabletId=72075186224037889;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:78;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2025-04-07T14:02:37.936820Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:02:37.936890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2025-04-07T14:02:37.936961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2025-04-07T14:02:37.936982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571387278186010:2478] finished for tablet 72075186224037890 2025-04-07T14:02:37.937006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7490571387278186010:2478] send ScanData to [1:7490571387278185951:2472] txId: 281474976710663 scanId: 1 gen: 1 tablet: 72075186224037890 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:02:37.937347Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:7490571387278186010:2478] and sent to [1:7490571387278185951:2472] packs: 0 txId: 281474976710663 scanId: 1 gen: 1 tablet: 72075186224037890 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.011},{"events":["f_ProduceResults"],"t":0.014},{"events":["l_bootstrap"],"t":0.015},{"events":["f_ack","f_processing"],"t":0.02},{"events":["f_task_result"],"t":0.024},{"events":["l_task_result"],"t":0.037},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.041}],"full":{"a":1744034557895765,"name":"_full_task","f":1744034557895765,"d_finished":0,"c":0,"l":1744034557937019,"d":41254},"events":[{"name":"bootstrap","f":1744034557907729,"d_finished":3558,"c":1,"l":1744034557911287,"d":3558},{"a":1744034557936803,"name":"ack","f":1744034557915982,"d_finished":289,"c":1,"l":1744034557916271,"d":505},{"a":1744034557936791,"name":"processing","f":1744034557915954,"d_finished":2019,"c":10,"l":1744034557933447,"d":2247},{"name":"ProduceResults","f":1744034557910549,"d_finished":1733,"c":13,"l":1744034557936971,"d":1733},{"a":1744034557936972,"name":"Finish","f":1744034557936972,"d_finished":0,"c":0,"l":1744034557937019,"d":47},{"name":"task_result","f":1744034557920363,"d_finished":1595,"c":9,"l":1744034557933446,"d":1595}],"id":"72075186224037890::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2025-04-07T14:02:37.937396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:02:37.894844Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4352;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4352;selected_rows=0; 
2025-04-07T14:02:37.937415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:02:37.937458Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:77;profile=;; 2025-04-07T14:02:37.937525Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7490571387278186010:2478];TabletId=72075186224037890;ScanId=1;TxId=281474976710663;ScanGen=1;fline=context.h:78;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2025-04-07T14:02:38.013681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571370098315572:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:38.013939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:38.120842Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034557000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 7301, MsgBus: 27329 2025-04-07T14:02:24.626061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571330761420230:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:24.626103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3d/r3tmp/tmpwpCka9/pdisk_1.dat 2025-04-07T14:02:25.287577Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:25.329556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:25.329658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:25.337835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7301, node 1 2025-04-07T14:02:25.699980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:25.700001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:25.700007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:25.700093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27329 TClient is connected to server localhost:27329 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:26.656607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:26.688918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:26.887619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:27.129225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:27.211810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:30.061577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571352236258196:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.061712Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571330761420230:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:30.095518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:30.096252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.175544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.253920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.313650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.382960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.424414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.542728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.643326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571356531226006:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.643407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.643579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571356531226011:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.646610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:30.672898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571356531226013:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 64042, MsgBus: 13759 2025-04-07T14:02:32.758305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571365126123121:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:32.864458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3d/r3tmp/tmpfpryEj/pdisk_1.dat 2025-04-07T14:02:33.016991Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:33.087987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:33.096881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:33.098975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64042, node 2 2025-04-07T14:02:33.201386Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:33.201411Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:33.201420Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:33.201534Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13759 TClient is connected to server localhost:13759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:33.722825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:33.730151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:02:33.738084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:33.831441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:34.035624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:34.107525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:36.545774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571382305993822:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:36.545845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:36.594770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.676980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.722497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.789240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.819335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.873427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.961959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571382305994320:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:36.962015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:36.962086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571382305994325:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:36.965548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:36.977836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571382305994327:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:02:37.732554Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571365126123121:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:37.732598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 6675, MsgBus: 23340 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001041/r3tmp/tmpEJ7uH2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6675, node 1 TClient is connected to server localhost:23340 TClient is connected to server localhost:23340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 9042, MsgBus: 10917 2025-04-07T14:02:25.825621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571335527150332:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:25.825668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e4a/r3tmp/tmpb2UC2f/pdisk_1.dat 2025-04-07T14:02:26.685734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:26.732578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:26.732726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:26.737079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9042, node 1 2025-04-07T14:02:26.926110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:26.926135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:26.926141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:26.926241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10917 TClient is connected to server localhost:10917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:27.900267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:27.926521Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:27.939546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:28.111402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:28.350591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:28.508410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:30.545519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571357001988288:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.545667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:30.867895Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571335527150332:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:30.868181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:30.944957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:30.996912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:31.029572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:31.078013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:31.125818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:31.188452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:31.283799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571361296956089:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:31.283878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:31.284073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571361296956094:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:31.287140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:31.316999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571361296956096:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 17952, MsgBus: 8367 2025-04-07T14:02:33.738572Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571369003963708:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e4a/r3tmp/tmpV796K9/pdisk_1.dat 2025-04-07T14:02:33.827329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:02:33.953214Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:34.000340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:34.000428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:34.005458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17952, node 2 2025-04-07T14:02:34.150499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:34.150527Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:34.150534Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:34.150640Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8367 TClient is connected to server localhost:8367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:34.746180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:34.754976Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:34.774148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:34.857334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:35.031776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:35.103879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:37.069587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571386183834428:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:37.069730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:37.129457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.169979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.242503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.274364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.348031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.396742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.482709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571386183834934:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:37.482802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:37.483067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571386183834939:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:37.486798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:37.529784Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:02:37.530068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571386183834941:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:38.722994Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571369003963708:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:38.723066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 [GOOD] >> KqpOlap::EmptyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 2547, MsgBus: 7894 2025-04-07T14:02:37.231229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571387301259480:2159];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:37.237037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000bb1/r3tmp/tmpNiomxh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2547, node 1 2025-04-07T14:02:37.607189Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:37.607207Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:37.643744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:37.643845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:37.667087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:37.683918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:37.716137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:37.716160Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:37.716170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:37.716278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7894 TClient is connected to server localhost:7894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:02:38.280644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:38.300512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:38.316172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:38.409614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:38.409849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:38.410125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:38.410264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:38.410363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:38.410439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:38.410530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:38.410658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:38.410749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:38.411144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:38.411240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:38.411332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571391596227349:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:38.474244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:38.474321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:38.474563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:38.474685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:38.474771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:38.474927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:38.475036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:38.475150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:38.475276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:38.475383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:38.475476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:38.475579Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571391596227350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:38.509375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:38.509445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:38.509648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:38.509758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:38.509888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:38.509972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:38.510070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571391596227351:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:38.510173Z node 1 :TX_COLUMNSHARD WARN: table ... 
2Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:38.561834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:38.562258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:38.562298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:38.562393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:38.562415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:38.562551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:38.562574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:38.562637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:38.562660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:38.562707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:38.562725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:38.562765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:38.562785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:38.563076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:38.563111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:38.563272Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:38.563297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:38.563436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:38.563464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:38.563652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:38.563675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:38.563780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:38.563797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:38.564509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:38.564537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:38.564614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:38.564657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:38.567898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:38.567931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:38.568006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:38.568034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:38.568087Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:38.568106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:38.568136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:38.568157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:38.568498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:38.568534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:38.568706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:38.568737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:38.569349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:38.569389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:38.569550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:38.569579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:38.569700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:38.569736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:38.613453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 2025-04-07T14:02:40.520087Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571400186162284:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:40.520175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571400186162264:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:40.520522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:40.525551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:40.539980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571400186162287:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:41.230119Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034561000, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 14551, MsgBus: 19926 2025-04-07T14:02:08.133720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571260875673566:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:08.133982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cac/r3tmp/tmp7414nS/pdisk_1.dat 2025-04-07T14:02:09.481013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:02:10.038094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:10.038178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:10.046090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:10.202529Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14551, node 1 2025-04-07T14:02:10.889213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:10.889240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:10.889248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:10.889327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19926 TClient is connected to server localhost:19926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:12.156849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:12.205657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:12.221275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:12.334150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:12.334479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:12.334782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:12.334900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:12.335022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:12.335143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:12.335238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:12.335346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:12.335455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:12.335541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:12.335653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:12.335757Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571278055543286:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:12.394680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:12.394750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:12.394994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:12.395104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:12.395202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:12.395317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:12.395450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:12.395547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:12.395634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:12.395763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:12.395868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:12.395975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571278055543282:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:12.488173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:12.488245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:12.488471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:12.488575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:12.500783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:12.501088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:12.501203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571278055543283:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04- ... TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.190070Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.411877Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.411967Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.503717Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:02:39.503795Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.604747Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.604819Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.700793Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.700864Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.794453Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:39.809144Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:39.877559Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.877630Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:39.966968Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:39.967035Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. 
TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.057489Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.057571Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.145892Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.145984Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.236032Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.236111Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.287300Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:40.398718Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.398779Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.522681Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.522741Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.614255Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.614318Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.699832Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.699914Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:40.785437Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:40.785507Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1737:3007], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhlMTVlOTYtNWFhZDBhZDAtODEwMjI5MjgtMmJhYWFkMjI=. TraceId : 01jr88ykd92sg2pznw9gtgb715. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:40.859959Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:40.885130Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestSessionDestroy >> TKesusTest::TestSessionDetach >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestQuoterResourceCreation >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestAttachOutOfSequence >> TKesusTest::TestRegisterProxyBadGeneration >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestAcquireSemaphoreTimeout >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestUnregisterProxy >> TKesusTest::TestAcquireWaiterDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] Test command err: Trying to start YDB, gRPC: 21082, MsgBus: 24781 2025-04-07T14:02:20.453945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571316497754213:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:20.454163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/000c68/r3tmp/tmpmmNIhu/pdisk_1.dat 2025-04-07T14:02:21.355562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:21.399279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:21.399378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:21.403322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:21.403703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 21082, node 1 2025-04-07T14:02:21.639931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:21.639953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:21.639962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:21.640048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24781 TClient is connected to server localhost:24781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:22.398453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:22.440816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:22.568292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.568496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.568844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.569007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.569139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.569266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.569397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.569519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:22.569639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:22.569757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:22.569857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:22.569974Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571325087689350:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:22.637914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.638000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.638233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.638337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.638443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.638556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.638658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.638770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:22.638891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:22.638986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:22.639101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:22.639231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571325087689352:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:22.700532Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:22.704793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:22.705086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:22.705207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:22.705320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:22.705410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:22.705501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:22.705588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571325087689353:2293];tablet_id=72075186224037890;process=TTxInitSch ... 
_flight_readsLoadingTime=28; 2025-04-07T14:02:42.792558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11045; 2025-04-07T14:02:42.792686Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=2;blobs=2;rows=7196;bytes=263888;raw_bytes=8451436; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 72075186224037890 2025-04-07T14:02:42.792765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:02:42.792811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:02:42.792854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037890; 2025-04-07T14:02:42.792914Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:02:42.792943Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:02:42.792975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:02:42.793002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:42.793028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:42.793069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:42.793107Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.793125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.793148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:42.793192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:42.793218Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:42.793521Z node 1 :TX_COLUMNSHARD DEBUG: fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:02:42.793641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:02:42.793806Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-07T14:02:42.793863Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2025-04-07T14:02:42.793873Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:02:42.793882Z node 1 :TX_COLUMNSHARD DEBUG: Create periodic stats pipe to 72057594046644480 at tablet 72075186224037890 2025-04-07T14:02:42.793948Z node 1 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-04-07T14:02:42.794000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:02:42.794034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:02:42.794065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:42.794087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:42.794123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:42.794155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.794172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.794193Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:42.794231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:02:42.794254Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:42.794365Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:02:42.794395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=column_engine_logs.cpp:557;event=OnTieringModified;new_count_tierings=0; 2025-04-07T14:02:42.794456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=88; 2025-04-07T14:02:42.794498Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=2;path_id=3; 2025-04-07T14:02:42.794539Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=2;path_id=3; 2025-04-07T14:02:42.794749Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=3; 2025-04-07T14:02:42.794783Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; 2025-04-07T14:02:42.794917Z node 1 :TX_COLUMNSHARD DEBUG: Registered with mediator time cast at tablet 72075186224037890 2025-04-07T14:02:42.794948Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1744034562000 at tablet 72075186224037890 2025-04-07T14:02:42.794977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2025-04-07T14:02:42.795009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:02:42.795026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:02:42.795049Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:02:42.795084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:02:42.795108Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.795126Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:02:42.795149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:02:42.795199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7490571410987038252:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:02:42.798334Z node 1 :TX_COLUMNSHARD DEBUG: Connected to 72057594046644480 at tablet 72075186224037890 2025-04-07T14:02:43.232913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7490571410987038222:3080];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:43.244732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7490571410987038217:3079];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:43.246657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7490571410987038216:3078];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:43.279567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7490571410987038252:3081];fline=actor.cpp:33;event=skip_flush_writing; |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestKesusConfig |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |90.4%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] Test command err: Trying to start YDB, gRPC: 29278, MsgBus: 23945 2025-04-07T14:01:51.959319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571191282779627:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:51.959364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dab/r3tmp/tmp0K19nD/pdisk_1.dat 2025-04-07T14:01:52.413613Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 29278, node 1 2025-04-07T14:01:52.413727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.418234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:52.451056Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:52.451441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.463950Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:01:52.511204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.511226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.511240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.511347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23945 TClient is connected to server localhost:23945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:53.170189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:53.224288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:53.573636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.573875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.574154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.574330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.574424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.574533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.574664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.574809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.574931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.575042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.575192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.575301Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7490571199872714899:2310];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.612720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.612800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.613022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.613107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.613227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.613331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.613429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.613514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:53.613620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:53.613727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:53.613821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:53.613966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571199872714865:2307];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:53.646963Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:53.647024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:53.647221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:53.647354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:53.647475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:53.647581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:53.647671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:53.647778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571199872714846:2300];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normaliz ... 
ulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2025-04-07T14:02:36.870796Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2025-04-07T14:02:36.871583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725680:0, at schemeshard: 72057594046644480 2025-04-07T14:02:36.873156Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2025-04-07T14:02:36.878589Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 
72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2025-04-07T14:02:36.886621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725681:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2025-04-07T14:02:37.838611Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2025-04-07T14:02:37.839664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725682:0, at schemeshard: 72057594046644480 2025-04-07T14:02:37.841033Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } 
TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2025-04-07T14:02:37.846211Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_FINISHED 2025-04-07T14:02:39.687446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559001, txId: 18446744073709551615] shutting down [[57643u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57643u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x17AAF25E 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:405: Execute_ @ 0x17AB51FA 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7EFC7C007D8F 18. ??:0: ?? 
@ 0x7EFC7C007E3F 19. ??:0: ?? @ 0x15851028 >> TKesusTest::TestSessionTimeoutAfterDetach >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionStealing >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> KqpOlapSysView::StatsSysViewAggregation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-04-07T14:02:46.439346Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.509686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.510137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.561706Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7277762517509535035, session=0, seqNo=0) 2025-04-07T14:02:46.562671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.584084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7277762517509535035, session=1) 2025-04-07T14:02:46.584393Z 
node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=3593460004796544276, session=0, seqNo=0) 2025-04-07T14:02:46.584488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:46.596361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=3593460004796544276, session=2) 2025-04-07T14:02:46.597606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:46.607175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:46.607347Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:46.619403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:46.619747Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-07T14:02:46.619872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-07T14:02:46.619956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-07T14:02:46.637337Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=112) 2025-04-07T14:02:46.637679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2025-04-07T14:02:46.637766Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-07T14:02:46.637974Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:46.638055Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-04-07T14:02:46.638121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-07T14:02:46.638209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-07T14:02:46.650377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2025-04-07T14:02:46.650457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=222) 2025-04-07T14:02:46.650500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=223) 2025-04-07T14:02:46.650841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=334, name="Lock2") 2025-04-07T14:02:46.650965Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-07T14:02:46.651022Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:02:46.663165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=334) 2025-04-07T14:02:46.663623Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[1:159:2183], cookie=15500029621447798715, name="Lock1") 2025-04-07T14:02:46.663693Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2183], cookie=15500029621447798715) 2025-04-07T14:02:46.664031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:162:2186], cookie=14765013314122400853, name="Lock2") 2025-04-07T14:02:46.664090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:162:2186], cookie=14765013314122400853) 2025-04-07T14:02:46.674668Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.674756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.675289Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.676038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.692616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.692773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:02:46.692820Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-07T14:02:46.693179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:202:2216], cookie=11849149276819997759, name="Lock1") 2025-04-07T14:02:46.693260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:202:2216], cookie=11849149276819997759) 2025-04-07T14:02:46.693847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:210:2223], cookie=3724968237885972390, name="Lock2") 2025-04-07T14:02:46.693902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:210:2223], cookie=3724968237885972390) 2025-04-07T14:02:47.460340Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.460449Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.476605Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.476732Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.502902Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.503764Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=11631358742326202880, session=0, seqNo=0) 2025-04-07T14:02:47.503917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.516192Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=11631358742326202880, session=1) 2025-04-07T14:02:47.516436Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:129:2155], cookie=14579347855967836487, session=0, seqNo=0) 2025-04-07T14:02:47.516581Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:47.531907Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:129:2155], cookie=14579347855967836487, session=2) 2025-04-07T14:02:47.533005Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 
2025-04-07T14:02:47.533154Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:47.533246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.545343Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=111) 2025-04-07T14:02:47.545667Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-07T14:02:47.545800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-07T14:02:47.545884Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-07T14:02:47.558042Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=112) 2025-04-07T14:02:47.558417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=333, session=1, semaphore="Lock1" count=1) 2025-04-07T14:02:47.558661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.558761Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-07T14:02:47.558870Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-07T14:02:47.570763Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=333) 2025-04-07T14:02:47.570846Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=222) 2025-04-07T14:02:47.570874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=223) 2025-04-07T14:02:47.571326Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:155:2179], cookie=12382846263522123708, name="Lock1") 2025-04-07T14:02:47.571405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:155:2179], cookie=12382846263522123708) 2025-04-07T14:02:47.571801Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:158:2182], cookie=10521133496589590521, name="Lock2") 2025-04-07T14:02:47.571864Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:158:2182], cookie=10521133496589590521) 2025-04-07T14:02:47.572232Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:161:2185], cookie=3589103276330884823, name="Lock1") 2025-04-07T14:02:47.572296Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:161:2185], cookie=3589103276330884823) 2025-04-07T14:02:47.572786Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2188], cookie=10932866988250606291, name="Lock2") 2025-04-07T14:02:47.572861Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2188], cookie=10932866988250606291) 2025-04-07T14:02:47.573129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=444, session=2, 
semaphore="Lock2" count=1) 2025-04-07T14:02:47.573240Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:02:47.585780Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=444) 2025-04-07T14:02:47.586515Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:169:2193], cookie=16636988962167303006, name="Lock2") 2025-04-07T14:02:47.586624Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:169:2193], cookie=16636988962167303006) 2025-04-07T14:02:47.587137Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:172:2196], cookie=5313964329107900822, name="Lock2") 2025-04-07T14:02:47.587220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:172:2196], cookie=5313964329107900822) 2025-04-07T14:02:47.602053Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.602180Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.602761Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.603366Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.665690Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.665853Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.665904Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-07T14:02:47.665933Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-07T14:02:47.665957Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:02:47.666237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:212:2226], cookie=12362130838627737445, name="Lock1") 2025-04-07T14:02:47.666301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:212:2226], cookie=12362130838627737445) 2025-04-07T14:02:47.666745Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:220:2233], cookie=4630016909039369790, name="Lock2") 2025-04-07T14:02:47.666798Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:220:2233], cookie=4630016909039369790) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 10106, MsgBus: 14938 2025-04-07T14:02:23.530402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571328752751809:2248];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.530454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3a/r3tmp/tmpnHDBcq/pdisk_1.dat 2025-04-07T14:02:24.246603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:24.246711Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:24.249862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:24.292411Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10106, node 1 2025-04-07T14:02:24.509187Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:24.509208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:24.509218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:24.509294Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14938 TClient is connected to server localhost:14938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:25.609353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:25.621795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:25.628180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:25.810786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:26.051878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:26.131539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:28.233040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571350227589789:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:28.254134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:28.532775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571328752751809:2248];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:28.532852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:28.741298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:28.780789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:28.884773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:28.957855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:29.026546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:29.107799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:29.161982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571354522557589:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:29.162090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:29.163932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571354522557594:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:29.168280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:29.178722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571354522557596:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:30.309660Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODllZjM1NDAtNGFhOTk5ODYtYmFhYWEzYmQtZjA2ZThiYTU=, ActorId: [1:7490571358817525206:2464], ActorState: ExecuteState, TraceId: 01jr88ymgk4kczkzggvyq4v4qq, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.359652Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 5 2025-04-07T14:02:30.371990Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjNkZGI0ZS0yYTY4NGI4Zi0xNDQ5MTkwNi0zYmMyMTRlNQ==, ActorId: [1:7490571358817525227:2474], ActorState: ExecuteState, TraceId: 01jr88ymjt7d7k8khe5ab83xn6, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.416739Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmM3YjAyMDgtZjU0ZWQ2ZmUtYzU3MWUyZTctOGYzN2I0OWY=, ActorId: [1:7490571358817525238:2478], ActorState: ExecuteState, TraceId: 01jr88ymkk4nm93pxz3p3rdqpq, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.441946Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzEwMGE2MTQtNzA3ODZjZjYtOGFjMzIyMC1hYWMwMzNhNQ==, ActorId: [1:7490571358817525261:2482], ActorState: ExecuteState, TraceId: 01jr88ymmxbgazck78b1jjybtd, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.480649Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWEwZWQwNDMtMjk1Njg5ZjYtMzBiNzczZC04ODg0NzU2NA==, ActorId: [1:7490571358817525270:2486], ActorState: ExecuteState, TraceId: 01jr88ymnmdjea9gy4m6j2d7ya, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.580631Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNiM2YwMTAtYWRkN2JkNWEtNTEyNGQ5ZjktN2I4YjI0ZmM=, ActorId: [1:7490571358817525307:2490], ActorState: ExecuteState, TraceId: 01jr88ymqt0nkhes02nx4x66xq, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.584766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034550559, txId: 281474976710671] shutting down 2025-04-07T14:02:30.697732Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBjMjA1YjMtNzdmZTliNjMtYTQ2ZjdiMi0yMTMyYjdiMA==, ActorId: [1:7490571358817525401:2509], ActorState: ExecuteState, TraceId: 01jr88ymtddy99tb96rq1cfxvm, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.778878Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWNjMDY2NjctYWUzYzNhNDktYzFhMmQwOTAtZWM5MTY1N2M=, ActorId: [1:7490571358817525417:2516], ActorState: ExecuteState, TraceId: 01jr88ymxw48n7abr8e4bj0dyc, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.791866Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 15 2025-04-07T14:02:30.842969Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034550811, txId: 281474976710673] shutting down 2025-04-07T14:02:30.855965Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034550811, txId: 281474976710674] shutting down 2025-04-07T14:02:30.908960Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJkMTZiNS02MmUyMjQ5NS1lMjFhMWEzYy01MmFmNTA4Yg==, ActorId: [1:7490571358817525590:2543], ActorState: ExecuteState, TraceId: 01jr88yn193amp7xg80ybhgq78, Create QueryResponse for error on request, msg: 2025-04-07T14:02:30.998594Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744034550972, txId: 281474976710677] shutting down 2025-04-07T14:02:31.015193Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034550986, txId: 281474976710678] shutting down 2025-04-07T14:02:31.016122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ3MGI2ZDAtYTQxYjFiMzQtZWFlZDYyZGEtM2M5MjVlYTA=, ActorId: [1:7490571358817525657:2549], ActorState: ExecuteState, TraceId: 01jr88yn4eek3j3an8m9v57b4a, Create QueryResponse for error on request, msg: 2025-04-07T14:02:31.026136Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 19 2025-04-07T14:02:31.132573Z node 1 :KQP_SESSION WARN: SessionId: ydb:/ ... r snapshot: [step: 1744034558287, txId: 281474976710779] shutting down 2025-04-07T14:02:38.444923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjUyYzZjZTktZmVkNjJmYzgtNTZhYTU3MDctNGNiNGQ4NWU=, ActorId: [1:7490571393177269368:3477], ActorState: ExecuteState, TraceId: 01jr88yw9c6fts6x5zn0gkbtfh, Create QueryResponse for error on request, msg: 2025-04-07T14:02:38.495310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034558511, txId: 281474976710781] shutting down 2025-04-07T14:02:38.640775Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Q3YzRmZGItNDhhZmRiMjItMmZlZDc2ZWMtZjdkZTQxMzA=, ActorId: [1:7490571393177269449:3496], ActorState: ExecuteState, TraceId: 01jr88ywfn6skc6vc8n09fth0f, Create QueryResponse for error on request, msg: 2025-04-07T14:02:38.657964Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034558686, txId: 281474976710783] shutting down 2025-04-07T14:02:38.835290Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034558861, txId: 281474976710785] shutting down 2025-04-07T14:02:39.044623Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTU3NzVmNzktMTQ2Yzg0MGYtZGI4Nzc2M2EtNTA3M2ZlYg==, ActorId: [1:7490571393177269762:3545], ActorState: ExecuteState, TraceId: 01jr88yww17kjzfmvts1x1vqry, Create QueryResponse for error on request, msg: 2025-04-07T14:02:39.094687Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559092, txId: 281474976710787] shutting down 2025-04-07T14:02:39.258524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:39.258569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:39.263005Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODAxNzQ3ZGEtOGVjOWQ1ODEtMWM1NThjODAtZTg1ODA5ZDc=, ActorId: [1:7490571397472237104:3555], ActorState: ExecuteState, TraceId: 01jr88yx2p5b262f20rh22ysqm, Create QueryResponse for error on request, msg: 2025-04-07T14:02:39.301356Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559295, txId: 281474976710789] shutting down 2025-04-07T14:02:39.480408Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q1YmFiYmYtN2IxZmE1NC00M2QxZjE1Yi1hNjg0YTk1ZQ==, ActorId: [1:7490571397472237225:3577], ActorState: ExecuteState, TraceId: 01jr88yx9gfz2vne7bt7t44njk, Create QueryResponse for error on request, msg: 2025-04-07T14:02:39.577091Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559575, txId: 281474976710791] shutting down 2025-04-07T14:02:39.707767Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=ZWE5ZmNiYzktYmQzNjgyNC01NDQ0NDcxMi1lYTVhNmE4Yg==, ActorId: [1:7490571397472237306:3596], ActorState: ExecuteState, TraceId: 01jr88yxg68hdvj96wr8d0pqmw, Create QueryResponse for error on request, msg: 2025-04-07T14:02:39.735736Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559743, txId: 281474976710793] shutting down 2025-04-07T14:02:39.927338Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034559932, txId: 281474976710795] shutting down 2025-04-07T14:02:39.931165Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY1ZDM0YzgtN2M3ODBkMDMtMzZjMjQ0YmQtNTQzNmEwNDY=, ActorId: [1:7490571397472237491:3624], ActorState: ExecuteState, TraceId: 01jr88yxq600r2vnzwe773w19x, Create QueryResponse for error on request, msg: 2025-04-07T14:02:40.153618Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQyNDljZC01ZTllNzg3MS1iOGI4N2JiNS05N2YyNmNmZg==, ActorId: [1:7490571397472237615:3643], ActorState: ExecuteState, TraceId: 01jr88yxy6414n51qv6s5y6ark, Create QueryResponse for error on request, msg: 2025-04-07T14:02:40.168381Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034560191, txId: 281474976710797] shutting down 2025-04-07T14:02:40.379426Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034560401, txId: 281474976710799] shutting down Trying to start YDB, gRPC: 13257, MsgBus: 63181 2025-04-07T14:02:41.247604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571406087993923:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:41.247667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e3a/r3tmp/tmpGKp1zy/pdisk_1.dat 2025-04-07T14:02:41.326413Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:41.348867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:41.348978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:41.350768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13257, node 2 2025-04-07T14:02:41.391338Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:41.391365Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:41.391371Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:41.391477Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63181 TClient is connected to server localhost:63181 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:02:41.863991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.880635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:41.968155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:42.096275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:42.159635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:44.484454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571418972897518:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:44.484538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:44.528359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.608704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.636618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.667765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.695129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.724132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:44.779805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571418972898013:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:44.779917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:44.780569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571418972898018:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:44.784371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:44.793472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571418972898020:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:02:46.247884Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571406087993923:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:46.247976Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-04-07T14:02:46.439399Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.510472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.510877Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.561999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=5157074127673437286, session=0, seqNo=0) 2025-04-07T14:02:46.562773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.585160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=5157074127673437286, session=1) 2025-04-07T14:02:46.586853Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=2307946630543783422, session=2) 2025-04-07T14:02:46.586949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=2307946630543783422) 2025-04-07T14:02:46.587485Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=84763856631190725 2025-04-07T14:02:46.588173Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6749044010304816474, session=1, seqNo=0) 2025-04-07T14:02:46.600172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6749044010304816474, session=1) 2025-04-07T14:02:46.600423Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:46.607193Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:46.607382Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:46.607666Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=5994889659139873191, session=1) 2025-04-07T14:02:46.617996Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute 
(session=1) 2025-04-07T14:02:46.618065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:02:46.618106Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-07T14:02:46.633358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:46.633414Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=5994889659139873191) 2025-04-07T14:02:46.633461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:02:47.464651Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.464772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.479365Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.479447Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.509691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.510013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:128:2154], cookie=917616490568558246, path="") 2025-04-07T14:02:47.522867Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:128:2154], cookie=917616490568558246, status=SUCCESS) 2025-04-07T14:02:47.523523Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=111, session=0, seqNo=0) 2025-04-07T14:02:47.523639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.523800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:137:2161], cookie=4016961753264738044, session=1) 2025-04-07T14:02:47.534114Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-07T14:02:47.534202Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:02:47.546191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=111, session=1) 2025-04-07T14:02:47.546338Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:137:2161], cookie=4016961753264738044) 2025-04-07T14:02:47.546392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:02:47.897641Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.897747Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.913441Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.913902Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.943461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.944352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-07T14:02:47.944507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.956390Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=12345, session=1) 2025-04-07T14:02:47.957107Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=23456, session=1, seqNo=0) 
2025-04-07T14:02:47.971345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=23456, session=1) 2025-04-07T14:02:48.312103Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.312194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.330207Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.330316Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.345430Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.346090Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-07T14:02:48.346250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.373484Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-07T14:02:48.374705Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-07T14:02:48.388164Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestSemaphoreSessionFailures [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewAggregation [GOOD] Test command err: Trying to start YDB, gRPC: 24166, MsgBus: 61787 2025-04-07T14:00:32.117249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570851628084931:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:32.117471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fbb/r3tmp/tmprSEzwk/pdisk_1.dat 2025-04-07T14:00:33.418211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:33.418291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:33.441501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:33.498146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:33.501437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24166, node 1 2025-04-07T14:00:33.851565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:33.851586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:33.851591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:33.851660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61787 TClient is connected to server localhost:61787 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:35.262659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:35.280976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:35.299513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:35.409262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:35.409464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:35.409698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:35.409805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:35.409890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:35.409988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:35.410070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:35.410163Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:35.410272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:35.410371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:35.410455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:35.410564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570864512987365:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:35.468500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:35.474894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:35.475151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:35.475262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:35.475349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:35.475423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:35.475501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:35.475597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:35.475727Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:35.475858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:35.475943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:35.476035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570864512987401:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:35.538147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:35.538210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:35.538404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:35.538505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:35.538631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:35.538755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:35.538936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570864512987367:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04- ... 
22808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT SUM(Rows) as rows, FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind != 'INACTIVE' RESULT: 2025-04-07T14:01:16.254718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571040606653504:6638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.254719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571040606653512:6641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.254825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:16.259697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-07T14:01:16.275302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571040606653518:6642], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-07T14:01:18.035813Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034477201, txId: 281474976710666] shutting down rows: 3000000 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind != 'INACTIVE' GROUP BY PathId ORDER BY PathId RESULT: 2025-04-07T14:01:22.715164Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034478594, txId: 281474976710669] shutting down rows: 500000 PathId: 3 rows: 1000000 PathId: 4 rows: 1500000 PathId: 5 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: 2025-04-07T14:01:36.799387Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88wjh8cfsg5c1s00nw3s9m", SessionId: ydb://session/3?node_id=1&id=ODg0MjE1ZDgtNWFjZTUzODItMzFhODRjMzEtZmM2M2JlYTc=, Slow query, duration: 14.068203s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT\n PathId,\n SUM(Rows) as rows,\n SUM(BlobRangeSize) as bytes,\n SUM(RawBytes) as bytes_raw\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE\n Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED')\n GROUP BY PathId\n ORDER BY rows DESC\n LIMIT 10\n ", parameters: 0b rows: 1500000 bytes: 11064488 bytes_raw: 352239000 PathId: 5 rows: 1000000 bytes: 7465168 bytes_raw: 234789000 PathId: 4 rows: 500000 bytes: 3848160 bytes_raw: 117339000 PathId: 3 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: 2025-04-07T14:01:36.805192Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034483694, txId: 281474976710671] shutting down 2025-04-07T14:01:51.007907Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88x09a3nnd19y4xh924jte", SessionId: ydb://session/3?node_id=1&id=OTBlYmEwZTQtY2ZiYjU5MzAtZWE2OGI5N2MtNTU1ZjJkNjU=, Slow query, duration: 14.195572s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT\n PathId,\n SUM(Rows) as rows,\n SUM(BlobRangeSize) as bytes,\n SUM(RawBytes) as bytes_raw\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE\n PathId == UInt64(\"3\") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED')\n GROUP BY PathId\n ORDER BY rows DESC\n LIMIT 10\n ", parameters: 0b rows: 500000 bytes: 3848160 bytes_raw: 117339000 PathId: 3 2025-04-07T14:01:51.012039Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034498238, txId: 281474976710673] shutting down ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId >= UInt64("4") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: 2025-04-07T14:02:15.337044Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88xe5b2dwpceycj3rnatqq", SessionId: 
ydb://session/3?node_id=1&id=N2FkYjVjMy03OTFhNzZiNS02ZWZhOTQ0OS1hZjYyNmRkNQ==, Slow query, duration: 24.315461s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT\n PathId,\n SUM(Rows) as rows,\n SUM(BlobRangeSize) as bytes,\n SUM(RawBytes) as bytes_raw\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE\n PathId >= UInt64(\"4\") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED')\n GROUP BY PathId\n ORDER BY rows DESC\n LIMIT 10\n ", parameters: 0b rows: 1500000 bytes: 11064488 bytes_raw: 352239000 PathId: 5 rows: 1000000 bytes: 7465168 bytes_raw: 234789000 PathId: 4 ==================================== QUERY: SELECT PathId, TabletId, Kind FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, TabletId, Kind RESULT: 2025-04-07T14:02:15.345030Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034512212, txId: 281474976710675] shutting down 2025-04-07T14:02:30.370213Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88y5y600ysrdh09j6312pk", SessionId: ydb://session/3?node_id=1&id=ODE5ZmIyMTMtZGJkOWE4NTgtY2I1YjlmNjItNmNhZjc2ZjM=, Slow query, duration: 15.000406s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, TabletId, Kind\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE Activity == 1\n GROUP BY PathId, TabletId, Kind\n ", parameters: 0b TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 5 ==================================== QUERY: SELECT count(distinct(PathId)) as PathsCount, count(distinct(Kind)) as KindsCount, count(distinct(TabletId)) as TabletsCount FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 RESULT: 2025-04-07T14:02:30.373865Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034536428, txId: 281474976710677] shutting down 2025-04-07T14:02:41.567788Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88ymkh9xqppvwmtn8w4tsd", SessionId: ydb://session/3?node_id=1&id=Y2RjMjhiZDAtYWY5NmI4ZC0yYjllOGUzYS03OWI3ZTA0Yw==, Slow query, duration: 11.179671s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT\n count(distinct(PathId)) as PathsCount,\n count(distinct(Kind)) as KindsCount,\n count(distinct(TabletId)) as TabletsCount\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE Activity == 1\n ", parameters: 0b 2025-04-07T14:02:41.568678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034552974, txId: 281474976710679] shutting down PathsCount: 3 TabletsCount: 4 KindsCount: 1 ==================================== QUERY: SELECT PathId, count(*), sum(Rows), sum(BlobRangeSize), sum(RawBytes) FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId ORDER BY PathId RESULT: column2: 500000 column3: 3848160 column1: 1500 column4: 117339000 PathId: 3 column2: 1000000 column3: 7465168 column1: 1500 column4: 234789000 PathId: 4 column2: 1500000 column3: 11064488 column1: 1500 column4: 352239000 
PathId: 5 2025-04-07T14:02:47.592949Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034562284, txId: 281474976710682] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestReleaseSemaphore [GOOD] Test command err: 2025-04-07T14:02:46.439748Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.508998Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.509520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.562430Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.343210Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.343320Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.361016Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.361136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.389567Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.790760Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.790852Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.805305Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.805414Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.824809Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.825891Z node 3 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 4 2025-04-07T14:02:47.826206Z node 3 :KESUS_TABLET TRACE: Got TEvServerDisconnected([3:181:2155]) 2025-04-07T14:02:48.399976Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.400062Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.415290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.415433Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.433120Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.433887Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=15058902208417138226, session=0, seqNo=0) 2025-04-07T14:02:48.434021Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.457057Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=15058902208417138226, session=1) 2025-04-07T14:02:48.457359Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=4165524100914684862, session=0, seqNo=0) 2025-04-07T14:02:48.457489Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:48.469624Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=4165524100914684862, session=2) 2025-04-07T14:02:48.469984Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=111, name="Lock1") 2025-04-07T14:02:48.483299Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=111) 2025-04-07T14:02:48.483538Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:48.483698Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:48.483788Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:48.501412Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2025-04-07T14:02:48.501681Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=333, name="Lock1") 2025-04-07T14:02:48.513669Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=333) 2025-04-07T14:02:48.864333Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.864439Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.881680Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.881771Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.896363Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.896838Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[6:128:2154], cookie=10482646784329102889, session=0, seqNo=0) 2025-04-07T14:02:48.897001Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.920187Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[6:128:2154], cookie=10482646784329102889, session=1) 2025-04-07T14:02:48.920542Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[6:128:2154], cookie=937881262510096634, session=0, seqNo=0) 2025-04-07T14:02:48.920722Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:48.932972Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[6:128:2154], cookie=937881262510096634, session=2) 2025-04-07T14:02:48.933667Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[6:139:2163], cookie=1560872240598018161, name="Sem1", limit=1) 2025-04-07T14:02:48.933840Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:02:48.946503Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[6:139:2163], cookie=1560872240598018161) 2025-04-07T14:02:48.946774Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:128:2154], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:48.946902Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:02:48.947055Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:128:2154], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-07T14:02:48.961523Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:128:2154], cookie=111) 2025-04-07T14:02:48.961609Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:128:2154], cookie=222) 2025-04-07T14:02:48.962316Z node 6 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:147:2171], cookie=2712725921772026462, name="Sem1") 2025-04-07T14:02:48.962429Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:147:2171], cookie=2712725921772026462) 2025-04-07T14:02:48.962924Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:150:2174], cookie=13349366014250193199, name="Sem1") 2025-04-07T14:02:48.963000Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:150:2174], cookie=13349366014250193199) 2025-04-07T14:02:48.963264Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:128:2154], cookie=333, name="Sem1") 2025-04-07T14:02:48.963387Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-04-07T14:02:48.975332Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:128:2154], cookie=333) 2025-04-07T14:02:48.975974Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:155:2179], cookie=5242277885898691553, name="Sem1") 2025-04-07T14:02:48.976061Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:155:2179], cookie=5242277885898691553) 2025-04-07T14:02:48.976562Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:158:2182], cookie=12360917682162667849, name="Sem1") 2025-04-07T14:02:48.976636Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:158:2182], cookie=12360917682162667849) 2025-04-07T14:02:48.976950Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:128:2154], cookie=444, name="Sem1") 2025-04-07T14:02:48.977056Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-07T14:02:48.989105Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:128:2154], cookie=444) 2025-04-07T14:02:48.989862Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:163:2187], cookie=16171332816442980588, name="Sem1") 2025-04-07T14:02:48.989956Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:163:2187], cookie=16171332816442980588) 2025-04-07T14:02:48.990544Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:166:2190], cookie=2602929261415845974, name="Sem1") 2025-04-07T14:02:48.990616Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:166:2190], cookie=2602929261415845974) >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-04-07T14:02:46.439375Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439514Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.508327Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.508851Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.561885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562666Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Execute (sender=[1:130:2156], cookie=2267004295820479961, session=0, seqNo=222) 2025-04-07T14:02:46.562794Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.584856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=2267004295820479961, session=1) 2025-04-07T14:02:46.585157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=5494849904589244246, session=1, seqNo=111) 2025-04-07T14:02:46.596954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=5494849904589244246, session=1) 2025-04-07T14:02:47.454167Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.454274Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.470093Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.470189Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.494527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.495000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=111, session=0, seqNo=42) 2025-04-07T14:02:47.495130Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.495294Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=222, session=1, seqNo=41) 2025-04-07T14:02:47.509399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=111, session=1) 2025-04-07T14:02:47.509489Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=222, session=1) 2025-04-07T14:02:47.881338Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.881432Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.896062Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.896450Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.919963Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.920428Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=18242542651529502676, session=0, seqNo=0) 2025-04-07T14:02:47.920576Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.932487Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=18242542651529502676, session=1) 2025-04-07T14:02:47.934208Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2171], cookie=10788623380663875008) 2025-04-07T14:02:47.934299Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2171], cookie=10788623380663875008) 2025-04-07T14:02:48.286651Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.286770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.309156Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.309295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.324147Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.743246Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.743371Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.760203Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.760311Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.777820Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.778338Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=15442522384918981612, session=0, seqNo=0) 2025-04-07T14:02:48.778492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.802218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=15442522384918981612, session=1) 2025-04-07T14:02:48.802584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:48.802751Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:48.802844Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:48.815468Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2025-04-07T14:02:48.816681Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=1816027524431947396, name="Sem1", limit=42) 2025-04-07T14:02:48.816846Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-07T14:02:48.829371Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=1816027524431947396) 2025-04-07T14:02:48.830020Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2173], cookie=1612469165100911493, name="Sem1", limit=42) 2025-04-07T14:02:48.841948Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2173], cookie=1612469165100911493) 2025-04-07T14:02:48.842410Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:154:2178], cookie=1498826905781201361, name="Sem1", limit=51) 2025-04-07T14:02:48.854524Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:154:2178], cookie=1498826905781201361) 2025-04-07T14:02:48.854967Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:159:2183], cookie=5753208190777502389, name="Lock1", limit=42) 2025-04-07T14:02:48.867183Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:159:2183], cookie=5753208190777502389) 2025-04-07T14:02:48.867754Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:164:2188], cookie=2403880691103517747, name="Lock1", limit=18446744073709551615) 2025-04-07T14:02:48.880833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:164:2188], cookie=2403880691103517747) 2025-04-07T14:02:48.881567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2193], cookie=6582353580688135054, name="Sem1") 2025-04-07T14:02:48.881671Z node 5 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2193], cookie=6582353580688135054) 2025-04-07T14:02:48.882258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:172:2196], cookie=3772197639469171148, name="Sem2") 2025-04-07T14:02:48.882328Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:172:2196], cookie=3772197639469171148) 2025-04-07T14:02:48.895787Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.895908Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.896374Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.896818Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.941037Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.941179Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:48.941527Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:212:2226], cookie=257238364603285949, name="Sem1") 2025-04-07T14:02:48.941602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:212:2226], cookie=257238364603285949) 2025-04-07T14:02:48.942228Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:219:2232], cookie=3429339819050796976, name="Sem2") 2025-04-07T14:02:48.942294Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:219:2232], cookie=3429339819050796976) >> KqpOlapAggregations::Aggregation_MinL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-04-07T14:02:46.439400Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.509539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.510471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.562145Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562614Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=8101997517852677949, session=0, seqNo=0) 2025-04-07T14:02:46.562771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.585691Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=8101997517852677949, session=1) 2025-04-07T14:02:46.585963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=8362349455712931853, session=0, seqNo=0) 2025-04-07T14:02:46.586065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:46.598457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=8362349455712931853, session=2) 2025-04-07T14:02:46.598753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-07T14:02:46.607175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 
2025-04-07T14:02:46.607376Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:46.619554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:46.619948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:46.620252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:46.620351Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-04-07T14:02:46.632619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-04-07T14:02:46.632738Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2025-04-07T14:02:46.633276Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=1417871980130007576, name="Lock1") 2025-04-07T14:02:46.633365Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=1417871980130007576) 2025-04-07T14:02:47.475916Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.476023Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.493747Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.493854Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.525545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.525991Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=4491012615908703974, session=0, seqNo=0) 2025-04-07T14:02:47.526104Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.539801Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=4491012615908703974, session=1) 2025-04-07T14:02:47.540142Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=18235816021966692167, session=0, seqNo=0) 2025-04-07T14:02:47.540258Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:47.552006Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=18235816021966692167, session=2) 2025-04-07T14:02:47.552273Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:47.552392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:47.552494Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.564223Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=111) 2025-04-07T14:02:47.564483Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.564755Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:47.576394Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=222) 2025-04-07T14:02:47.576462Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=333) 2025-04-07T14:02:47.576917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:144:2168], cookie=3832709540227773002, name="Lock1") 2025-04-07T14:02:47.576999Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:144:2168], cookie=3832709540227773002) 2025-04-07T14:02:47.577436Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:147:2171], cookie=684897734348272773, name="Lock1") 2025-04-07T14:02:47.577501Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:147:2171], cookie=684897734348272773) 2025-04-07T14:02:47.902157Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.902306Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.912919Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.913350Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.940323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.940799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=16176466480251069518, session=0, seqNo=0) 2025-04-07T14:02:47.940953Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.956791Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=16176466480251069518, session=1) 2025-04-07T14:02:47.957156Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=13568593970642227473, session=0, seqNo=0) 2025-04-07T14:02:47.957288Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:47.969295Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=13568593970642227473, session=2) 2025-04-07T14:02:47.969924Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:47.970043Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:47.970109Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.982251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-07T14:02:47.982614Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.982973Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.983064Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-07T14:02:47.995366Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=222) 2025-04-07T14:02:47.995461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=333) 2025-04-07T14:02:47.995919Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=17246944702746101265, name="Lock1") 2025-04-07T14:02:47.995985Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=17246944702746101265) 2025-04-07T14:02:47.996285Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2176], cookie=18204269749024084827, name="Lock1") 2025-04-07T14:02:47.996334Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2176], cookie=18204269749024084827) 2025-04-07T14:02:48.007920Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.008040Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.008632Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.009329Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.055665Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.055802Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:48.056176Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:192:2206], cookie=10730086921874239456, name="Lock1") 2025-04-07T14:02:48.056260Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:192:2206], cookie=10730086921874239456) 2025-04-07T14:02:48.056804Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:200:2213], cookie=18104642926173455760, name="Lock1") 2025-04-07T14:02:48.056872Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:200:2213], cookie=18104642926173455760) 2025-04-07T14:02:48.543505Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.543644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.561374Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.561517Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.575636Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.576137Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=15505099228040335924, session=0, seqNo=0) 2025-04-07T14:02:48.576286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.601143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=15505099228040335924, session=1) 2025-04-07T14:02:48.601478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=16378602687499853953, session=0, seqNo=0) 2025-04-07T14:02:48.601614Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:48.613729Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=16378602687499853953, session=2) 
2025-04-07T14:02:48.614070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:48.614211Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:48.614298Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:48.626600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-07T14:02:48.626928Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:48.627276Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Lock1") 2025-04-07T14:02:48.627367Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-07T14:02:48.640508Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-07T14:02:48.640616Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-07T14:02:48.975886Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.975991Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.998256Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.998408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:49.012778Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:49.021664Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=8351491368357987495, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-07T14:02:49.021978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:49.044864Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=8351491368357987495) 2025-04-07T14:02:49.045529Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=17504892563824738459, path="/Root/Res", config={ }) 2025-04-07T14:02:49.045790Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-07T14:02:49.058167Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=17504892563824738459) 2025-04-07T14:02:49.062074Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 12757080035380600963. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:49.062148Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=12757080035380600963) 2025-04-07T14:02:49.062662Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 431683346343949879. Data: { } 2025-04-07T14:02:49.062711Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=431683346343949879) 2025-04-07T14:02:49.106011Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:49.148891Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:49.179881Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:49.210764Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:49.252827Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-04-07T14:02:46.439334Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439510Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.509220Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.509789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.562972Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.563469Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=16295274877069571518, session=0, seqNo=0) 2025-04-07T14:02:46.563611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.585237Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=16295274877069571518, session=1) 2025-04-07T14:02:46.588070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[1:130:2156], cookie=3603319337751959042, session=1) 2025-04-07T14:02:46.588161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:02:46.601343Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[1:130:2156], cookie=3603319337751959042) 2025-04-07T14:02:46.602233Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:147:2171], cookie=11595504084543047210) 2025-04-07T14:02:46.602306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:147:2171], cookie=11595504084543047210) 2025-04-07T14:02:46.602910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:150:2174], cookie=13226778778415391254, session=0, seqNo=0) 2025-04-07T14:02:46.603011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:46.616431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:150:2174], cookie=13226778778415391254, session=2) 2025-04-07T14:02:46.617667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[1:130:2156], cookie=6574448506033449430, session=2) 2025-04-07T14:02:46.617775Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-04-07T14:02:46.633012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[1:130:2156], cookie=6574448506033449430) 2025-04-07T14:02:47.498234Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.498323Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.509573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.509642Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.533444Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.533733Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:128:2154], cookie=2483198279188600737, name="Sem1", limit=1) 2025-04-07T14:02:47.533851Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:02:47.546087Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:128:2154], cookie=2483198279188600737) 2025-04-07T14:02:47.546554Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:137:2161], cookie=5874434821020903056, name="Sem2", limit=1) 2025-04-07T14:02:47.546663Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-07T14:02:47.559002Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:137:2161], cookie=5874434821020903056) 2025-04-07T14:02:47.559543Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:142:2166], cookie=13019473103654564279, name="Sem1") 2025-04-07T14:02:47.559635Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:142:2166], cookie=13019473103654564279) 2025-04-07T14:02:47.559981Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:145:2169], cookie=12636074273945578669, name="Sem2") 2025-04-07T14:02:47.560019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:145:2169], cookie=12636074273945578669) 2025-04-07T14:02:47.569413Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.569500Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.569892Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.570331Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.606619Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.607035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:185:2199], cookie=10466038327168710971, name="Sem1") 2025-04-07T14:02:47.607144Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:185:2199], cookie=10466038327168710971) 2025-04-07T14:02:47.607753Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:191:2204], cookie=2516382374606361462, name="Sem2") 2025-04-07T14:02:47.607821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:191:2204], cookie=2516382374606361462) 2025-04-07T14:02:47.608254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:194:2207], cookie=2709221624038862978, name="Sem1", limit=1) 2025-04-07T14:02:47.620530Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:194:2207], cookie=2709221624038862978) 2025-04-07T14:02:47.621221Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:199:2212], cookie=17578091245116888077, name="Sem2", limit=1) 2025-04-07T14:02:47.633630Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:199:2212], cookie=17578091245116888077) 2025-04-07T14:02:47.634292Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:204:2217], cookie=4832079707484668000, name="Sem1") 2025-04-07T14:02:47.634431Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:204:2217], cookie=4832079707484668000) 2025-04-07T14:02:47.634982Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:207:2220], cookie=9485720881245643061, name="Sem2") 2025-04-07T14:02:47.635054Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:207:2220], cookie=9485720881245643061) 2025-04-07T14:02:47.635450Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:210:2223], cookie=577345163166421799, name="Sem1") 2025-04-07T14:02:47.649683Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:210:2223], cookie=577345163166421799) 2025-04-07T14:02:47.651835Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:215:2228], cookie=12831580209105853397, name="Sem2") 2025-04-07T14:02:47.663671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:215:2228], cookie=12831580209105853397) 2025-04-07T14:02:47.664389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:220:2233], cookie=8030732456780696944, name="Sem3") 2025-04-07T14:02:47.685373Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:220:2233], cookie=8030732456780696944) 2025-04-07T14:02:47.686044Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:225:2238], cookie=3208882666696659878, name="Sem1") 2025-04-07T14:02:47.686129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:225:2238], cookie=3208882666696659878) 2025-04-07T14:02:47.686614Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:228:2241], cookie=11576853942255238057, name="Sem2") 2025-04-07T14:02:47.686670Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:228:2241], cookie=11576853942255238057) 2025-04-07T14:02:47.687123Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:231:2244], cookie=8415671771939816144, name="Sem3") 2025-04-07T14:02:47.687172Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:231:2244], cookie=8415671771939816144) 2025-04-07T14:02:47.705736Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.705834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.706363Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.706906Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.748056Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.748394Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:271:2274], cookie=15553609268436799293, name="Sem1") 2025-04-07T14:02:47.748471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:271:2274], cookie=15553609268436799293) 2025-04-07T14:02:47.749260Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:277:2279], cookie=11112294325746027502, name="Sem2") 2025-04-07T14:02:47.749338Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:277:2279], cookie=11112294325746027502) 2025-04-07T14:02:47.750013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:280:2282], cookie=12913262893316557101, name="Sem3") 2025-04-07T14:02:47.750072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:280:2282], cookie=12913262893316557101) 2025-04-07T14:02:48.091034Z node 3 
:KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.091117Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.102149Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.102610Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.126177Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.126608Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=9453752098177686867, session=0, seqNo=0) 2025-04-07T14:02:48.126745Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.138309Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=9453752098177686867, session=1) 2025-04-07T14:02:48.138507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=14300390402021148841, session=0, seqNo=0) 2025-04-07T14:02:48.138590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:48.150513Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=14300390402021148841, session=2) 2025-04-07T14:02:48.151103Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:141:2165], cookie=15494948246620970869, name="Sem1", limit=1) 2025-04-07T14:02:48.151254Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04- ... ESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-04-07T14:02:48.386347Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:222:2245], cookie=4042188766886261930) 2025-04-07T14:02:48.386916Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:227:2250], cookie=14949178901595120570, name="Sem1", force=0) 2025-04-07T14:02:48.386988Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-04-07T14:02:48.399667Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:227:2250], cookie=14949178901595120570) 2025-04-07T14:02:48.400287Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:232:2255], cookie=16370853850010110328, name="Sem1", limit=1) 2025-04-07T14:02:48.400389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-04-07T14:02:48.413318Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:232:2255], cookie=16370853850010110328) 2025-04-07T14:02:48.414040Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:237:2260], cookie=12818612808482800956, name="Sem1", force=0) 2025-04-07T14:02:48.414138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-04-07T14:02:48.429558Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:237:2260], cookie=12818612808482800956) 2025-04-07T14:02:48.430342Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:242:2265], cookie=2219282501433182154, name="Sem1", limit=1) 2025-04-07T14:02:48.430505Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-04-07T14:02:48.444249Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete 
(sender=[3:242:2265], cookie=2219282501433182154) 2025-04-07T14:02:48.444947Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:48.445139Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-04-07T14:02:48.456970Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-07T14:02:48.457492Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-07T14:02:48.482453Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=222) 2025-04-07T14:02:48.483097Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[3:130:2156], cookie=333, name="Sem1") 2025-04-07T14:02:48.483256Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-04-07T14:02:48.495787Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[3:130:2156], cookie=333) 2025-04-07T14:02:48.496399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=444, session=2, semaphore="Sem1" count=1) 2025-04-07T14:02:48.508445Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=444) 2025-04-07T14:02:48.509018Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[3:130:2156], cookie=555, name="Sem1") 2025-04-07T14:02:48.509127Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-04-07T14:02:48.509197Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-04-07T14:02:48.527251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[3:130:2156], cookie=555) 2025-04-07T14:02:48.973817Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.973892Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.996306Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.996446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:49.014192Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:49.014570Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12031512686196935023, session=0, seqNo=0) 2025-04-07T14:02:49.014709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:49.037541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12031512686196935023, session=1) 2025-04-07T14:02:49.037840Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=112, name="Sem1", limit=5) 2025-04-07T14:02:49.037972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:02:49.049859Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=112) 2025-04-07T14:02:49.050148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute 
(sender=[4:130:2156], cookie=113, name="Sem1") 2025-04-07T14:02:49.065431Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=113) 2025-04-07T14:02:49.065761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=114, name="Sem1", force=0) 2025-04-07T14:02:49.065866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-07T14:02:49.079254Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=114) 2025-04-07T14:02:49.079548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[4:130:2156], cookie=9731449500173268382 2025-04-07T14:02:49.079844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=115, name="Sem1", limit=5) 2025-04-07T14:02:49.091910Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=115) 2025-04-07T14:02:49.092147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=116, name="Sem1") 2025-04-07T14:02:49.107524Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=116) 2025-04-07T14:02:49.107891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=117, name="Sem1", force=0) 2025-04-07T14:02:49.121168Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=117) 2025-04-07T14:02:49.121503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=118, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:49.133598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=118) 2025-04-07T14:02:49.133950Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=119, name="Sem1") 2025-04-07T14:02:49.146414Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=119) 2025-04-07T14:02:49.146714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=120, name="Sem1") 2025-04-07T14:02:49.146796Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=120) 2025-04-07T14:02:49.147011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[4:130:2156], cookie=10337513024930052132, session=1) 2025-04-07T14:02:49.147090Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:02:49.159585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[4:130:2156], cookie=10337513024930052132) 2025-04-07T14:02:49.159811Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=121, name="Sem1", limit=5) 2025-04-07T14:02:49.171680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=121) 2025-04-07T14:02:49.172000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=122, name="Sem1") 2025-04-07T14:02:49.183635Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=122) 
2025-04-07T14:02:49.183862Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=123, name="Sem1", force=0) 2025-04-07T14:02:49.195856Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=123) 2025-04-07T14:02:49.196136Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=124, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:49.207927Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=124) 2025-04-07T14:02:49.208146Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=125, name="Sem1") 2025-04-07T14:02:49.219866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=125) 2025-04-07T14:02:49.220089Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=126, name="Sem1") 2025-04-07T14:02:49.220141Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=126) 2025-04-07T14:02:49.220610Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=127, name="Sem1", limit=5) 2025-04-07T14:02:49.220655Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=127) 2025-04-07T14:02:49.220937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=128, name="Sem1") 2025-04-07T14:02:49.220991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=128) 2025-04-07T14:02:49.221186Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=129, name="Sem1", force=0) 2025-04-07T14:02:49.221242Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=129) 2025-04-07T14:02:49.221418Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=130, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:49.221472Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=130) 2025-04-07T14:02:49.221651Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=131, name="Sem1") 2025-04-07T14:02:49.221705Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=131) 2025-04-07T14:02:49.221888Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem1") 2025-04-07T14:02:49.221939Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-04-07T14:02:46.439361Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439555Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.508326Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.508850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.561676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:130:2156], cookie=9990902420451150669, path="/foo/bar/baz") 2025-04-07T14:02:46.584455Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:130:2156], cookie=9990902420451150669, status=SUCCESS) 2025-04-07T14:02:46.585038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:139:2163], cookie=6215358972958675390) 2025-04-07T14:02:46.596807Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:139:2163], cookie=6215358972958675390) 2025-04-07T14:02:46.597322Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:144:2168], cookie=8585538439502492721, path="/foo/bar/baz") 2025-04-07T14:02:46.609293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:144:2168], cookie=8585538439502492721, status=SUCCESS) 2025-04-07T14:02:46.609762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:149:2173], cookie=5863966452913120694) 2025-04-07T14:02:46.621832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:149:2173], cookie=5863966452913120694) 2025-04-07T14:02:46.665000Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.665077Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.665587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.666131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.691059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.691423Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:191:2205], cookie=14700959862514853372) 2025-04-07T14:02:46.703528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:191:2205], cookie=14700959862514853372) 2025-04-07T14:02:46.704122Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:199:2212], cookie=10203605988564144116, path="/foo/bar/baz") 2025-04-07T14:02:46.715941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:199:2212], cookie=10203605988564144116, status=SUCCESS) 2025-04-07T14:02:46.716366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:204:2217], cookie=5235693886219966407, path="/foo/bar/baz") 2025-04-07T14:02:46.716420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:204:2217], cookie=5235693886219966407, status=PRECONDITION_FAILED) 2025-04-07T14:02:47.470014Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.470076Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.480446Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.480535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.509666Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2025-04-07T14:02:47.510015Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:128:2154], cookie=3425730048322658574, name="Lock1") 2025-04-07T14:02:47.510109Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:128:2154], cookie=3425730048322658574) 2025-04-07T14:02:47.914115Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.914207Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.930531Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.930979Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.956466Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.956914Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=13441281378907920821, session=0, seqNo=0) 2025-04-07T14:02:47.957056Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.969056Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=13441281378907920821, session=1) 2025-04-07T14:02:47.969352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:47.969498Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:47.969675Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.982361Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-07T14:02:47.982915Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:141:2165], cookie=8712621126353240916, name="Lock1", force=0) 2025-04-07T14:02:47.995019Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:141:2165], cookie=8712621126353240916) 2025-04-07T14:02:47.995522Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:146:2170], cookie=12959708389689244234, name="Sem1", force=0) 2025-04-07T14:02:48.007630Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:146:2170], cookie=12959708389689244234) 2025-04-07T14:02:48.008191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:151:2175], cookie=15561074796108765260, name="Sem1", limit=42) 2025-04-07T14:02:48.008331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-07T14:02:48.020312Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:151:2175], cookie=15561074796108765260) 2025-04-07T14:02:48.020849Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:156:2180], cookie=15445565093508223869, name="Sem1", force=0) 2025-04-07T14:02:48.020961Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-04-07T14:02:48.032942Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:156:2180], cookie=15445565093508223869) 2025-04-07T14:02:48.033462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:161:2185], cookie=13004502451025981902, name="Sem1", force=0) 
2025-04-07T14:02:48.047993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:161:2185], cookie=13004502451025981902) 2025-04-07T14:02:48.345763Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.345863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.366865Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.366995Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.381283Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.381721Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=2325744183106305966, session=0, seqNo=0) 2025-04-07T14:02:48.381861Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:48.406449Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=2325744183106305966, session=1) 2025-04-07T14:02:48.406752Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=14867188989521103246, session=0, seqNo=0) 2025-04-07T14:02:48.406877Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:48.419266Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=14867188989521103246, session=2) 2025-04-07T14:02:48.419496Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=3813487810680541054 2025-04-07T14:02:48.419849Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=13126741067219639778, name="Sem1", limit=3) 2025-04-07T14:02:48.419970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:02:48.435213Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=13126741067219639778) 2025-04-07T14:02:48.435492Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2025-04-07T14:02:48.435569Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2025-04-07T14:02:48.435796Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2025-04-07T14:02:48.435852Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2025-04-07T14:02:48.436029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=1429234279064805815, session=2, seqNo=0) 2025-04-07T14:02:48.447850Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=1429234279064805815, session=2) 2025-04-07T14:02:48.448114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2025-04-07T14:02:48.448179Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 2025-04-07T14:02:48.448333Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2025-04-07T14:02:48.448377Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2025-04-07T14:02:48.448766Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=14852469391802999993, name="Sem1") 2025-04-07T14:02:48.461088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=14852469391802999993) 2025-04-07T14:02:48.461463Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:48.461620Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:02:48.473315Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2025-04-07T14:02:48.473678Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2025-04-07T14:02:48.473834Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-07T14:02:48.486011Z node 4 :KESUS_TABLET DEBUG: [720575 ... 49.251384Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-07T14:02:49.263308Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=469759594788718611) 2025-04-07T14:02:49.263695Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2025-04-07T14:02:49.263835Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-04-07T14:02:49.275635Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2025-04-07T14:02:49.275968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2025-04-07T14:02:49.276029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2025-04-07T14:02:49.276233Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2025-04-07T14:02:49.288022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2025-04-07T14:02:49.547793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.560417Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.571069Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2025-04-07T14:02:49.583509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2025-04-07T14:02:49.583982Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2025-04-07T14:02:49.584075Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=130) 2025-04-07T14:02:49.584350Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2025-04-07T14:02:49.596914Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2025-04-07T14:02:49.597391Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2025-04-07T14:02:49.597498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2025-04-07T14:02:49.597821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2025-04-07T14:02:49.597903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2025-04-07T14:02:49.954140Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:49.954248Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:49.970752Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:49.970886Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:49.985218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:49.993668Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=411086392428556966, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-04-07T14:02:49.993919Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-04-07T14:02:50.016466Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=411086392428556966) 2025-04-07T14:02:50.017107Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=17620454066174052665, path="/Root1/Res", config={ }) 2025-04-07T14:02:50.017317Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-04-07T14:02:50.029142Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=17620454066174052665) 2025-04-07T14:02:50.029658Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2168], cookie=14464192625128823804, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-04-07T14:02:50.029792Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-04-07T14:02:50.041693Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2168], cookie=14464192625128823804) 2025-04-07T14:02:50.042267Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:149:2173], cookie=3756148449302946119, path="/Root2/Res", config={ }) 2025-04-07T14:02:50.042457Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-04-07T14:02:50.054649Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:149:2173], cookie=3756148449302946119) 2025-04-07T14:02:50.055257Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:154:2178], cookie=6929178751945991544, path="/Root2/Res/Subres", config={ }) 2025-04-07T14:02:50.055498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-04-07T14:02:50.067369Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:154:2178], cookie=6929178751945991544) 
2025-04-07T14:02:50.068437Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 8967173895620809169. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:50.068497Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=8967173895620809169) 2025-04-07T14:02:50.110355Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.152188Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.183251Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.184003Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:166:2187]. Cookie: 7168754403253270211. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-07T14:02:50.184891Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 18408144656051573404. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:50.184950Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=18408144656051573404) 2025-04-07T14:02:50.216941Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.258761Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.259562Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2194]. Cookie: 7993842676811602220. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-07T14:02:50.260350Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 4640906290755572661. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:50.260401Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=4640906290755572661) 2025-04-07T14:02:50.261089Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 2521202321795187084. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:50.261151Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=2521202321795187084) 2025-04-07T14:02:50.292404Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.292521Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:50.293324Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:181:2201]. Cookie: 16894349926557843341. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MinL [GOOD] Test command err: Trying to start YDB, gRPC: 31446, MsgBus: 4623 2025-04-07T14:02:18.774423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571305141776553:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:18.774592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c7c/r3tmp/tmp6YhgSz/pdisk_1.dat 2025-04-07T14:02:19.409609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:19.409683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:19.412448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31446, node 1 2025-04-07T14:02:19.434855Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:19.478789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:19.478837Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:19.665110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:19.665135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:19.665142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:19.665234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4623 TClient is connected to server localhost:4623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:20.608384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:20.652197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:20.809058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.809229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.809454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.809561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.809648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.809727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.809807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.809908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.810027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:20.810148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.810260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:20.810383Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571313731711666:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:20.817330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:20.817391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:20.817485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:20.817510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:20.817693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:20.817721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:20.817789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:20.817820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:20.817866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:20.817885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:20.817923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:20.817945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:20.818371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:20.818403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:20.818574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:20.818597Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.818747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:20.818767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:20.818911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:20.818930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:20.819050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:20.819087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:20.872932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571313731711669:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.873007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571313731711669:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.257299Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.419664Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.419775Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.510080Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.510139Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.594378Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.594454Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.679499Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.679583Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.764653Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:47.776058Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:47.830916Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.830984Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.915411Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:02:47.915493Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:47.999858Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:47.999952Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.084112Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.084198Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.170132Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.170201Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.215291Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:48.342846Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.342918Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:48.449140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.449204Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.535375Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.535449Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.619684Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.619755Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:48.704331Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:48.704383Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1878:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWU0NzRkY2QtNzVkMDAwMWYtMjc2OWNmMjctZWIwY2M5NjQ=. TraceId : 01jr88yw2xdkqxsmhc9w0a8xnq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:48.769246Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:48.791360Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T13:59:36.234045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T13:59:36.234171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:36.234221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T13:59:36.234258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T13:59:36.234306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T13:59:36.234335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T13:59:36.234398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T13:59:36.234755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T13:59:36.414428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T13:59:36.414483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:59:36.429552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T13:59:36.429686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T13:59:36.429871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T13:59:36.436718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T13:59:36.436941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T13:59:36.437601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:36.437826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T13:59:36.439791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.441058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:36.441115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.441271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T13:59:36.441327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:36.441371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T13:59:36.441541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.448280Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T13:59:36.570386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T13:59:36.570657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.570917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T13:59:36.571105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T13:59:36.571161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.573644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:36.573815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T13:59:36.574014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.574062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T13:59:36.574112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T13:59:36.574148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T13:59:36.576243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.576347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T13:59:36.576390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T13:59:36.578440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.578492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.578524Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.578577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T13:59:36.593569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T13:59:36.595704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T13:59:36.595920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T13:59:36.596974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T13:59:36.597121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T13:59:36.597189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.597477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T13:59:36.597532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T13:59:36.597722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:36.597819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T13:59:36.599926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T13:59:36.599971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T13:59:36.600127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T13:59:36.600162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T13:59:36.600416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T13:59:36.600458Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T13:59:36.600563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T13:59:36.600598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T13:59:36.600641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T13:59:36.600696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T13:59:36.600743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T13:59:36.600773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T13:59:36.600853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T13:59:36.600891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T13:59:36.600922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T13:59:36.602827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:36.602926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T13:59:36.602956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T13:59:36.603018Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T13:59:36.603057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T13:59:36.603151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
emeShard::TEvMeasureSelfResponseTime 2025-04-07T14:02:45.966776Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:306:2294]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:02:45.966873Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2025-04-07T14:02:45.967020Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2025-04-07T14:02:45.967149Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-07T14:02:45.967551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:306:2294], Recipient [3:121:2147]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 440 Memory: 124000 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-07T14:02:45.967602Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:02:45.967654Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.044 2025-04-07T14:02:45.967785Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:02:45.967828Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:02:45.978371Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:02:45.978462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:02:45.978566Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:02:45.978617Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:02:46.011217Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.011319Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.011362Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-07T14:02:46.011441Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:02:46.011479Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:02:46.011601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-07T14:02:46.011675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-07T14:02:46.011721Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-07T14:02:46.011863Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:02:46.022385Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.022462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.022502Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:02:46.043458Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:708:2677]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:02:46.043572Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409547 2025-04-07T14:02:46.043691Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409547, for tableId 3: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2025-04-07T14:02:46.043817Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-04-07T14:02:46.044300Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:708:2677], Recipient [3:121:2147]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 74 Memory: 124000 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 212 TableOwnerId: 72057594046678944 FollowerId: 0 
2025-04-07T14:02:46.044350Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:02:46.044406Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0074 2025-04-07T14:02:46.044530Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:02:46.044583Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-07T14:02:46.075928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-07T14:02:46.076028Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-07T14:02:46.076091Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-07T14:02:46.076178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-04-07T14:02:46.076213Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-07T14:02:46.076367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.076414Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.076447Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-07T14:02:46.076511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:02:46.076588Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:02:46.076727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-04-07T14:02:46.076799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0 2025-04-07T14:02:46.076848Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 100, DataSize 13940, with borrowed parts 2025-04-07T14:02:46.076988Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-07T14:02:46.077071Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:02:46.087630Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.087716Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:02:46.087753Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:02:46.278675Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:02:46.278757Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:02:46.278826Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:121:2147], Recipient [3:121:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:02:46.278852Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 [GOOD] >> KqpDataIntegrityTrails::Ddl >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 15033, MsgBus: 26829 2025-04-07T14:02:21.539747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571317639729052:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:21.539866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c57/r3tmp/tmpnquPl6/pdisk_1.dat 2025-04-07T14:02:22.196087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:22.196169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:22.201504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:22.262364Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15033, node 1 2025-04-07T14:02:22.492998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:22.493019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:22.493025Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:22.493097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26829 TClient is connected to server localhost:26829 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:23.359748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:23.436311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:23.629721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.629898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.630137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.630238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.630352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.630451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.630560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.630662Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.630757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:23.630849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.630951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:23.631060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571326229664149:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:23.688599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.688653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.688907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.689024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.689115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.689210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.689306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.689417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.689519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:23.689612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:23.689706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:23.689795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571326229664174:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:23.751747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:23.751810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:23.752011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:23.752166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:23.752317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:23.752430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:23.752562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:23.754677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571326229664175:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:23.754853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905713262296641 ... TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2025-04-07T14:02:48.872741Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.022949Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.023032Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.113338Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.113408Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.197657Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.197730Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.281763Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.281853Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:02:49.366532Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:49.377884Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:49.430525Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.430593Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.515086Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.515164Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.599510Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.599599Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.685233Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.685319Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.769766Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. 
SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.769860Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:49.814929Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:49.939479Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:49.939533Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:50.045899Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:50.045970Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:50.129997Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:50.130082Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:50.214137Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:50.214192Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:50.298452Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:02:50.298527Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1739:3009], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jr88yya92rxs6ddtrnj4rspv. SessionId : ydb://session/3?node_id=2&id=MTE2OTJhYy0xOTNhYzRjNi04NWI1Njc2Zi01NTc2ZTdkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:02:50.361703Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:50.383548Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |90.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-04-07T14:00:31.133654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570847585529810:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:31.157330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001824/r3tmp/tmpqjjrXV/pdisk_1.dat 2025-04-07T14:00:31.696609Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:00:32.184686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:32.196793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:00:32.202284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:32.202382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:32.210005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30833, node 1 2025-04-07T14:00:32.529214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001824/r3tmp/yandexDZJcYi.tmp 2025-04-07T14:00:32.529236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001824/r3tmp/yandexDZJcYi.tmp 2025-04-07T14:00:32.529420Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001824/r3tmp/yandexDZJcYi.tmp 2025-04-07T14:00:32.529503Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:00:32.808024Z INFO: TTestServer 
started on Port 3495 GrpcPort 30833 TClient is connected to server localhost:3495 PQClient connected to localhost:30833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:33.951647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:34.009001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:34.029658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:00:34.034971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:00:34.261221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:36.109114Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570847585529810:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:36.109267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:37.141114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570873355334248:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.141416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570873355334239:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.141549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:00:37.161357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:00:37.266417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570873355334261:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:00:37.531782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:00:37.564901Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490570873355334346:2326], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:00:37.569492Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDU3OGJkODgtODA4MGEyZWUtNjA4NWY3ZDktY2FiNWI3Zjk=, ActorId: [1:7490570873355334235:2310], ActorState: ExecuteState, TraceId: 01jr88v6094qj6fr3e9k7rhzc4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:00:37.589225Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:00:37.602976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:00:37.735492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7490570877650301900:2625] === CheckClustersList. 
Ok 2025-04-07T14:00:44.371293Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-07T14:00:44.447682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490570903420106016:2810], Recipient [1:7490570847585530092:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:44.447728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:00:44.447752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T14:00:44.447797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490570903420106012:2807], Recipient [1:7490570847585530092:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-07T14:00:44.447813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T14:00:44.559935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:00:44.560340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/origin, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:00:44.560462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/origin, opId: 281474976710673:0, schema: Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false, at schemeshard: 72057594046644480 2025-04-07T14:00:44.560931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: origin, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-07T14:00:44.560979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-07T14:00:44.561021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-07T14:00:44.561038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-07T14:00:44.561061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-07T14:00:44.561076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-07T14:00:44.561092Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDb ... gin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2172 LocalSeqNo=2170 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356428Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1718 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6776483 count 27865 nextOffset 64565 batches 15 2025-04-07T14:01:48.356435Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2172 partNo 0 2025-04-07T14:01:48.356455Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2172 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6517866 count 26921 nextOffset 65693 batches 15 2025-04-07T14:01:48.356468Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1719 LocalSeqNo=1718 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356490Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1719 partNo 0 2025-04-07T14:01:48.356492Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2173 LocalSeqNo=2172 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356511Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1719 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6776759 count 27866 nextOffset 64566 batches 15 2025-04-07T14:01:48.356512Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2173 partNo 0 2025-04-07T14:01:48.356534Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2173 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6518141 count 26922 nextOffset 65694 batches 15 2025-04-07T14:01:48.356541Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1721 LocalSeqNo=1719 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356557Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1721 partNo 0 2025-04-07T14:01:48.356569Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2174 LocalSeqNo=2173 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356579Z node 1 :PERSQUEUE DEBUG: [PQ: 
72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1721 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6777035 count 27867 nextOffset 64567 batches 15 2025-04-07T14:01:48.356585Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2174 partNo 0 2025-04-07T14:01:48.356607Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2174 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6518416 count 26923 nextOffset 65695 batches 15 2025-04-07T14:01:48.356608Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1723 LocalSeqNo=1721 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356624Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1723 partNo 0 2025-04-07T14:01:48.356637Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2175 LocalSeqNo=2174 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356643Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1723 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6777311 count 27868 nextOffset 64568 batches 15 2025-04-07T14:01:48.356652Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2175 partNo 0 2025-04-07T14:01:48.356701Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1724 LocalSeqNo=1723 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356705Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2175 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6518691 count 26924 nextOffset 65696 batches 15 2025-04-07T14:01:48.356720Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1724 partNo 0 2025-04-07T14:01:48.356739Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1724 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6777587 count 27869 nextOffset 64569 batches 15 2025-04-07T14:01:48.356744Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2178 LocalSeqNo=2175 
InitialSeqNo=(NULL) 2025-04-07T14:01:48.356762Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2178 partNo 0 2025-04-07T14:01:48.356763Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1726 LocalSeqNo=1724 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356776Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1726 partNo 0 2025-04-07T14:01:48.356787Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2178 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6518966 count 26925 nextOffset 65697 batches 15 2025-04-07T14:01:48.356795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1726 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6777863 count 27870 nextOffset 64570 batches 15 2025-04-07T14:01:48.356814Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2181 LocalSeqNo=2178 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356824Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1727 LocalSeqNo=1726 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356831Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2181 partNo 0 2025-04-07T14:01:48.356840Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1727 partNo 0 2025-04-07T14:01:48.356852Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2181 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6519241 count 26926 nextOffset 65698 batches 15 2025-04-07T14:01:48.356870Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1727 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6778139 count 27871 nextOffset 64571 batches 15 2025-04-07T14:01:48.356882Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2183 LocalSeqNo=2181 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356897Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 process write for '\00072075186224037944' DisableDeduplication=0 SeqNo=1728 
LocalSeqNo=1727 InitialSeqNo=(NULL) 2025-04-07T14:01:48.356899Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037899' seqNo 2183 partNo 0 2025-04-07T14:01:48.356913Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob processing sourceId '\00072075186224037944' seqNo 1728 partNo 0 2025-04-07T14:01:48.356923Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037899' seqNo 2183 partNo 0 FormedBlobsCount 0 NewHead: Offset 38772 PartNo 0 PackedSize 6519516 count 26927 nextOffset 65699 batches 15 2025-04-07T14:01:48.356934Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Topic 'origin/feed/streamImpl' partition 0 part blob complete sourceId '\00072075186224037944' seqNo 1728 partNo 0 FormedBlobsCount 0 NewHead: Offset 36700 PartNo 0 PackedSize 6778415 count 27872 nextOffset 64572 batches 15 2025-04-07T14:01:48.356948Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037899' DisableDeduplication=0 SeqNo=2184 LocalSeqNo=2183 InitialSeqNo=(NULL) >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 9427, MsgBus: 24970 2025-04-07T13:57:05.732655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569961287062128:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:05.732747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001071/r3tmp/tmpxAhRGc/pdisk_1.dat 2025-04-07T13:57:06.039818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9427, node 1 2025-04-07T13:57:06.089151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:06.089243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:06.105545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:57:06.133486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:06.133513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:06.133524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:06.133649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24970 TClient is connected to server localhost:24970 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:06.758674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:06.777457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:06.789000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.923216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:07.074811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:07.149345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:08.794245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569974171965702:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:08.794357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:09.054652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.096537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.148265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.184458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.222801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.270318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:09.360893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569978466933498:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:09.360976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:09.361237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569978466933503:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:09.364757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:09.383628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569978466933505:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:10.736944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569961287062128:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:10.737006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:11.010610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.064973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.094222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.134267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.164492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.340237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.421976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.484097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.541917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.615288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.659629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.693550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:11.787192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:12.498491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:12.587857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:12.643321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:12.684041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:12.762722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:12.821610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... access permissions } 2025-04-07T14:02:37.584610Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:37.602950Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490571386613061402:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:02:39.367304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.417091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.459370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.508693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.567423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.797570Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.844896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.928797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:02:39.972277Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:02:40.015215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:02:40.083744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:02:40.132873Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:02:40.184068Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.110597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T14:02:41.179384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.220224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.270094Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.315353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.363766Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.437122Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.473103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.514588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.555879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.603529Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.647912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.704070Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.769412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.811184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.859733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.902113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.940592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.980280Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.016552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.060307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.121533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.173776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.215747Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.465692Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-07T14:02:42.507581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.551556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.599839Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.664359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.742769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.804817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.844103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.886447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-07T14:02:43.014619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2025-04-07T14:02:43.059624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T14:02:43.111943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T14:02:43.151972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-07T14:02:46.460722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:46.460754Z node 5 :IMPORT WARN: Table profiles were not loaded >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-04-07T14:02:46.439379Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.509151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.509676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.551016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.575092Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=2145386073945600928, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:46.575445Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:46.589073Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=2145386073945600928) 2025-04-07T14:02:46.589616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=16427340953552938467, path="/Root/Res", config={ }) 2025-04-07T14:02:46.589827Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-07T14:02:46.601750Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=16427340953552938467) 2025-04-07T14:02:46.620778Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:145:2169]. Cookie: 6180433052878508573. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:46.620871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:145:2169], cookie=6180433052878508573) 2025-04-07T14:02:46.621517Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:145:2169]. Cookie: 9888331002721278141. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-07T14:02:46.621570Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:145:2169], cookie=9888331002721278141) 2025-04-07T14:02:48.889224Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:48.889390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:48.902246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:48.902329Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:48.927227Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:48.927615Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:128:2154], cookie=4141200036004250223, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:48.927885Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:48.939386Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:128:2154], cookie=4141200036004250223) 2025-04-07T14:02:48.940054Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:138:2162]. Cookie: 8712848361523561820. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:48.940099Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:138:2162], cookie=8712848361523561820) 2025-04-07T14:02:48.940583Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:138:2162]. Cookie: 4995959533797222883. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:48.940622Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:138:2162], cookie=4995959533797222883) 2025-04-07T14:02:48.941131Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:138:2162]. Cookie: 17483396674850077576. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-07T14:02:48.941186Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:138:2162], cookie=17483396674850077576) 2025-04-07T14:02:48.941566Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:138:2162]. Cookie: 12950705168487883346. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-07T14:02:48.941604Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:138:2162], cookie=12950705168487883346) 2025-04-07T14:02:50.951346Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:50.951449Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:50.969462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:50.969989Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:50.994291Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:50.994721Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=13687133192895600108, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:50.995019Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:51.006996Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=13687133192895600108) 2025-04-07T14:02:51.007524Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=13206721079349343254, path="/Root/Res1", config={ }) 2025-04-07T14:02:51.007735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-04-07T14:02:51.019667Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=13206721079349343254) 2025-04-07T14:02:51.020246Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:145:2169], cookie=4710665919406084524, path="/Root/Res2", config={ }) 2025-04-07T14:02:51.020437Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-04-07T14:02:51.032541Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:145:2169], cookie=4710665919406084524) 2025-04-07T14:02:51.033468Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 15331601366699550972. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:51.033527Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=15331601366699550972) 2025-04-07T14:02:51.034127Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 16189197116099397354. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:51.034183Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=16189197116099397354) 2025-04-07T14:02:51.034653Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:150:2174]. Cookie: 11992243311974507657. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-04-07T14:02:51.034695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:150:2174], cookie=11992243311974507657) 2025-04-07T14:02:53.143601Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:53.143717Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:53.160960Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:53.161105Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:53.175368Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:53.175671Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=10899041257355470425, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:53.175920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:53.198238Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=10899041257355470425) 2025-04-07T14:02:53.199041Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 1308228626759863593. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:53.199098Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=1308228626759863593) 2025-04-07T14:02:53.199567Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 1629638298347718937. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-07T14:02:53.199612Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=1629638298347718937) 2025-04-07T14:02:53.199986Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 10226195233217838221. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-07T14:02:53.200025Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=10226195233217838221) 2025-04-07T14:02:55.362451Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:55.362568Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:55.380712Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:55.380901Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:55.394826Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:55.395171Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=7427836175378476967, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-07T14:02:55.395352Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:55.417731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=7427836175378476967) 2025-04-07T14:02:55.418323Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=2766449351271276730, path="/Root/Res", config={ }) 2025-04-07T14:02:55.418562Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-07T14:02:55.430782Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=2766449351271276730) 2025-04-07T14:02:55.431601Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 17777248402165420367. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:55.431659Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=17777248402165420367) 2025-04-07T14:02:55.432147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:148:2172], cookie=5810239728297983483, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-04-07T14:02:55.432312Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-04-07T14:02:55.432489Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-04-07T14:02:55.444360Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:148:2172], cookie=5810239728297983483) 2025-04-07T14:02:55.444978Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 14733675281130148671. Data: { } 2025-04-07T14:02:55.445027Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=14733675281130148671) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 31325, MsgBus: 22012 2025-04-07T14:01:52.030828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571193821334142:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.031088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d31/r3tmp/tmpXqmGaF/pdisk_1.dat 2025-04-07T14:01:52.780352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.791778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.791866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.805916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31325, node 1 2025-04-07T14:01:52.989173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.989193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.989200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.989286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22012 TClient is connected to server localhost:22012 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:01:53.901409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:01:53.951418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:54.138603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.138760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.138977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.139084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.139194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.139303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.139389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.139492Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.139604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.139701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.139791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.139888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571202411269255:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.233115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.233169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.233399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.233504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.233672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.233775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.233868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.234016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.234138Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.234247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.234343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.234444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571202411269257:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.303691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.303751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.303970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.304103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.304220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.304318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.304409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.304503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571202411269272:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.304591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74905712024112692 ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2025-04-07T14:02:52.139723Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.323189Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.323270Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.408112Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.408191Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.495713Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.495770Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.587705Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.587796Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2025-04-07T14:02:52.688541Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:52.704900Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:02:52.748740Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.748805Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.833114Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.833193Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:52.917247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:52.917333Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.001725Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.001794Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.086046Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. 
CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.086130Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.142103Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:53.243865Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.243940Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.350562Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.350643Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.434445Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.434518Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.518901Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.518983Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.603180Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-04-07T14:02:53.603247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1783:3053], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjA4MmZiNC1mZDgxZjFhMy01ZjY0YjFhMi05YzE3ZDU4Mw==. CustomerSuppliedId : . TraceId : 01jr88yjp34tdtw183qdc3pg7r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-04-07T14:02:53.687048Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1216:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:02:53.708885Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1216:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 21636, MsgBus: 12797 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001005/r3tmp/tmpTKQaIg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21636, node 1 TClient is connected to server localhost:12797 TClient is connected to server localhost:12797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] Test command err: Trying to start YDB, gRPC: 12019, MsgBus: 26547 2025-04-07T14:01:23.277591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571070735414829:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:23.277643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ee1/r3tmp/tmp3OL4Co/pdisk_1.dat 2025-04-07T14:01:23.936739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:23.941910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:23.941978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:23.949770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12019, node 1 2025-04-07T14:01:24.183743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:24.183764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:24.183771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:24.183863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26547 TClient is connected to server localhost:26547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:25.029796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:25.045627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:25.064436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:25.216027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:25.216249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:25.216495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:25.216603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:25.216706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:25.216791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:25.216881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:25.216998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:25.217116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:25.217246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:25.217334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:25.217411Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571079325350089:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:25.313447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:25.313513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:25.313728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:25.313827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:25.313938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:25.314062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:25.314151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:25.314243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:25.314389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:25.314506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:25.314614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:25.314709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571079325350095:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:25.317529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:25.317638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:25.317756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:25.317791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:25.317969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:25.317998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:25.318097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:25.318135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:25.318213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:25.318236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... 
TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Wait changes: 18421472/51200000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: 2025-04-07T14:02:18.556356Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034538326, txId: 281474976715669] shutting down Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 18421472/51200000 2025-04-07T14:02:18.607810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715671:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 
PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 2025-04-07T14:02:30.815888Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034550696, txId: 281474976715673] shutting down 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 1069880 PathId: 3 Wait changes: 22830328/102400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 
0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 1069880 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: 2025-04-07T14:02:36.198102Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034556086, txId: 281474976715675] shutting down COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 
3 22830328/102400000 ==================================== QUERY: SELECT COUNT(*), MAX(pk_int), MIN(pk_int) FROM `/Root/olapStore/olapTable` RESULT: 2025-04-07T14:02:38.441465Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034556215, txId: 18446744073709551615] shutting down column2: 0 column0: 1600000 column1: 1599999 column2: int64_value: 0 column0: uint64_value: 1600000 column1: int64_value: 1599999 2025-04-07T14:02:45.307951Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034558000, txId: 18446744073709551615] shutting down count=1600000;min_count=3124;max_count=3126;groups_count=512; 2025-04-07T14:02:45.362754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715682:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 unpacked data: 44800000 / 15210888 packed data: 44800000 / 1198272 frq_diff: 0.07877725482 frq_compression: 0.02674714286 pk_size : 6400000 / 3210584 >> TKesusTest::TestAcquireLocks >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> KqpOlapSysView::StatsSysViewFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-04-07T14:02:46.439416Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.510612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.511086Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.551187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.556005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=8199120442298151393, path="/a/b", config={ }) 2025-04-07T14:02:46.556144Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=8199120442298151393) 2025-04-07T14:02:46.565457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=4688540034462039553, path="", config={ }) 2025-04-07T14:02:46.565557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=4688540034462039553) 2025-04-07T14:02:46.565961Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:143:2167], cookie=10172419687655076327, path="/", config={ }) 2025-04-07T14:02:46.566019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:143:2167], cookie=10172419687655076327) 2025-04-07T14:02:46.566336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2170], cookie=1250917515598346261, path="//", config={ }) 2025-04-07T14:02:46.566377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=1250917515598346261) 2025-04-07T14:02:46.575094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:155:2179], 
cookie=15338886407593384255, path="RootQuoter", config={ MaxUnitsPerSecond: 42 }) 2025-04-07T14:02:46.575408Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "RootQuoter" 2025-04-07T14:02:46.588104Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:155:2179], cookie=15338886407593384255) 2025-04-07T14:02:46.588823Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:160:2184], cookie=13897764008301202278, path="/RootQuoter/", config={ MaxUnitsPerSecond: 42 }) 2025-04-07T14:02:46.588994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:160:2184], cookie=13897764008301202278) 2025-04-07T14:02:46.589582Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:163:2187], cookie=10973347075917185691, path="/RootQuoter/", config={ MaxUnitsPerSecond: 100500 }) 2025-04-07T14:02:46.589662Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:163:2187], cookie=10973347075917185691) 2025-04-07T14:02:46.590184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2190], cookie=1979800410521567995, path="RootQuoter/SubQuoter", config={ }) 2025-04-07T14:02:46.590361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "RootQuoter/SubQuoter" 2025-04-07T14:02:46.602244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2190], cookie=1979800410521567995) 2025-04-07T14:02:46.602846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2195], cookie=14335145659454514526, path="/RootQuoter//OtherSubQuoter/", config={ MaxUnitsPerSecond: 100500 }) 2025-04-07T14:02:46.603049Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "RootQuoter/OtherSubQuoter" 2025-04-07T14:02:46.615657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2195], cookie=14335145659454514526) 2025-04-07T14:02:46.616411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=15819913061166980521, ids=[], paths=[], recursive=1) 2025-04-07T14:02:46.616507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=15819913061166980521) 2025-04-07T14:02:46.665167Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.665252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.665832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.666406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.693150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.693517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:216:2230], cookie=15381973439551693111, ids=[], paths=[], recursive=1) 2025-04-07T14:02:46.693626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:216:2230], cookie=15381973439551693111) 2025-04-07T14:02:46.694480Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:222:2235], cookie=4055743298276430524, path="/RootQuoter", config={ MaxUnitsPerSecond: 42 }) 2025-04-07T14:02:46.694576Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:222:2235], cookie=4055743298276430524) 2025-04-07T14:02:46.695180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:225:2238], cookie=14162878075814637894, path="OtherRootQuoter", config={ MaxUnitsPerSecond: 100 }) 2025-04-07T14:02:46.695359Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "OtherRootQuoter" 2025-04-07T14:02:46.708117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:225:2238], cookie=14162878075814637894) 2025-04-07T14:02:46.708908Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:230:2243], cookie=8130417885879180028, ids=[], paths=[OtherRootQuoter], recursive=0) 2025-04-07T14:02:46.709010Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:230:2243], cookie=8130417885879180028) 2025-04-07T14:02:47.477675Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.477763Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.493132Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.493246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.521372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.521800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:128:2154], cookie=5171826321141451873, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-04-07T14:02:47.521934Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:128:2154], cookie=5171826321141451873) 2025-04-07T14:02:47.522396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:135:2159], cookie=805960498508418494, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-04-07T14:02:47.522515Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:135:2159], cookie=805960498508418494) 2025-04-07T14:02:47.522984Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:138:2162], cookie=10973766016230792089, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-04-07T14:02:47.523181Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-04-07T14:02:47.535375Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:138:2162], cookie=10973766016230792089) 2025-04-07T14:02:47.535939Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:143:2167], cookie=6249530857025333439, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-04-07T14:02:47.536166Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-04-07T14:02:47.548383Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:143:2167], cookie=6249530857025333439) 2025-04-07T14:02:47.921130Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.921231Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.936329Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2025-04-07T14:02:47.936694Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.959925Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.960314Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=2094250252489592979, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-07T14:02:47.960641Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:47.972397Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=2094250252489592979) 2025-04-07T14:02:47.972981Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=4732006925662918790, path="/Root/Res", config={ }) 2025-04-07T14:02:47.973215Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-07T14:02:47.985438Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=4732006925662918790) 2025-04-07T14:02:47.987401Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:145:2169]. Cookie: 748998043540982268. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:47.987470Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:145:2169], cookie=748998043540982268) 2025-04-07T14:02:47.988003Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:145:2169]. Cookie: 16313288220585455072. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-07T14:02:47.988054Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:145:2169], cookie=16313288220585455072) 2025-04-07T14:02:50.148019Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:50.148120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:50.165903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:50.166055Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:50.180003Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:50.180340Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=7279987071998631419, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:50.180628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:50.203096Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=7279987071998631419) 2025-04-07T14:02:50.203709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:140:2164], cookie=11152765450144590812, path="/Root/Res", config={ }) 2025-04-07T14:02:50.203962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-07T14:02:50.216125Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:140:2164], cookie=11152765450144590812) 2025-04-07T14:02:50.217030Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:145:2169]. Cookie: 1274641952581034046. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:50.217100Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:145:2169], cookie=1274641952581034046) 2025-04-07T14:02:50.217627Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:145:2169]. Cookie: 14399671340981288137. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-04-07T14:02:50.217673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:145:2169], cookie=14399671340981288137) 2025-04-07T14:02:52.364447Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:52.364555Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:52.380465Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:52.380604Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:52.394610Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:52.395105Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=14555168863162459981, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-07T14:02:52.395386Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:02:52.418075Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=14555168863162459981) 2025-04-07T14:02:52.418886Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:140:2164]. Cookie: 17824502577367709802. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:52.418938Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:140:2164], cookie=17824502577367709802) 2025-04-07T14:02:52.419447Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:140:2164]. Cookie: 602601863124144284. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-04-07T14:02:52.419492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:140:2164], cookie=602601863124144284) 2025-04-07T14:02:54.766602Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2188]. Cookie: 10127153374916985666. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:54.766677Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2188], cookie=10127153374916985666) 2025-04-07T14:02:54.767320Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:171:2188]. Cookie: 17089115950762977030. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-04-07T14:02:54.767382Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:171:2188], cookie=17089115950762977030) 2025-04-07T14:02:56.777085Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:198:2214]. Cookie: 15179078109521312197. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-07T14:02:56.777175Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:198:2214], cookie=15179078109521312197) 2025-04-07T14:02:56.777723Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:198:2214]. Cookie: 14086826453073574087. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-04-07T14:02:56.777767Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:198:2214], cookie=14086826453073574087) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewFilter [GOOD] Test command err: Trying to start YDB, gRPC: 27093, MsgBus: 19840 2025-04-07T14:02:18.570730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571305991682276:2225];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:18.570801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c7a/r3tmp/tmp6qpv04/pdisk_1.dat 2025-04-07T14:02:19.347844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:19.352746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:19.352835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:19.357415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27093, node 1 2025-04-07T14:02:19.650858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:19.650881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:19.650889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:19.650992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19840 TClient is connected to server localhost:19840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:20.321651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:20.365494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:20.498395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.498618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.498921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.499125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.499234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.499373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.499522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.499643Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.499777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:20.499907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.500014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:20.500124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571314581617386:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:20.546528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.546588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.546826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.546956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.547055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.547144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.547245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.547350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.547470Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:20.547577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.547684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:20.547782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571314581617382:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:20.585763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.585827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.586046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.586155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.586274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.586372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.586469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.586562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571314581617383:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.586674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74905713145816173 ... 
ocess=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:20.647181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:20.647764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:20.647792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:20.647892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:20.647924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:20.648051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:20.649051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:20.649211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:20.649294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:20.649340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:20.649360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:20.649390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:20.649410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:20.649695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:20.649726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:20.649912Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:20.649938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.650063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:20.650097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:20.650283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:20.650306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:20.650413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:20.650440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:20.701584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-04-07T14:02:23.544768Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571305991682276:2225];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.544849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(BlobRangeSize) as Bytes FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2025-04-07T14:02:32.089028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571366121225590:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:32.089134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:32.089387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571366121225602:2578], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:32.094315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:32.106439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571366121225604:2579], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:34.292088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:34.292138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:39.547861Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034553304, txId: 281474976710662] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 Bytes: 250032 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 Bytes: 249560 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(BlobRangeSize) as Bytes FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2025-04-07T14:02:45.713091Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034560101, txId: 281474976710664] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 Bytes: 250032 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 Bytes: 249560 ==================================== QUERY: SELECT * FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind == 'EVICTED' ORDER BY PathId, Kind, TabletId; RESULT: ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind IN ('SPLIT_COMPACTED', 'INACTIVE', 'EVICTED', 'INSERTED') AND Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2025-04-07T14:02:46.117032Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034566001, txId: 281474976710666] shutting down 2025-04-07T14:02:58.501951Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88z3z94v55xrgvs7xmmdew", SessionId: ydb://session/3?node_id=1&id=YzZkN2Q1ZDItMzdjZmI1ODAtYjI0YjE3YWYtZWQyOWQwNzg=, Slow query, duration: 12.378354s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE Kind IN ('SPLIT_COMPACTED', 'INACTIVE', 'EVICTED', 'INSERTED')\n AND Activity == 1\n GROUP BY PathId, Kind, TabletId\n ORDER BY PathId, Kind, TabletId;\n ", parameters: 0b TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 2025-04-07T14:02:58.503686Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034566647, txId: 281474976710668] shutting down >> KqpOlapSysView::StatsSysViewRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewRanges [GOOD] Test command err: Trying to start YDB, gRPC: 13093, MsgBus: 23390 2025-04-07T14:02:07.183536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571254821899625:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:07.183659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cbe/r3tmp/tmpusV0Xb/pdisk_1.dat 
2025-04-07T14:02:07.894451Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:07.923159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:07.923249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:07.985549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13093, node 1 2025-04-07T14:02:08.337161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:08.337183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:08.337205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:08.337286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23390 TClient is connected to server localhost:23390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:10.612775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:10.749293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:10.957665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:10.957894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:10.958117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:10.958209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:10.958303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:10.958415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:10.958505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:10.958603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:10.958700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:10.964934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:10.965100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:10.965216Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571272001769243:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:11.035043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:11.035131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:11.035362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:11.035476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:11.035581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:11.035695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:11.035785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:11.035885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:11.035994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:11.036087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:11.036181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:11.036303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571272001769244:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:11.094162Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:11.094232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:11.094443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:11.094532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:11.094628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:11.094734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:11.094829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:11.094931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571272001769261:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:11.095022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74905712720017692 ... 
1 RawBytes: 4925 BlobRangeOffset: 15064 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 13 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:13:15:0:24672:0] EntityType: COL BlobRangeSize: 5064 PathId: 3 Rows: 661 RawBytes: 9915 BlobRangeOffset: 9384 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 13 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:13:15:0:24672:0] EntityType: COL BlobRangeSize: 5680 PathId: 3 Rows: 661 RawBytes: 2644 BlobRangeOffset: 22976 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 13 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:13:15:0:24672:0] EntityType: COL BlobRangeSize: 1696 PathId: 3 Rows: 661 RawBytes: 753176 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 13 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:13:15:0:24672:0] EntityType: COL BlobRangeSize: 9384 PathId: 3 Rows: 684 RawBytes: 5472 BlobRangeOffset: 20808 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 16 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:16:18:0:25480:0] EntityType: COL BlobRangeSize: 2936 PathId: 3 Rows: 684 RawBytes: 5103 BlobRangeOffset: 15584 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 16 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:16:18:0:25480:0] EntityType: COL BlobRangeSize: 5224 PathId: 3 Rows: 684 RawBytes: 10260 BlobRangeOffset: 9712 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 16 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:16:18:0:25480:0] EntityType: COL BlobRangeSize: 5872 PathId: 3 Rows: 684 RawBytes: 2736 BlobRangeOffset: 23744 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 16 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:16:18:0:25480:0] EntityType: COL BlobRangeSize: 1736 PathId: 3 Rows: 684 RawBytes: 781654 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 16 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:16:18:0:25480:0] EntityType: COL BlobRangeSize: 9712 PathId: 3 Rows: 649 RawBytes: 5192 BlobRangeOffset: 19808 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 19 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:19:21:0:24344:0] EntityType: COL BlobRangeSize: 2800 PathId: 3 Rows: 649 RawBytes: 4852 BlobRangeOffset: 14800 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 19 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:19:21:0:24344:0] EntityType: COL BlobRangeSize: 5008 PathId: 3 Rows: 649 RawBytes: 9735 BlobRangeOffset: 9208 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 19 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:19:21:0:24344:0] EntityType: COL BlobRangeSize: 5592 PathId: 3 Rows: 649 RawBytes: 2596 BlobRangeOffset: 22608 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 19 InternalEntityId: 4 ChunkIdx: 0 BlobId: 
[72075186224037888:1:19:21:0:24344:0] EntityType: COL BlobRangeSize: 1736 PathId: 3 Rows: 649 RawBytes: 740626 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 19 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:19:21:0:24344:0] EntityType: COL BlobRangeSize: 9208 PathId: 3 Rows: 658 RawBytes: 5264 BlobRangeOffset: 20048 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 22 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:22:24:0:24608:0] EntityType: COL BlobRangeSize: 2832 PathId: 3 Rows: 658 RawBytes: 4911 BlobRangeOffset: 15016 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 22 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:22:24:0:24608:0] EntityType: COL BlobRangeSize: 5032 PathId: 3 Rows: 658 RawBytes: 9870 BlobRangeOffset: 9344 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 22 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:22:24:0:24608:0] EntityType: COL BlobRangeSize: 5672 PathId: 3 Rows: 658 RawBytes: 2632 BlobRangeOffset: 22880 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 22 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:22:24:0:24608:0] EntityType: COL BlobRangeSize: 1728 PathId: 3 Rows: 658 RawBytes: 749757 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 22 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:22:24:0:24608:0] EntityType: COL BlobRangeSize: 9344 PathId: 3 Rows: 664 RawBytes: 5312 BlobRangeOffset: 20248 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 25 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:25:27:0:24848:0] EntityType: COL BlobRangeSize: 2856 PathId: 3 Rows: 664 RawBytes: 4933 BlobRangeOffset: 15168 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 25 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:25:27:0:24848:0] EntityType: COL BlobRangeSize: 5080 PathId: 3 Rows: 664 RawBytes: 9960 BlobRangeOffset: 9448 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 25 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:25:27:0:24848:0] EntityType: COL BlobRangeSize: 5720 PathId: 3 Rows: 664 RawBytes: 2656 BlobRangeOffset: 23104 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 25 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:25:27:0:24848:0] EntityType: COL BlobRangeSize: 1744 PathId: 3 Rows: 664 RawBytes: 755994 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 25 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:25:27:0:24848:0] EntityType: COL BlobRangeSize: 9448 PathId: 3 Rows: 699 RawBytes: 5592 BlobRangeOffset: 21232 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 28 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:28:30:0:26000:0] EntityType: COL BlobRangeSize: 3000 PathId: 3 Rows: 699 RawBytes: 5207 BlobRangeOffset: 15920 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: 
INSERTED EntityName: resource_id PortionId: 28 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:28:30:0:26000:0] EntityType: COL BlobRangeSize: 5312 PathId: 3 Rows: 699 RawBytes: 10485 BlobRangeOffset: 9920 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 28 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:28:30:0:26000:0] EntityType: COL BlobRangeSize: 6000 PathId: 3 Rows: 699 RawBytes: 2796 BlobRangeOffset: 24232 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 28 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:28:30:0:26000:0] EntityType: COL BlobRangeSize: 1768 PathId: 3 Rows: 699 RawBytes: 796874 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 28 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:28:30:0:26000:0] EntityType: COL BlobRangeSize: 9920 PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Activity == 1 GROUP BY TabletId, PathId, Kind ORDER BY TabletId, Kind RESULT: 2025-04-07T14:02:42.260738Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88yn3t8mqcdpzqx1db80dd", SessionId: ydb://session/3?node_id=1&id=NWQ1MWMxMWQtNGNjZWJkOWUtNDRkZTY3YmQtNzI2ZDU3ZTg=, Slow query, duration: 11.349634s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n WHERE\n PathId == UInt64(\"3\") AND Activity == 1\n GROUP BY TabletId, PathId, Kind\n ORDER BY TabletId, Kind\n ", parameters: 0b 2025-04-07T14:02:42.263103Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034551639, txId: 281474976710668] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: 2025-04-07T14:02:52.436526Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88z070cqq6kvg562gh98xc", SessionId: ydb://session/3?node_id=1&id=NDY4YzQ4N2MtMjU1MTlhNTMtZmZmOWVmMWEtNDM4NmU5YzM=, Slow query, duration: 10.162230s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT PathId, Kind, TabletId\n FROM `/Root/olapStore/.sys/store_primary_index_stats`\n GROUP BY PathId, Kind, TabletId\n ORDER BY PathId DESC, Kind DESC, TabletId DESC\n ;\n ", parameters: 0b TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2025-04-07T14:02:52.439140Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034562619, txId: 281474976710670] shutting down ==================================== QUERY: SELECT PathId, Kind, TabletId FROM 
`/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId > UInt64("0") AND PathId < UInt32("4") OR PathId > UInt64("4") AND PathId <= UInt64("5") GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2025-04-07T14:03:00.712302Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034572924, txId: 281474976710672] shutting down >> KqpOlap::PredicatePushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::UpsertWhileSplitTest Test command err: Trying to start YDB, gRPC: 25466, MsgBus: 21467 2025-04-07T14:02:23.157774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571326461286321:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.157871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c29/r3tmp/tmprNjfg2/pdisk_1.dat 2025-04-07T14:02:23.758041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25466, node 1 2025-04-07T14:02:23.876256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:23.876279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:23.876290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:23.876378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:02:23.896748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:23.896858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:23.898300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21467 TClient is connected to server localhost:21467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:02:24.687708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:24.721748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:25.388691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:28.144751Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571326461286321:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:28.144807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:32.076816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:32.077130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:32.077302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:32.077373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:32.077471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:32.077590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:32.077688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:32.077787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:32.077898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-04-07T14:02:32.079686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:32.079816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:32.079879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571360821028399:2313];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:32.103297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:32.103357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:32.103587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:32.103688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:32.103789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:32.103895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:32.103974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:32.104075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:32.104199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:32.104294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:32.104403Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:32.104502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571360821028398:2312];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:32.344264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:32.344335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:32.352854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:32.352991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:32.353081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:32.353171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360821028400:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:32.353255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571360 ... 
:45;event=normalization_finished; 2025-04-07T14:02:53.831852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:53.831898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:53.832009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:53.832048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:53.832212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:53.832250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:53.832324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:53.832372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:53.832450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:53.832487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:53.832528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:53.832572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:53.832910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:53.832950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:53.833133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:53.833167Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:53.833310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:53.833354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:53.833576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:53.833609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:53.833748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:53.833775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:54.689559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2025-04-07T14:02:55.264143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571463900287564:7707], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:55.269846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:55.279428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571463900287626:7720], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:55.289222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:02:55.297380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571463900287691:7743], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:02:56.394214Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034576000, txId: 18446744073709551615] shutting down [[10000u]] 2025-04-07T14:02:56.441385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:56.445892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-07T14:02:56.573839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 2025-04-07T14:02:56.589583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 2025-04-07T14:02:57.343651Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034576634, txId: 18446744073709551615] shutting down [[10000u]] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; VERIFY failed (2025-04-07T14:02:57.534069Z): verification=found;fline=hash_intervals.h:133; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2025-04-07T14:02:57.906800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 2025-04-07T14:02:57.976654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-04-07T14:02:57.989354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:58.585072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:58.598841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:58.609842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/tx/sharding/hash_intervals.h:133: MakeShardingWrite @ 0x159B6B60 4. 
/-S/ydb/core/tx/sharding/hash_intervals.cpp:184: MakeSharding @ 0x159B4636 2025-04-07T14:02:59.830212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:59.841939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:59.854919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720668:0, at schemeshard: 72057594046644480 5. /-S/ydb/core/tx/sharding/sharding.cpp:245: SplitByShardsToArrowBatches @ 0x217531D7 6. /-S/ydb/core/tx/sharding/sharding.cpp:257: SplitByShards @ 0x21754491 7. /-S/ydb/core/tx/data_events/columnshard_splitter.cpp:64: SplitImpl @ 0x391305C4 8. /-S/ydb/core/tx/data_events/columnshard_splitter.cpp:57: DoSplitData @ 0x3912E09B 9. /-S/ydb/core/tx/data_events/shards_splitter.h:73: SplitData @ 0x39120CBA 10. /-S/ydb/core/tx/tx_proxy/rpc_long_tx.cpp:92: ProceedWithSchema @ 0x3911D88A 11. /-S/ydb/core/tx/tx_proxy/rpc_long_tx.cpp:250: Bootstrap @ 0x3911C8E1 12. /-S/ydb/library/actors/core/actor_bootstrapped.h:26: StateBootstrap @ 0x3911C8E1 13. /-S/ydb/library/actors/core/actor.h:533: Receive @ 0x1AEB7A46 14. /-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x1AEB1818 15. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() @ 0x1AEBAC74 16. /-S/ydb/library/actors/core/executor_thread.cpp:479: ProcessExecutorPool @ 0x1AEB9B7D 17. /-S/ydb/library/actors/core/executor_thread.cpp:510: ThreadProc @ 0x1AEBC56A 18. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x180E8B44 19. /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x17DA03F8 20. ??:0: ?? @ 0x7F1F1102FAC2 21. ??:0: ?? @ 0x7F1F110C184F >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] |90.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown [GOOD] Test command err: Trying to start YDB, gRPC: 6260, MsgBus: 20166 2025-04-07T14:00:54.010041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570947016103763:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:54.010076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f94/r3tmp/tmpzojvQD/pdisk_1.dat 2025-04-07T14:00:54.517319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:54.517407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:54.551093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:54.551905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6260, node 1 2025-04-07T14:00:54.746134Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:54.746166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:54.746175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:54.746258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20166 TClient is connected to server localhost:20166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:55.602808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:55.617255Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:55.630532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:55.757630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.757818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.758057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.758183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.758275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.758406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.758516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.758648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.758814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.758919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.759036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.759150Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490570951311071742:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.807698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.807770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.807989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.808118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.808211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.808316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.808424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.808537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:55.808658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:55.808789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:55.808890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:55.808988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570951311071782:2293];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:55.840482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:55.840552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:55.840753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:55.840891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:55.841003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:55.841110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:55.841228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:55.841341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570951311071740:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... T14:02:44.856994Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034564412, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` 2025-04-07T14:02:45.596747Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034565000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:46.047534Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034565567, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. ORDER BY `timestamp` 2025-04-07T14:02:46.809998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034566000, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. 
ORDER BY `timestamp` 2025-04-07T14:02:47.328898Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034567023, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` 2025-04-07T14:02:47.974143Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034567142, txId: 18446744073709551615] shutting down 2025-04-07T14:02:48.336301Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034568000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` 2025-04-07T14:02:49.092632Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034569000, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` 2025-04-07T14:02:49.470646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034569053, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. != 1.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. 
!= 1.f ORDER BY `timestamp` 2025-04-07T14:02:50.122646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034570000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:50.486641Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034570068, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` 2025-04-07T14:02:51.283732Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034571000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:51.721804Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034571251, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` 2025-04-07T14:02:52.676505Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034572000, txId: 18446744073709551615] shutting down 2025-04-07T14:02:53.247751Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034573000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` 2025-04-07T14:02:53.935253Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034573190, txId: 18446744073709551615] shutting down 2025-04-07T14:02:54.246301Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034574000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` 2025-04-07T14:02:54.814598Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034574226, txId: 18446744073709551615] shutting down 2025-04-07T14:02:55.093769Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; 
our snapshot: [step: 1744034575000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` 2025-04-07T14:02:55.757627Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034575087, txId: 18446744073709551615] shutting down 2025-04-07T14:02:56.096124Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034576000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` 2025-04-07T14:02:56.739443Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034576067, txId: 18446744073709551615] shutting down 2025-04-07T14:02:57.090232Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034577000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2025-04-07T14:02:57.741063Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034577075, txId: 18446744073709551615] shutting down 2025-04-07T14:02:58.054001Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034578000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2025-04-07T14:02:58.677850Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034578041, txId: 18446744073709551615] shutting down 2025-04-07T14:02:58.991586Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034578650, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA 
Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` 2025-04-07T14:02:59.820289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034579000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:00.277819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034580000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` 2025-04-07T14:03:00.940471Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034580239, txId: 18446744073709551615] shutting down 2025-04-07T14:03:01.358423Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034581000, txId: 18446744073709551615] shutting down >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 25044, MsgBus: 25844 2025-04-07T14:02:15.440193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571294714358378:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:15.440240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c85/r3tmp/tmpYHS5Ar/pdisk_1.dat 2025-04-07T14:02:16.087856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:16.087921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:16.094712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:16.116400Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25044, node 1 2025-04-07T14:02:16.294394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:16.294439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:16.294448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:16.294549Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25844 TClient is connected to server localhost:25844 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:16.967691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:16.994950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:17.015279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:17.111432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:17.111587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:17.111816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:17.111925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:17.112037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:17.112161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:17.112265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:17.112378Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:17.112487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:17.112575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:17.113075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:17.113208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571303304293413:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:17.157214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:17.157287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:17.157513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:17.157646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:17.157748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:17.157842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:17.157953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:17.158076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:17.158175Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:17.158281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:17.158378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:17.158489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571303304293414:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:17.193036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:17.193100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:17.193287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:17.193393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:17.193478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:17.193601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:17.193705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:17.193793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571303304293460:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.262868Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.410779Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.410846Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.494526Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.494613Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.578311Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.578395Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.662338Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.662406Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:03:00.747069Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:03:00.758334Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:03:00.810751Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.810806Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.893980Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.894033Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:00.977360Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:00.977414Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.060592Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.060650Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.143893Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. 
CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.143936Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.187385Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:03:01.307513Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.307567Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.412647Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.412732Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.496058Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.496103Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.579075Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.579126Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.662447Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:01.662495Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1785:3055], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTNiZjhjMTAtMWU4NzdmYy00ZGEyZThmZi0xMmUzZTczYg==. CustomerSuppliedId : . TraceId : 01jr88ywbd59mpxcpbt6cgqc97. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:01.725219Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1214:2356];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:03:01.746569Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1214:2356];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpOlapBlobsSharing::SplitEmpty [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-04-07T14:02:46.439405Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439569Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.509454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.509944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.563172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.274189Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.274296Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.291021Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.291151Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.316350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.698665Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.698780Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.715507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.716043Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.740299Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.740849Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute 
(sender=[3:130:2156], cookie=8632559016181672931, session=0, seqNo=0) 2025-04-07T14:02:47.741029Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.755521Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=8632559016181672931, session=1) 2025-04-07T14:02:47.755993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:139:2163], cookie=3355003473972886421) 2025-04-07T14:02:47.756056Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:139:2163], cookie=3355003473972886421) 2025-04-07T14:02:48.075209Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.087263Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.344541Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.356485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.606464Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.619125Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.873543Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.886928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.146226Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.158428Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.405251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.417274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.654598Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.667419Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.918241Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.930289Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.178671Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.190936Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.476192Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.488572Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.747611Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.759806Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.023878Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.035684Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.292584Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.304584Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.574233Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.587343Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.888993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.901111Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.160363Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.172389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.433057Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.445215Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.696843Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.710384Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.970923Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.983016Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.251967Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.264039Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.531355Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.543066Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.800083Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.812035Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.068961Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.080910Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.337846Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.349641Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.607149Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.619193Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.886066Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.897740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.155043Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.167049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.424794Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.436877Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.694159Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.705819Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.994817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.006784Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.275347Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.287044Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.545035Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.557297Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.805389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.817694Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.077566Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.089724Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.346875Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.358961Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.629101Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.641348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.899308Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.911067Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.168563Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.180399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.437390Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.449359Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.717867Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.729550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.987669Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.999665Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.267432Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.279342Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.537109Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.548772Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.805353Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.817237Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.106834Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.118879Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.376537Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.388346Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.656506Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.668302Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.925383Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.937379Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.193883Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.205873Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.464033Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.475885Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.722466Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.734296Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.991331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.003352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.249868Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.261595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.508485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.520025Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.767003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.778870Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.026311Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.037898Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.274538Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.286319Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.544821Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.556414Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.804721Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.816402Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:04.117114Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-07T14:03:04.117194Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:03:04.128915Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:03:04.139898Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:470:2477], cookie=8481244582487261942) 2025-04-07T14:03:04.139991Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:470:2477], cookie=8481244582487261942) 2025-04-07T14:03:04.515286Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:04.515366Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:04.525567Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:04.525658Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:04.538354Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:04.544727Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=10764011767833042319, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-07T14:03:04.544927Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-07T14:03:04.566797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=10764011767833042319) 2025-04-07T14:03:04.568224Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:03:04.568294Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2025-04-07T14:03:04.568481Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-07T14:03:04.568500Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-04-07T14:03:04.610145Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-07T14:03:04.610251Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-07T14:03:04.610500Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2025-04-07T14:03:04.610640Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-04-07T14:03:04.652115Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-StreamLookup |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::RangeFullScan >> KqpCost::IndexLookupAtLeast8BytesInStorage+StreamLookup >> KqpCost::IndexLookupJoin+StreamLookupJoin >> KqpCost::IndexLookupAndTake+StreamLookup >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> KqpCost::IndexLookupJoin-StreamLookupJoin |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead >> KqpCost::ScanScriptingRangeFullScan-SourceRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::SplitEmpty [GOOD] Test command err: Trying to start YDB, gRPC: 3574, MsgBus: 3359 2025-04-07T14:01:33.956004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571114419444443:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:33.956060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb0/r3tmp/tmpZaPlAx/pdisk_1.dat 2025-04-07T14:01:34.683699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:34.683793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:34.693772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:34.745780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3574, node 1 2025-04-07T14:01:34.901239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:34.901258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:34.901271Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-07T14:01:34.901377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3359 TClient is connected to server localhost:3359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:35.541566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:36.165242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:38.955957Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571114419444443:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:38.956042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:41.617401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:41.617403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:41.617577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:41.617805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:41.617925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:41.618041Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:41.618045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:41.618167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:41.618214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:41.618262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:41.618305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:41.618371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:41.618401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:41.618704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:41.618704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:41.618809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:41.618816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:41.618896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:41.618904Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:41.618988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:41.618989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7490571148779187654:2324];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:41.619089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:41.619171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:41.619259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571148779187594:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:41.658532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:41.658592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:41.658797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:41.658929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:41.659026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:41.659113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:41.659186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490571148779187647:2318];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalize ... 
1474976720705:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.398594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720706:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.415146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720707:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:41.653891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720708:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.673584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720709:0, at schemeshard: 72057594046644480 2025-04-07T14:02:41.696292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720710:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:42.632015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720711:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.655652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720712:0, at schemeshard: 72057594046644480 2025-04-07T14:02:42.683194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720713:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:43.690859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720714:0, at schemeshard: 72057594046644480 2025-04-07T14:02:43.723642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720715:0, at schemeshard: 72057594046644480 2025-04-07T14:02:43.741109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720716:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:45.563998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720717:0, at schemeshard: 72057594046644480 2025-04-07T14:02:45.584198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720718:0, at schemeshard: 72057594046644480 2025-04-07T14:02:45.610984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720719:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:02:45.727258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720720:0, at schemeshard: 72057594046644480 2025-04-07T14:02:45.743409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720721:0, at schemeshard: 72057594046644480 2025-04-07T14:02:45.761026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720722:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:47.629103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720723:0, at schemeshard: 72057594046644480 2025-04-07T14:02:47.648156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720724:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:47.683730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720725:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:49.508977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720726:0, at schemeshard: 72057594046644480 2025-04-07T14:02:49.526081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720727:0, at schemeshard: 72057594046644480 2025-04-07T14:02:49.544546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720728:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:50.634290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720729:0, at schemeshard: 72057594046644480 2025-04-07T14:02:50.650025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720730:0, at schemeshard: 72057594046644480 2025-04-07T14:02:50.667143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720731:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:51.718993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720732:0, at schemeshard: 72057594046644480 2025-04-07T14:02:51.742205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720733:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:02:51.765244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720734:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:53.572147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720735:0, at schemeshard: 72057594046644480 2025-04-07T14:02:53.591079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720736:0, at schemeshard: 72057594046644480 2025-04-07T14:02:53.608799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720737:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:55.404272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720738:0, at schemeshard: 72057594046644480 2025-04-07T14:02:55.420599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720739:0, at schemeshard: 72057594046644480 2025-04-07T14:02:55.435793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720740:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:56.671798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720741:0, at schemeshard: 72057594046644480 2025-04-07T14:02:56.687668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720742:0, at schemeshard: 72057594046644480 2025-04-07T14:02:56.701897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720743:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:02:58.435955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720744:0, at schemeshard: 72057594046644480 2025-04-07T14:02:58.453666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720745:0, at schemeshard: 72057594046644480 2025-04-07T14:02:58.468847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720746:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:02:59.544166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720747:0, at schemeshard: 72057594046644480 2025-04-07T14:02:59.559539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720748:0, at schemeshard: 72057594046644480 2025-04-07T14:02:59.575924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720749:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:03:00.701971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720750:0, at schemeshard: 72057594046644480 2025-04-07T14:03:00.721247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720751:0, at schemeshard: 72057594046644480 2025-04-07T14:03:00.736090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720752:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:03:01.628785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720753:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_FINISHED 2025-04-07T14:03:02.460946Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034582000, txId: 18446744073709551615] shutting down [[0u]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1386, MsgBus: 28107 2025-04-07T13:57:15.020493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570004394000275:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:15.023880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001060/r3tmp/tmp4rQvaA/pdisk_1.dat 2025-04-07T13:57:15.636261Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1386, node 1 2025-04-07T13:57:15.788595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:15.788618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:15.788627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:15.788761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T13:57:15.807277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:15.807372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:15.811609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28107 TClient is connected to server localhost:28107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:16.470487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:16.521480Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T13:57:16.543636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:16.762327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.048426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:17.148464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:19.472416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570021573871031:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:19.472541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:19.763159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.791531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.833688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.866986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.903745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:19.975604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:20.008786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570004394000275:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:20.008838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:20.057326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570025868838828:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.057415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.057811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570025868838833:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:20.060853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:20.073344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T13:57:20.074270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490570025868838835:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:22.024469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.058292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.094551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.181543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.218212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.401423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.473858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.519533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.569328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.607950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.642893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.684407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:22.721245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.445588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T13:57:23.496428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.550503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.698347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:23.728441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72 ... CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:50.543524Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:50.543549Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:50.543590Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:50.543741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:50.543795Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:50.543846Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:50.543889Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:50.543916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:50.543958Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:50.544000Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:50.544028Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:50.544461Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:50.544513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:50.544627Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T14:02:50.544688Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:50.544853Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:50.544898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:50.544984Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:50.545016Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:50.545076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:50.545101Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:50.545136Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:50.545164Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:50.545480Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:50.545525Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:50.545722Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:50.545783Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:50.545988Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:50.546038Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:50.546275Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:50.546320Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:50.546485Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:50.546533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:50.547301Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:02:50.547355Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:50.547464Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:50.547529Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:50.547769Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:50.547826Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:50.547967Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:50.548025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:50.548110Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:50.548148Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:50.548216Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:50.548261Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:50.548724Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:50.548782Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:50.549057Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:50.549112Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:50.549307Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:50.549395Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:50.549661Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:50.549719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:50.549851Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:50.549887Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-StreamLookup ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWrite 2025-04-07 14:03:02,373 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:03:02,486 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 301966 75.5M 74.8M 21.3M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/001074/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args 302188 1.6G 1.6G 861M └─ ydb-core-kqp-ut-data_integrity --trace-path-append /home/runner/.ya/build/build_root/u6pr/001074/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/c Test command err: Trying to start YDB, gRPC: 25224, MsgBus: 1717 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001074/r3tmp/tmpklSQql/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25224, node 1 TClient is connected to server localhost:1717 TClient is connected to server localhost:1717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001074/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001074/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> KqpJoinOrder::Chain65Nodes [FAIL] >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore >> TKesusTest::TestAcquireSemaphoreRebootTimeout 
[GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> KqpCost::PointLookup [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpCost::RangeFullScan [GOOD] >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-04-07T14:02:46.439394Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439546Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.508292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.508806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.562095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562583Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7501669013344920920, session=0, seqNo=0) 2025-04-07T14:02:46.562735Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.584271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7501669013344920920, session=1) 2025-04-07T14:02:46.584544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=3508034455260580169, session=0, seqNo=0) 2025-04-07T14:02:46.584738Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:46.596456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=3508034455260580169, session=2) 2025-04-07T14:02:46.596998Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:141:2165], 
cookie=14514895143371002445, name="Sem1", limit=1) 2025-04-07T14:02:46.597183Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:02:46.609308Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:141:2165], cookie=14514895143371002445) 2025-04-07T14:02:46.609619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-07T14:02:46.609788Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:02:46.609970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-07T14:02:46.622123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:46.622187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-04-07T14:02:46.622724Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=13145253719374510961, name="Sem1") 2025-04-07T14:02:46.622795Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=13145253719374510961) 2025-04-07T14:02:46.632392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:152:2176], cookie=12804101969588775459, name="Sem1") 2025-04-07T14:02:46.632505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:152:2176], cookie=12804101969588775459) 2025-04-07T14:02:47.284000Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.308958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:47.558064Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.570395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:47.842196Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.855871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.103616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.122357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.391927Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.403568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.655573Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.667653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.897734Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.909393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.158979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.171115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.418889Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.433285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.731724Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.743730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.012144Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.024239Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.285594Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.297779Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.555023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.567188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.824617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.841659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.134316Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.146523Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.414922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.427284Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.689822Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.709528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.976168Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.987983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.247206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.259598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.539690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.552520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.831177Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.843234Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.101818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.113551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.373038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.384797Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.642163Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.653873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.921802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.933596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.191018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.202920Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.470528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.482617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.740053Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.751813Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.009069Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.021409Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.301562Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.313722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.571438Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.583380Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.851311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.863434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.110745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.122892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.380470Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.392571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.661912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.673990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.932366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.944031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.212291Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.224027Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.481967Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.494123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.751600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.763257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.052048Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.063668Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.320314Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.332292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.600207Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.612192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.869377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.881120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.137784Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.149712Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.438229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.45 ... 
4037927937] TTxSelfCheck::Execute 2025-04-07T14:03:09.492926Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:09.750548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:09.762436Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:10.019860Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:10.031661Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:10.310797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:10.322648Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:10.579353Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:10.590935Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:10.847981Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:10.860005Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.121003Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.135108Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.392988Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.404991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.677308Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.696812Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.955190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.966828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.214269Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.226303Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.484313Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.496098Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.754958Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.766896Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.067817Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.079464Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.355445Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.367129Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.625402Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.637404Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.894194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.905929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.162827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.174539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.424590Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.436735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.704784Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.716471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.974005Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.986153Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.244628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.256885Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.520220Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.532161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.771426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-07T14:03:15.771509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:03:15.771562Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-07T14:03:15.783769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:03:15.794721Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:409:2409], cookie=18112303947607969340, name="Sem1") 2025-04-07T14:03:15.794821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:409:2409], cookie=18112303947607969340) 2025-04-07T14:03:16.180384Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:16.180490Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:16.197080Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:16.197223Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:16.211389Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:16.211794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=15710872676846321026, session=0, seqNo=0) 2025-04-07T14:03:16.211924Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:03:16.235165Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=15710872676846321026, session=1) 2025-04-07T14:03:16.235470Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=15467008424398779328, session=0, seqNo=0) 2025-04-07T14:03:16.235619Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:03:16.247489Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=15467008424398779328, session=2) 2025-04-07T14:03:16.247744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=2401539605325064633, session=0, seqNo=0) 2025-04-07T14:03:16.247859Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-07T14:03:16.260714Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=2401539605325064633, session=3) 2025-04-07T14:03:16.261413Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=13434526697615777611, name="Sem1", limit=3) 2025-04-07T14:03:16.261588Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:03:16.276299Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=13434526697615777611) 2025-04-07T14:03:16.276651Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-07T14:03:16.277392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:03:16.277610Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-07T14:03:16.277699Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-07T14:03:16.277786Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-07T14:03:16.290234Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2025-04-07T14:03:16.290336Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2025-04-07T14:03:16.290368Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2025-04-07T14:03:16.291021Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:151:2175], cookie=2470911243505047087, name="Sem1") 2025-04-07T14:03:16.291117Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:151:2175], cookie=2470911243505047087) 2025-04-07T14:03:16.291569Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=6987833697106746185, name="Sem1") 2025-04-07T14:03:16.291637Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=6987833697106746185) 2025-04-07T14:03:16.291906Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=444, session=1, semaphore="Sem1" count=1) 2025-04-07T14:03:16.292025Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-07T14:03:16.304139Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=444) 2025-04-07T14:03:16.304740Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2183], cookie=7649496040547840029, name="Sem1") 2025-04-07T14:03:16.304821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2183], cookie=7649496040547840029) 2025-04-07T14:03:16.305159Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=13864045016321303959, name="Sem1") 2025-04-07T14:03:16.305208Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=13864045016321303959) 2025-04-07T14:03:16.314730Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:16.314805Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Execute 2025-04-07T14:03:16.315257Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:16.315718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:16.363983Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:16.364146Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:03:16.364193Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-07T14:03:16.364218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-07T14:03:16.364569Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:202:2216], cookie=10119719982880530263, name="Sem1") 2025-04-07T14:03:16.364650Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:202:2216], cookie=10119719982880530263) 2025-04-07T14:03:16.365375Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:211:2224], cookie=10299634416543000474, name="Sem1") 2025-04-07T14:03:16.365456Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:211:2224], cookie=10299634416543000474) >> KqpCost::IndexLookup-StreamLookup [GOOD] >> KqpCost::IndexLookupAndTake+StreamLookup [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage+StreamLookup [GOOD] >> KqpCost::IndexLookupAndTake-StreamLookup [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65386, MsgBus: 63983 2025-04-07T14:03:08.823193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571521367907480:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.823735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001676/r3tmp/tmpYpiPow/pdisk_1.dat 2025-04-07T14:03:09.094662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65386, node 1 2025-04-07T14:03:09.179437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:09.179577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:09.184938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:09.433616Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63983 TClient is connected to server localhost:63983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.450044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.687131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.783837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.851829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.110693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571538547778271:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.110864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.823731Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571521367907480:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.823783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.378186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.402656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.422678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.448561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.475737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547137713466:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547137713471:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.573738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571547137713473:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 18996, MsgBus: 30201 2025-04-07T14:03:08.004230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571519921322237:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.004266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001715/r3tmp/tmpUg5VGl/pdisk_1.dat 2025-04-07T14:03:08.548006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.549591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.549671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.588410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18996, node 1 2025-04-07T14:03:09.433674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30201 TClient is connected to server localhost:30201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.683632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.783994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.819461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.111657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571537101193063:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.111749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.007177Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571519921322237:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.007224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.333579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.358252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.381350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.420583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.446690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571545691128253:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571545691128258:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.572418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571545691128260:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.948771Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-07T14:03:16.044845Z, after 0.096654s 2025-04-07T14:03:15.948903Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:03:15.979953Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-07T14:03:16.046394Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: batching, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744034596 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2025-04-07T14:03:16.185399Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7490571554281063206:2469] 2025-04-07T14:03:16.185432Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7490571549986095873:2469] 2025-04-07T14:03:16.186793Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2025-04-07T14:03:16.186930Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2025-04-07T14:03:16.188355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2025-04-07T14:03:16.198230Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1744034596241 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7490571519921322697 RawX2: 4294969515 } } Step: 1744034596241 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2025-04-07T14:03:16.198270Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2025-04-07T14:03:16.198376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2025-04-07T14:03:16.198394Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:03:16.198453Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744034596241:281474976710671] in PlanQueue unit at 72075186224037914 2025-04-07T14:03:16.198602Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1744034596241:281474976710671 keys extracted: 0 2025-04-07T14:03:16.198865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:03:16.199660Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1744034596241 txid# 281474976710671} 2025-04-07T14:03:16.199698Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1744034596241} 2025-04-07T14:03:16.199735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2025-04-07T14:03:16.199798Z node 1 :TX_DATASHARD DEBUG: Complete [1744034596241 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7490571554281063208:3675], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:03:16.199818Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2025-04-07T14:03:16.200000Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
KqpSnapshotManager: snapshot 1744034596241:281474976710671 created 2025-04-07T14:03:16.200194Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-07T14:03:16.200257Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-07T14:03:16.200268Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-07T14:03:16.200391Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-07T14:03:16.200556Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: ... 0cbaxaae490yp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:03:16.227893Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063221:2478], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.227939Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:03:16.227961Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-04-07T14:03:16.228070Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490571554281063221 RawX2: 4503603922340270 } } DstEndpoint { ActorId { RawX1: 7490571554281063223 RawX2: 4503603922340271 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490571554281063223 RawX2: 4503603922340271 } } DstEndpoint { ActorId { RawX1: 7490571554281063217 RawX2: 4503603922340261 } } InMemory: true } 2025-04-07T14:03:16.228092Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Update input channelId: 1, peer: [1:7490571554281063221:2478] 2025-04-07T14:03:16.228146Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:03:16.228194Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7490571554281063221:2478] 2025-04-07T14:03:16.228247Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.228434Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:03:16.228462Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063221:2478], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:03:16.228483Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063221:2478], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.228498Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-04-07T14:03:16.228513Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063221:2478], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:03:16.228612Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-04-07T14:03:16.228761Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:03:16.228964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:03:16.229210Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7490571549986095873:2469], seqNo: 1, nRows: 1 2025-04-07T14:03:16.229390Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490571554281063221:2478], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9211 DurationUs: 11000 Tasks { TaskId: 1 CpuTimeUs: 946 FinishTimeMs: 1744034596228 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 250 BuildCpuTimeUs: 696 WaitInputTimeUs: 2335 HostName: "ghrun-lqtfcktism" NodeId: 1 StartTimeMs: 1744034596217 } MaxMemoryUsage: 1048576 } 2025-04-07T14:03:16.229433Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7490571554281063221:2478] 2025-04-07T14:03:16.229490Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7490571554281063223:2479], 2025-04-07T14:03:16.230058Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7490571554281063224:2479] 2025-04-07T14:03:16.230104Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.230144Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:03:16.230154Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-07T14:03:16.230164Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571554281063223:2479], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89013acm00cbaxaae490yp. SessionId : ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-07T14:03:16.230228Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-07T14:03:16.230299Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:03:16.230440Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:03:16.230617Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490571554281063223:2479], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 7390 DurationUs: 13000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 786 FinishTimeMs: 1744034596230 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 174 BuildCpuTimeUs: 612 WaitInputTimeUs: 9704 HostName: "ghrun-lqtfcktism" NodeId: 1 StartTimeMs: 1744034596217 } MaxMemoryUsage: 1048576 } 2025-04-07T14:03:16.230650Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7490571554281063223:2479] 2025-04-07T14:03:16.230765Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:03:16.230813Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571554281063217:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr89013acm00cbaxaae490yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YWM4N2ItYTQ3ZWE5MWEtZDM4NWVmZWUtZTMwMmZjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.016601s ReadRows: 1 ReadBytes: 20 ru: 11 rate limiter was not found force flag: 1 2025-04-07T14:03:16.231512Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596241, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::RangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 4204, MsgBus: 63614 2025-04-07T14:03:08.003251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571522435780750:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001701/r3tmp/tmpa19B4n/pdisk_1.dat 2025-04-07T14:03:08.543385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.580474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.580618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.582624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4204, node 1 2025-04-07T14:03:09.433650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63614 TClient is connected to server localhost:63614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.337174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:03:11.711869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:11.798615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.835436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.111151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571539615651551:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.111227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.003701Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571522435780750:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.003773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.333061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.359852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.384860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.420634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.442349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548205586744:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548205586749:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.561961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.570940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571548205586751:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } query_phases { duration_us: 16703 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 23219 affected_shards: 1 } compilation { duration_us: 223209 cpu_time_us: 220931 } process_cpu_time_us: 194 total_duration_us: 241301 total_cpu_time_us: 244344 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 10756, MsgBus: 2112 2025-04-07T14:03:08.002663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571522597557902:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001716/r3tmp/tmph8QB5K/pdisk_1.dat 2025-04-07T14:03:08.543700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.543804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.544099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.576933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10756, node 1 2025-04-07T14:03:09.433685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2112 TClient is connected to server localhost:2112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.717197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.825628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.858719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.110749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571539777428709:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.110908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.008755Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571522597557902:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.008806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.377806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.402242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.423299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.448577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.472478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548367363907:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548367363912:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.572278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571548367363914:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:16.240798Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596241, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 17720, MsgBus: 29117 2025-04-07T14:03:08.003612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571521105543995:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001705/r3tmp/tmphPnqhg/pdisk_1.dat 2025-04-07T14:03:08.522557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.544387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.544483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.554341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17720, node 1 2025-04-07T14:03:09.433668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29117 TClient is connected to server localhost:29117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.713642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.824290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.861243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.113682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571538285414658:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.113740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.003644Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571521105543995:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.003715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.336282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.366429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.434619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.454937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.481719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546875349862:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546875349867:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.561993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.569205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571546875349869:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:16.246083Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596241, txId: 281474976710671] shutting down >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 29558, MsgBus: 18480 2025-04-07T14:03:08.641985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571521512411933:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.642034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016bb/r3tmp/tmpQV6oKV/pdisk_1.dat 2025-04-07T14:03:08.888166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29558, node 1 2025-04-07T14:03:08.987858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.987992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.991235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:09.433722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18480 TClient is connected to server localhost:18480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.341254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.712062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.811453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.879544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.110690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571538692282738:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.110799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.648318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571521512411933:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.648377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.376715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.401802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.422329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.446230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.471291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547282217935:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547282217940:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.571118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571547282217942:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 18539, MsgBus: 9625 2025-04-07T14:03:08.005995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571518754470041:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.006146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016e1/r3tmp/tmpYIPAra/pdisk_1.dat 2025-04-07T14:03:08.531300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.543531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.543629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.570276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18539, node 1 2025-04-07T14:03:09.433718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9625 TClient is connected to server localhost:9625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.740115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.817371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.904736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.110743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571535934340696:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.116318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.005997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571518754470041:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.006036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.309209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.350117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.388397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.410003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.428642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.492071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571544524275902:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571544524275907:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.570700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571544524275909:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.937529Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-07T14:03:16.060658Z, after 0.123264s 2025-04-07T14:03:15.937693Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:03:15.979822Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-07T14:03:16.061944Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: batching, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744034596 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2025-04-07T14:03:16.188223Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7490571553114210856:2469] 2025-04-07T14:03:16.188264Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7490571548819243523:2469] 2025-04-07T14:03:16.190763Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2025-04-07T14:03:16.190919Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2025-04-07T14:03:16.192426Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2025-04-07T14:03:16.198975Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1744034596241 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7490571518754470376 RawX2: 4294969519 } } Step: 1744034596241 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2025-04-07T14:03:16.199015Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2025-04-07T14:03:16.199133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2025-04-07T14:03:16.199159Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:03:16.199188Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744034596241:281474976710671] in PlanQueue unit at 72075186224037914 2025-04-07T14:03:16.199301Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1744034596241:281474976710671 keys extracted: 0 2025-04-07T14:03:16.199563Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:03:16.200158Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1744034596241 txid# 281474976710671} 2025-04-07T14:03:16.200196Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1744034596241} 2025-04-07T14:03:16.200239Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2025-04-07T14:03:16.200303Z node 1 :TX_DATASHARD DEBUG: Complete [1744034596241 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7490571553114210858:3669], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:03:16.200339Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2025-04-07T14:03:16.200422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
KqpSnapshotManager: snapshot 1744034596241:281474976710671 created 2025-04-07T14:03:16.200737Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-07T14:03:16.200836Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:03:16.200873Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-07T14:03:16.201006Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-07T14:03:16.201149Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [O ... _compute_manager.cpp:40;event=receive_data;actor_id=[1:7490571553114210878:2050];count_chunks=1; 2025-04-07T14:03:16.232760Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:245;event=on_receive;compute_shard_id=NO_VALUE_OPTIONAL; 2025-04-07T14:03:16.232781Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:192;event=add_data_to_compute;rows=3; 2025-04-07T14:03:16.232814Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:162;event=send_data_to_compute;space=8388608;queue=1;compute_actor_id=[1:7490571553114210871:2478];rows=3; 2025-04-07T14:03:16.232846Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:146;event=stop_scanner; 2025-04-07T14:03:16.232869Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:38;event=scan_ack_on_finished;actor_id=[1:7490571553114210878:2050]; 2025-04-07T14:03:16.232889Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210874:2480]. EVLOGKQP:0/0/3/3 2025-04-07T14:03:16.232915Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:337;event=scanner_finished;tablet_id=72075186224037914;stop_shard=1; 2025-04-07T14:03:16.232942Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:88;event=stop_scanner;actor_id=[1:7490571553114210878:2050];message=;final_flag=1; 2025-04-07T14:03:16.233031Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210874:2480]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2025-04-07T14:03:16.233096Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:384;event=wait_all_scanner_finished;scans=0; 2025-04-07T14:03:16.233152Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210874:2480]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2025-04-07T14:03:16.233334Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:147 :TEvSendData: [1:7490571553114210874:2480]/[1:7490571553114210871:2478] 2025-04-07T14:03:16.233687Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210871:2478], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. TraceId : 01jr890138br69e9f1xn0tjben. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:03:16.233701Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210873:2479], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. CustomerSuppliedId : . TraceId : 01jr890138br69e9f1xn0tjben. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:03:16.233718Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:177 :TEvFetcherFinished: [1:7490571553114210874:2480] 2025-04-07T14:03:16.233738Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7490571553114210871:2478] 2025-04-07T14:03:16.233779Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210873:2479], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. CustomerSuppliedId : . TraceId : 01jr890138br69e9f1xn0tjben. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.234386Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210873:2479], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. CustomerSuppliedId : . TraceId : 01jr890138br69e9f1xn0tjben. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:03:16.234422Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037914 2025-04-07T14:03:16.234495Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2025-04-07T14:03:16.234511Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:03:16.234526Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037914 TxInFly 0 2025-04-07T14:03:16.234533Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:03:16.234553Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2025-04-07T14:03:16.234601Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-04-07T14:03:16.234618Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210871:2478], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. TraceId : 01jr890138br69e9f1xn0tjben. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:03:16.234755Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-04-07T14:03:16.234833Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:03:16.234980Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7490571548819243523:2469], seqNo: 1, nRows: 1 2025-04-07T14:03:16.235074Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:03:16.235168Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490571553114210871:2478], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3799 DurationUs: 12000 Tasks { TaskId: 1 CpuTimeUs: 971 FinishTimeMs: 1744034596234 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 232 BuildCpuTimeUs: 739 WaitInputTimeUs: 4307 HostName: "ghrun-lqtfcktism" NodeId: 1 StartTimeMs: 1744034596222 } MaxMemoryUsage: 1048576 } 2025-04-07T14:03:16.235201Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7490571553114210871:2478] 2025-04-07T14:03:16.235245Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7490571553114210873:2479], 2025-04-07T14:03:16.235647Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7490571553114210875:2479] 2025-04-07T14:03:16.235709Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210873:2479], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. CustomerSuppliedId : . TraceId : 01jr890138br69e9f1xn0tjben. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:03:16.235749Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:03:16.235772Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-07T14:03:16.235795Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490571553114210873:2479], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=. CustomerSuppliedId : . TraceId : 01jr890138br69e9f1xn0tjben. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-07T14:03:16.235867Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-07T14:03:16.235956Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:03:16.235998Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7490571553114210873:2479], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2133 DurationUs: 7000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1171 FinishTimeMs: 1744034596235 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 507 BuildCpuTimeUs: 664 WaitInputTimeUs: 4029 HostName: "ghrun-lqtfcktism" NodeId: 1 StartTimeMs: 1744034596228 } MaxMemoryUsage: 1048576 } 2025-04-07T14:03:16.236031Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7490571553114210873:2479] 2025-04-07T14:03:16.236116Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:03:16.236130Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:03:16.236175Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7490571553114210867:2469] TxId: 281474976710672. Ctx: { TraceId: 01jr890138br69e9f1xn0tjben, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjZTIzNjUtYTM3NGI0MjctMjQxMDcwM2YtOTQxN2U4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.005932s ReadRows: 3 ReadBytes: 96 ru: 3 rate limiter was not found force flag: 1 2025-04-07T14:03:16.236878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596241, txId: 281474976710671] shutting down |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |90.6%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 20746, MsgBus: 17341 2025-04-07T14:03:08.889194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571520698611636:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.889237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00167f/r3tmp/tmpi687EO/pdisk_1.dat 2025-04-07T14:03:09.175084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20746, node 1 2025-04-07T14:03:09.266927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:09.267093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:09.268741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:09.433631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.433805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17341 TClient is connected to server localhost:17341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.449533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.699354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.772932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.819064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.112429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571537878482306:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.112560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.889313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571520698611636:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.900330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.333534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.360192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.386177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.420583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.447014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.557372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546468417488:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546468417493:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.569504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571546468417495:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.995875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys 1 8 /Root/SecondaryKeys/Index/indexImplTable 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 25355, MsgBus: 9300 2025-04-07T14:03:08.003110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571522047261279:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016dc/r3tmp/tmpWhq2oX/pdisk_1.dat 2025-04-07T14:03:08.532741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.569366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.569458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.571627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25355, node 1 2025-04-07T14:03:09.435434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.435466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.435472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.435623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9300 TClient is connected to server localhost:9300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.715321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.798214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.849685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.117342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571539227132076:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.146044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.003487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571522047261279:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.003542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.335938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.399009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.421005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.440582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.460646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547817067266:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547817067271:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.570477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571547817067273:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.978445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 4355, MsgBus: 2191 2025-04-07T14:03:08.003228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571522458611045:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016da/r3tmp/tmpjFuIho/pdisk_1.dat 2025-04-07T14:03:08.547667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.569193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.569277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.574493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4355, node 1 2025-04-07T14:03:09.433830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.434096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2191 TClient is connected to server localhost:2191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.682844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.764395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.847016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.110812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571539638481723:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.110918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.005925Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571522458611045:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.005981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.371342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.397120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.418628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.436625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.500009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548228416910:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571548228416915:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.572331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571548228416917:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.984010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1979, MsgBus: 62195 2025-04-07T14:03:08.004276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571520887632701:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.004331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016e7/r3tmp/tmpZWN2b6/pdisk_1.dat 2025-04-07T14:03:08.546063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.592392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.592510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.595783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1979, node 1 2025-04-07T14:03:09.433836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.434082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62195 TClient is connected to server localhost:62195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.722424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:11.806323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:03:11.872683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.112207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571538067503499:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.112276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.004765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571520887632701:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.004824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.307935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.377112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.402634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.428483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.448467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.473865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546657438700:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571546657438705:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.572415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571546657438707:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.967791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys 1 8 /Root/SecondaryKeys/Index/indexImplTable 2 16 >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 24081, MsgBus: 9132 2025-04-07T14:03:08.004225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571521324688989:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.004279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001710/r3tmp/tmpsUUUlr/pdisk_1.dat 2025-04-07T14:03:08.534086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.571134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.571237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.583547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24081, node 1 2025-04-07T14:03:09.434897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.434933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.434939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.435106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9132 TClient is connected to server localhost:9132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:11.449567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.715289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:11.786345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:03:11.822773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.111677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571538504559798:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.111753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.005812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571521324688989:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.005874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.339083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.366972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.395737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.459778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.484966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547094494993:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547094494998:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.562315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.571931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571547094495001:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.985453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:16.011954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:16.076131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 25473, MsgBus: 22556 2025-04-07T14:03:08.003148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571522056559883:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:08.003241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00170a/r3tmp/tmpqYj3WE/pdisk_1.dat 2025-04-07T14:03:08.542790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:08.564286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:08.564369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:08.567696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25473, node 1 2025-04-07T14:03:09.433692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:09.433723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:09.433730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:09.436788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22556 TClient is connected to server localhost:22556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:11.336386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.449533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.704702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.807939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:11.849772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:12.113727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571539236430624:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:12.113842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:13.003781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571522056559883:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:13.003848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:14.308293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.373563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.397607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.418020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.437319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.460485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:14.556897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547826365818:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.556958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.557019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571547826365823:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:14.561949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:14.569156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571547826365825:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:15.980377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:16.007807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:16.031040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |90.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] Test command err: Trying to start YDB, gRPC: 62401, MsgBus: 27783 2025-04-07T14:01:52.082408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571192460602067:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:52.082579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d14/r3tmp/tmp8R4X4a/pdisk_1.dat 2025-04-07T14:01:52.737871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:52.737945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:52.741061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62401, node 1 2025-04-07T14:01:52.929927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:52.985132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:52.985153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:52.985172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:52.985274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27783 TClient is connected to server localhost:27783 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:53.886230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:53.901306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:53.914010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:54.046038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.046229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.046484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.046609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.046716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.047400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.047517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.047626Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.047736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.048005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.048122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.048279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571196755569897:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.100417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.100475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.100824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.100970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.101108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.101213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.101299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.101383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:54.101482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:54.101579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:54.101697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:54.101782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571201050537197:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:54.131137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:54.131205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:54.131403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:54.131530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:54.131640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:54.131745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:54.131858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:54.131962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571201050537207:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.198529Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. 
CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.400626Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.400721Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.485283Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.485337Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.569522Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.569617Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.653716Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.653810Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2025-04-07T14:03:14.759704Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:03:14.759826Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:03:14.815621Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.815683Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.899648Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.899717Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:14.983592Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:14.983677Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.068481Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.068595Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.153117Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.153202Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.208956Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-07T14:03:15.312384Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.312447Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.422721Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.422837Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.509736Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.509825Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.594999Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.595088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.679553Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-04-07T14:03:15.679632Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1831:3098], TxId: 281474976715662, task: 161. Ctx: { TraceId : 01jr88yv968j34fvshm89vyqn6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2M3YTlkODctNDVjNjEwMDUtZmMxYWQyZjYtYTViMGRlOGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-04-07T14:03:15.785119Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2359];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-07T14:03:15.785224Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2359];fline=actor.cpp:33;event=skip_flush_writing; >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |90.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |90.7%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest >> TGRpcCmsTest::SimpleTenantsTest >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::DisabledTxTest >> TGRpcCmsTest::AuthTokenTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-04-07T14:02:46.439370Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.439523Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.508771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-04-07T14:02:46.509293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.561948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.562518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6238964181837874694, session=0, seqNo=0) 2025-04-07T14:02:46.562695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.588012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6238964181837874694, session=1) 2025-04-07T14:02:46.588728Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-07T14:02:46.607218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:46.607376Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:46.619519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:46.619844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:46.631789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-04-07T14:02:46.632310Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=10824321475862911054, name="Lock1") 2025-04-07T14:02:46.632409Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=10824321475862911054) 2025-04-07T14:02:47.469493Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:47.469583Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:47.486157Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:47.486255Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:47.511401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:47.512172Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:128:2154], cookie=10205624706536741526, session=0, seqNo=0) 2025-04-07T14:02:47.512291Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:47.524264Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:128:2154], cookie=10205624706536741526, session=1) 2025-04-07T14:02:47.524540Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:129:2155], cookie=15103150381388343485, session=0, seqNo=0) 2025-04-07T14:02:47.524683Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:47.536337Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:129:2155], cookie=15103150381388343485, session=2) 2025-04-07T14:02:47.537462Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:47.537614Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 
2025-04-07T14:02:47.537702Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:47.549659Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=111) 2025-04-07T14:02:47.549982Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:128:2154], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-07T14:02:47.550093Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-07T14:02:47.550165Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-07T14:02:47.562220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:128:2154], cookie=112) 2025-04-07T14:02:47.562657Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.562877Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-07T14:02:47.574514Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=222) 2025-04-07T14:02:47.574598Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=223) 2025-04-07T14:02:47.574868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-07T14:02:47.575072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:129:2155], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-07T14:02:47.586859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=333) 2025-04-07T14:02:47.586922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:129:2155], cookie=334) 2025-04-07T14:02:47.894617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.906424Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.165955Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.178372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.428307Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.440917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.693001Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.706512Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.954806Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.969137Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.228619Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.240954Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.481151Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.493144Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-07T14:02:49.731664Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.743865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.995359Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.007324Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.312020Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.324217Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.602951Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.615283Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.891192Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.905547Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.174781Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.186820Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.445165Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.457198Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.761759Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.776580Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.043179Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.057415Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.315520Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.327591Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.587172Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.605043Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.871141Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.883263Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.163409Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.175246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.446936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.458926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.716644Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.728336Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.986612Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.998874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.256691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.268971Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.526960Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.539149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-07T14:02:54.797612Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.809509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.077811Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.089908Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.338799Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.350887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.609076Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.621102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.890760Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-04-07T14:02:55.890873Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-04-07T14:02:55.902645Z node 2 :KESUS_TABLET DEBUG: ... -07T14:03:14.465789Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.735482Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.747455Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.005971Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.018364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.280631Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.293008Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.551480Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.563704Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.826093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.838440Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.115713Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.128341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.401536Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.413930Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.675812Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.689159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.951458Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.963513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.225000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.237103Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.545088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.557352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.834161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.849619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-07T14:03:18.129288Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.141483Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.408081Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.419835Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.687745Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.699841Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.962951Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.974691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.233680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.245341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.504504Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.516692Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.756532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.768465Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.016725Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.028834Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.285757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.300946Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.553716Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.569560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.826704Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.839462Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.088901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.101403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.341736Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.353680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.706231Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-04-07T14:03:21.706331Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-07T14:03:21.718159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-04-07T14:03:21.739260Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:534:2530], cookie=10708813917417710145) 2025-04-07T14:03:21.739356Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:534:2530], cookie=10708813917417710145) 2025-04-07T14:03:21.739737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=15619546799545454412) 2025-04-07T14:03:21.739787Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete 
(sender=[4:537:2533], cookie=15619546799545454412) 2025-04-07T14:03:21.740133Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:540:2536], cookie=14671374071407295877, name="Lock1") 2025-04-07T14:03:21.740187Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:540:2536], cookie=14671374071407295877) 2025-04-07T14:03:21.740553Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=12992883057112922190, name="Lock1") 2025-04-07T14:03:21.740598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=12992883057112922190) 2025-04-07T14:03:22.220275Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:22.220354Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:22.236339Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:22.236455Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:22.250873Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:22.251412Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=13867743677911151154, session=0, seqNo=0) 2025-04-07T14:03:22.251569Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:03:22.274239Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=13867743677911151154, session=1) 2025-04-07T14:03:22.274496Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=10569388320767123413, session=0, seqNo=0) 2025-04-07T14:03:22.274604Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:03:22.286698Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=10569388320767123413, session=2) 2025-04-07T14:03:22.286972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=13174305280958680640, session=0, seqNo=0) 2025-04-07T14:03:22.287089Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-07T14:03:22.299854Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=13174305280958680640, session=3) 2025-04-07T14:03:22.300388Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=13385725997384293154, name="Sem1", limit=3) 2025-04-07T14:03:22.300539Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:03:22.312578Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=13385725997384293154) 2025-04-07T14:03:22.312937Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-07T14:03:22.313087Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:03:22.313302Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=2, semaphore="Sem1" count=2) 2025-04-07T14:03:22.313474Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], 
cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-07T14:03:22.325321Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2025-04-07T14:03:22.325400Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2025-04-07T14:03:22.325433Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2025-04-07T14:03:22.325941Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:152:2176], cookie=2221668377300432855, name="Sem1") 2025-04-07T14:03:22.326013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:152:2176], cookie=2221668377300432855) 2025-04-07T14:03:22.326457Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=2847897656596124971, name="Sem1") 2025-04-07T14:03:22.326523Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=2847897656596124971) 2025-04-07T14:03:22.326748Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=444, name="Sem1") 2025-04-07T14:03:22.326839Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-07T14:03:22.326894Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-07T14:03:22.326939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-07T14:03:22.338830Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=444) 2025-04-07T14:03:22.339429Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2184], cookie=6371067652187033369, name="Sem1") 2025-04-07T14:03:22.339516Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2184], cookie=6371067652187033369) 2025-04-07T14:03:22.340036Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2187], cookie=5320358124706030609, name="Sem1") 2025-04-07T14:03:22.340106Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2187], cookie=5320358124706030609) >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |90.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |90.7%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TKesusTest::TestSessionStealingSameKey [GOOD] >> 
TKesusTest::TestSessionStealingDifferentKey >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> IncrementalRestoreScan::ChangeSenderSimple >> IncrementalRestoreScan::Empty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-04-07T14:02:46.724855Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:46.725008Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:46.741285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:46.741871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:46.766268Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:46.766751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=18219059463366291055, session=0, seqNo=0) 2025-04-07T14:02:46.766880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:46.779103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=18219059463366291055, session=1) 2025-04-07T14:02:46.779799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=1653359194857481408 2025-04-07T14:02:46.780184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:143:2167], cookie=14880671559427902365) 2025-04-07T14:02:46.780247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:143:2167], cookie=14880671559427902365) 2025-04-07T14:02:47.379391Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.393584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:47.657958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.671690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:47.925626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:47.938247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.186131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.198581Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.468561Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.481440Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.733807Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:48.746135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:48.987139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.001313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.253902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.266220Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.517367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.529781Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:49.832179Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:49.844313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.102923Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.115103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.373824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.386046Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.644089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.656524Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:50.907202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:50.925638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.226204Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.238357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.499005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.512377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:51.778546Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:51.791034Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.049551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.062292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.320961Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.333138Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.613010Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.625214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:52.899824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:52.913713Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.172505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.184443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.444551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.456468Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.714514Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:53.726499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:53.994090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.006066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.273736Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.285671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.543565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.555730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:54.813525Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:54.825420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.083534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.095336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.389258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.401161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.670023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.682415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:55.940683Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:55.952741Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.200711Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.212638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.472001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.484518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:56.753606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:56.765852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.035782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.047959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.306206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.318319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.576263Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.588157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:57.846030Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:57.858366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.148456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.160903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.418751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.430733Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.698461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.710232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.967914Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.979631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.240628Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.254004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.543290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.555375Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.813234Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.825312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.094021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.106022Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.364049Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.375675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.633790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.645653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.915096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.927105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.174520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.186267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.444651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.456919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.704730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.716854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.965331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.977538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] T ... Check::Execute 2025-04-07T14:03:10.663329Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:10.931274Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:10.943483Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.212281Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.224517Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.482378Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.494290Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:11.756787Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:11.769991Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.028396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.040162Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.351125Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.362707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.620380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.632050Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:12.890063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:12.901767Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.159502Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.171744Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.430163Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.442032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.710295Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.721977Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:13.979191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:13.991006Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.248606Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.260390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.523450Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.535206Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:14.792994Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:14.804889Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.100955Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.113198Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.383261Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.400429Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.664210Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.675953Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:15.947213Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:15.959226Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.218550Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.230427Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.531501Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.543297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:16.814113Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:16.826443Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.096966Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.109205Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.373538Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.385890Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.651669Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.665507Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:17.955765Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:17.972269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.237007Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.248793Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.507318Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.522871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:18.779639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:18.791726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.043320Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.055632Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.305324Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.317574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.566631Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.578707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:19.827994Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:19.839738Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.087741Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.099639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.356201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.368561Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:20.731207Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:20.743070Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.001945Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.014276Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.295348Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.307457Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.576349Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.588476Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:21.836699Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:21.849109Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.113441Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.125882Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.387207Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.401040Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.679585Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.693545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.960994Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.975900Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:23.240519Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:23.253071Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:23.522008Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-07T14:03:23.522102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:03:23.534314Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:03:23.545469Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:569:2563], cookie=347841995664604531) 2025-04-07T14:03:23.545561Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:569:2563], cookie=347841995664604531) 2025-04-07T14:03:23.884418Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:23.884519Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:23.901550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:23.902095Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:23.926175Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:23.926980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-07T14:03:23.927119Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:03:23.942129Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=12345, session=1) 2025-04-07T14:03:23.942742Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=23456, session=1, seqNo=0) 2025-04-07T14:03:23.960307Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=23456, session=1) 2025-04-07T14:03:24.348072Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:24.348183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:24.366858Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:24.367010Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:24.382829Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:24.383672Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-07T14:03:24.383818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:03:24.410488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-07T14:03:24.411281Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-07T14:03:24.423527Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] |90.8%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 19960, MsgBus: 12430 2025-04-07T14:01:54.753128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571203325860157:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:54.753181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e2c/r3tmp/tmpeY9JL7/pdisk_1.dat 2025-04-07T14:01:55.522686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:55.522836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:55.525842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:55.542724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19960, node 1 2025-04-07T14:01:55.809243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:55.809261Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:55.809266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:55.809343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12430 TClient is connected to server localhost:12430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:56.891741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:56.952029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:57.179952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:01:57.474575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:01:57.610810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:01:59.760776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571203325860157:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:59.760842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:59.962092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571224800698331:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:59.962213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:00.974633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.042544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.123215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.179549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.257021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.345830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:02:01.439896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571233390633433:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:01.439944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:01.440214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571233390633439:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:01.443993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:02:01.456228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571233390633441:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:02:04.008829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034524015, txId: 281474976710672] shutting down 2025-04-07T14:02:04.665360Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034524659, txId: 281474976710675] shutting down 2025-04-07T14:02:05.499634Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034525443, txId: 281474976710678] shutting down 2025-04-07T14:02:06.374956Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034526360, txId: 281474976710681] shutting down 2025-04-07T14:02:06.839396Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034526857, txId: 281474976710684] shutting down 2025-04-07T14:02:07.605151Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034527599, txId: 281474976710687] shutting down 2025-04-07T14:02:08.559540Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034528551, txId: 281474976710690] shutting down 2025-04-07T14:02:09.659938Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034529629, txId: 281474976710693] shutting down 2025-04-07T14:02:10.473185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:10.473238Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:10.589304Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034530602, txId: 281474976710696] shutting down 2025-04-07T14:02:11.219492Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034531232, txId: 281474976710699] shutting down 2025-04-07T14:02:11.645193Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034531645, txId: 281474976710702] shutting down 2025-04-07T14:02:12.167740Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034532170, txId: 281474976710705] shutting down 2025-04-07T14:02:12.770005Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034532765, txId: 281474976710708] shutting down 2025-04-07T14:02:13.460467Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034533465, txId: 281474976710711] shutting down 2025-04-07T14:02:13.893454Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034533878, txId: 281474976710714] shutting down 2025-04-07T14:02:14.967650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034534900, txId: 281474976710717] shutting down 2025-04-07T14:02:15.749701Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034535775, txId: 281474976710720] shutting down 2025-04-07T14:02:16.349340Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034536349, txId: 281474976710723] shutting down 2025-04-07T14:02:16.939901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744034536944, txId: 281474976710726] shutting down 2025-04-07T14:02:17.718587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034537735, txId: 281474976710729] shutting down 2025-04-07T14:02:18.154189Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034538162, txId: 281474976710732] shutting down 2025-04-07T14:02:18.548936Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034538568, txId: 281474976710735] shutting down 2025-04-07T14:02:18.996054Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034539023, txId: 281474976710738] shutting down 2025-04-07T14:02:19.456533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 17440 ... R WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034591285, txId: 281474976711200] shutting down 2025-04-07T14:03:11.509878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034591544, txId: 281474976711203] shutting down 2025-04-07T14:03:11.888963Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034591887, txId: 281474976711206] shutting down 2025-04-07T14:03:12.168555Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034592202, txId: 281474976711209] shutting down 2025-04-07T14:03:12.459635Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034592475, txId: 281474976711212] shutting down 2025-04-07T14:03:12.722157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034592734, txId: 281474976711215] shutting down 2025-04-07T14:03:12.990108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034593014, txId: 281474976711218] shutting down 2025-04-07T14:03:13.257770Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034593287, txId: 281474976711221] shutting down 2025-04-07T14:03:13.523166Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034593553, txId: 281474976711224] shutting down 2025-04-07T14:03:13.789980Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034593826, txId: 281474976711227] shutting down 2025-04-07T14:03:14.080045Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034594106, txId: 281474976711230] shutting down 2025-04-07T14:03:14.377276Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034594400, txId: 281474976711233] shutting down 2025-04-07T14:03:14.681144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034594715, txId: 281474976711236] shutting down 2025-04-07T14:03:14.987674Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034595002, txId: 281474976711239] shutting down 2025-04-07T14:03:15.291653Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034595324, txId: 281474976711242] shutting down 2025-04-07T14:03:15.672312Z node 1 :KQP_RESOURCE_MANAGER 
WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034595702, txId: 281474976711245] shutting down 2025-04-07T14:03:16.017474Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596017, txId: 281474976711248] shutting down 2025-04-07T14:03:16.349794Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596381, txId: 281474976711251] shutting down 2025-04-07T14:03:16.696858Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034596703, txId: 281474976711254] shutting down 2025-04-07T14:03:17.045060Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034597074, txId: 281474976711257] shutting down 2025-04-07T14:03:17.398803Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034597417, txId: 281474976711260] shutting down 2025-04-07T14:03:17.745185Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034597774, txId: 281474976711263] shutting down 2025-04-07T14:03:18.121864Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034598152, txId: 281474976711266] shutting down 2025-04-07T14:03:18.520544Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034598537, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 18304, MsgBus: 10918 2025-04-07T14:03:19.395623Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571565923173226:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:19.395679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e2c/r3tmp/tmpKYe463/pdisk_1.dat 2025-04-07T14:03:19.510590Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:19.531980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:19.532088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:19.533669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18304, node 2 2025-04-07T14:03:19.580079Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:19.580110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:19.580118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:19.580239Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10918 TClient is connected to server localhost:10918 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:20.006925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:20.017076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:20.089701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:20.236211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:20.347389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:22.119036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571578808076809:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:22.119124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:22.161009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.192397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.217902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.247182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.276572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.344530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:22.445650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571578808077314:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:22.445786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:22.446169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571578808077319:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:22.450197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:22.462645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571578808077321:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:24.396639Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571565923173226:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.396717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:24.662355Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034604634, txId: 281474976715671] shutting down |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> TKesusTest::TestAcquireSemaphore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-04-07T14:02:58.238389Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:58.238500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:58.249536Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:58.250086Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:58.273621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:58.274044Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=1152649971758481480, session=0, seqNo=0) 2025-04-07T14:02:58.274144Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:02:58.285791Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=1152649971758481480, session=1) 2025-04-07T14:02:58.285995Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=3657988171071486057, session=0, seqNo=0) 2025-04-07T14:02:58.286084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:02:58.297746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=3657988171071486057, session=2) 2025-04-07T14:02:58.298482Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-07T14:02:58.298628Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-07T14:02:58.298689Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:58.298849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock2" count=1) 2025-04-07T14:02:58.298901Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-07T14:02:58.298975Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-07T14:02:58.299061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2025-04-07T14:02:58.299092Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 
1 2025-04-07T14:02:58.310942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-04-07T14:02:58.311020Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-04-07T14:02:58.311043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2025-04-07T14:02:58.311577Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=14936286312802076271, name="Lock1") 2025-04-07T14:02:58.311671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=14936286312802076271) 2025-04-07T14:02:58.312110Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=18165250276207870123, name="Lock2") 2025-04-07T14:02:58.312165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=18165250276207870123) 2025-04-07T14:02:58.322873Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:02:58.322933Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:02:58.323286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:02:58.323805Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:02:58.359582Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:02:58.359711Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-07T14:02:58.359765Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-07T14:02:58.359789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-07T14:02:58.360132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:189:2203], cookie=17991068965016045499, name="Lock1") 2025-04-07T14:02:58.360212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:189:2203], cookie=17991068965016045499) 2025-04-07T14:02:58.360753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:197:2210], cookie=1636989474345325539, name="Lock2") 2025-04-07T14:02:58.360818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:197:2210], cookie=1636989474345325539) 2025-04-07T14:02:58.689150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.700844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:58.956826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:58.968547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.214285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.226113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.481983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.493436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:02:59.749157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:02:59.761096Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.006982Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.018816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.254743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.266625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.512870Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.524443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:00.780855Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:00.792763Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.080990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.092567Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.348958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.360505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.617744Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.629474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:01.885610Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:01.897401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.153813Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.165591Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.473710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.485474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:02.741904Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:02.753532Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.010676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.022633Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.280028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.291603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.547881Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.559209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:03.847203Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:03.858782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:04.114887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:04.126498Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:04.383464Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:04.395643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:04.653878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:04.665717Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:04.923922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:04.935568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:05.214331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:05.226080Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:05.484958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:05.497128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:05.745200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:05.756859Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:06.015732Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:06.027756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:06.284684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:06.296225Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:06.565901Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2365], cookie=12570756438004897983, name="Lock1") 2025-04-07T14:03:06.566012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2365], cookie=12570756438004897983) 2025-04-07T14:03:06.566687Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2368], cookie=13907220411462669254, name="Lock2") 2025-04-07T14:03:06.566750Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2368], cookie=13907220411462669254) 2025-04-07T14:03:06.612995Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:06.625277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:06.894115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:06.906549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:07.166341Z node 1 :KESUS_TABLET DEBUG: [7205 ... 
4-07T14:03:22.064149Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.333792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.345566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.602630Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.616072Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:22.900494Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:22.912960Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:23.174703Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:23.189176Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:23.482060Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:23.496000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:23.778379Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:23.790128Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:24.044418Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:24.065379Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:24.322263Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:24.334940Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:24.588959Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:24.601828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:24.873520Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:24.885578Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:25.175406Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:25.197539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:25.485398Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:25.505426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:25.809030Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:25.837346Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:26.129058Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:26.141381Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:26.417642Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:26.433362Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:26.716905Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:26.731881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:27.021128Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:27.037571Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:27.326875Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-07T14:03:27.343531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:27.605616Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-07T14:03:27.621343Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-07T14:03:27.883497Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-07T14:03:27.883586Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-07T14:03:27.883637Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-07T14:03:27.883717Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-07T14:03:27.883764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-07T14:03:27.883789Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:03:27.895741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-07T14:03:27.896378Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:329:2342], cookie=8998963933969481622, name="Lock1") 2025-04-07T14:03:27.896456Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:329:2342], cookie=8998963933969481622) 2025-04-07T14:03:27.896921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=16171546821201622485, name="Lock2") 2025-04-07T14:03:27.896976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=16171546821201622485) 2025-04-07T14:03:27.897357Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:335:2348], cookie=10175261460658044201) 2025-04-07T14:03:27.897409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:335:2348], cookie=10175261460658044201) 2025-04-07T14:03:27.917041Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:27.917155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:27.917779Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:27.918168Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:27.969390Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:27.969539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-07T14:03:27.969588Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-07T14:03:27.969921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:375:2378], cookie=12583392359867879511) 2025-04-07T14:03:27.969998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:375:2378], cookie=12583392359867879511) 2025-04-07T14:03:27.970718Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:382:2384], cookie=1868391360648174222, name="Lock1") 2025-04-07T14:03:27.970802Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:382:2384], cookie=1868391360648174222) 2025-04-07T14:03:27.971310Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=9003781314966660316, name="Lock2") 2025-04-07T14:03:27.971374Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=9003781314966660316) 2025-04-07T14:03:28.542602Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-07T14:03:28.542729Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-07T14:03:28.569495Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-07T14:03:28.569776Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-07T14:03:28.594564Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-07T14:03:28.595090Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=949631274375210563, session=0, seqNo=0) 2025-04-07T14:03:28.595278Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-07T14:03:28.607548Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=949631274375210563, session=1) 2025-04-07T14:03:28.607858Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=3941535846248086805, session=0, seqNo=0) 2025-04-07T14:03:28.607991Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-07T14:03:28.621457Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=3941535846248086805, session=2) 2025-04-07T14:03:28.621843Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-07T14:03:28.637944Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2025-04-07T14:03:28.638642Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=6195475541027933848, name="Sem1", limit=1) 2025-04-07T14:03:28.638817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-07T14:03:28.654534Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=6195475541027933848) 2025-04-07T14:03:28.654953Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-04-07T14:03:28.668111Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2025-04-07T14:03:28.668509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=1, semaphore="Sem1" count=1) 2025-04-07T14:03:28.668697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-07T14:03:28.668929Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=2, semaphore="Sem1" count=1) 2025-04-07T14:03:28.683399Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2025-04-07T14:03:28.683491Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2025-04-07T14:03:28.684092Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=13169188492738412732, name="Sem1") 2025-04-07T14:03:28.684189Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=13169188492738412732) 2025-04-07T14:03:28.684733Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=13751033537910219154, name="Sem1") 2025-04-07T14:03:28.684826Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=13751033537910219154) 2025-04-07T14:03:28.685290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:159:2183], cookie=17458679570273987449, name="Sem1", force=0) 2025-04-07T14:03:28.702713Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:159:2183], cookie=17458679570273987449) 2025-04-07T14:03:28.703370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:164:2188], cookie=11375878783868360126, name="Sem1", force=1) 2025-04-07T14:03:28.703478Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-07T14:03:28.722802Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:164:2188], cookie=11375878783868360126) |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> IncrementalRestoreScan::Empty [GOOD] >> TGRpcCmsTest::DisabledTxTest [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] |90.8%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TGRpcCmsTest::AuthTokenTest [GOOD] >> KqpQueryServiceScripts::TestPaging >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryService::Ddl >> KqpService::SessionBusy >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpQueryService::DmlNoTx >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::AlterTempTable >> KqpQueryService::TableSink_ReplaceDuplicatesOlap >> KqpService::CloseSessionsWithLoad >> KqpQueryService::StreamExecuteQueryPure >> KqpQueryService::QueryOnClosedSession >> KqpService::RangeCache+UseCache >> KqpQueryService::AlterTable_DropNotNull_Valid >> KqpQueryService::Write >> KqpQueryService::ExecuteQueryPure >> KqpQueryService::TableSink_Olap_Replace >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpQueryService::DdlColumnTable >> KqpQueryService::ExecuteQuery >> KqpService::ToDictCache+UseCache >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::Tcl >> KqpQueryServiceScripts::ExecuteScript >> KqpDocumentApi::AllowRead >> KqpQueryService::DdlGroup >> KqpOlapBlobsSharing::MultipleMerge [FAIL] >> KqpOlapClickbench::ClickBenchSmoke [GOOD] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpService::RangeCache+UseCache [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpService::ToDictCache+UseCache [GOOD] >> KqpQueryService::QueryOnClosedSession [GOOD] >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> KqpQueryService::DmlNoTx [GOOD] >> KqpService::SessionBusy [GOOD] >> KqpQueryService::Write [GOOD] >> KqpQueryService::Ddl [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpService::ToDictCache-UseCache |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpDocumentApi::AllowRead [GOOD] >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryService::CTASWithoutPerStatement >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess >> KqpService::RangeCache-UseCache >> KqpQueryService::ExecuteQueryPgTableSelect >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::TempTablesDrop >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::DdlPermission 
>> KqpQueryService::TableSink_Oltp_Replace-UseSink >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpQueryServiceScripts::ExecuteMultiScript >> KqpQueryService::StreamExecuteQuery >> KqpQueryService::AlterCdcTopic >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::DdlWithExplicitTransaction >> KqpQueryService::CreateTempTable >> KqpService::SessionBusyRetryOperation >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> KqpDocumentApi::RestrictAlter >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryService::AlterCdcTopic [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> KqpQueryService::ExecuteRetryQuery >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> KqpQueryServiceScripts::CancelScriptExecution >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> KqpQueryServiceScripts::InvalidFetchToken >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::ExecuteQueryScalar >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::Explain >> KqpQueryService::CreateOrDropTopicOverTable >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpQueryService::DdlMixedDml >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpQueryService::Explain [GOOD] >> KqpDocumentApi::RestrictDrop >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> KqpQueryService::Ddl_Dml >> KqpQueryService::ExecuteQueryScalar [GOOD] >> KqpDocumentApi::RestrictDrop [GOOD] >> KqpQueryService::DdlMixedDml [GOOD] >> KqpQueryService::Ddl_Dml [GOOD] |90.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2025-04-07T14:03:24.341812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571588130164943:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.341859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011e0/r3tmp/tmpe1sZmZ/pdisk_1.dat 2025-04-07T14:03:25.444810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:25.464866Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.467880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.467978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.482524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16872, node 1 2025-04-07T14:03:26.632734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.632760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.632789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.632939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:28.297641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.338644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.342227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.364882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-04-07T14:03:28.365338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.365358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:03:28.374150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.378642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.378670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:03:28.381384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.401710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:03:28.403901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.404065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.404107Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:03:28.404198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:03:28.404234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:03:28.404277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:03:28.406937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:03:28.406978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:03:28.406993Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.407060Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:03:28.579284Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7490571605310034864:2296], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-07T14:03:28.579333Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-07T14:03:28.579357Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.579373Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.579469Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-04-07T14:03:28.579616Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744034608578964) 2025-04-07T14:03:28.580102Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744034608578964 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-07T14:03:28.580275Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-07T14:03:28.598035Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:03:28.598856Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608578964&action=1" } } } 2025-04-07T14:03:28.599011Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.599075Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.599196Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.599540Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-07T14:03:28.599650Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T14:03:28.604243Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571605310034873:2297], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608578964&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-07T14:03:28.604270Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.604453Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608578964&action=1" } } 2025-04-07T14:03:28.611688Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-07T14:03:28.611748Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.611830Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7490571605310034869:2194], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.611847Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.611874Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.611882Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.611929Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-07T14:03:28.611949Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-07T14:03:28.611997Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-07T14:03:28.619018Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:28.619053Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tena ... 
tin*\017**** (B6C6F477)" } 2025-04-07T14:03:29.439928Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.439970Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.440052Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7490571588130165183:2191], Recipient [1:7490571588130165296:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.440065Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-07T14:03:29.440544Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-07T14:03:29.446264Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:29.464253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710659:0 HandleReply TEvConfigureStatus operationId:281474976710659:0 at schemeshard:72057594046644480 2025-04-07T14:03:29.464314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 3 -> 128 2025-04-07T14:03:29.446355Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:29.491198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:29.493368Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:29.500446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034609548, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:29.500490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710659:0, at tablet# 72057594046644480 2025-04-07T14:03:29.500872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2025-04-07T14:03:29.501024Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3], Generation: 1, ActorId:[3:7490571606473894166:2275], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 72075186224037888, TenantSysViewProcessor: 72075186224037896, TenantStatisticsAggregator: 72075186224037897, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186224037888, tenantSysViewProcessor: 72075186224037896, at schemeshard: 72057594046644480 2025-04-07T14:03:29.502956Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7490571609605002708:2352], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** 
(B6C6F477)" } 2025-04-07T14:03:29.502980Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.503017Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.503183Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7490571588130165183:2191], Recipient [1:7490571588130165296:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.503207Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-07T14:03:29.503247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:29.503366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:29.503457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TSyncHive, operationId 281474976710659:0, ProgressState, NeedSyncHive: 0 2025-04-07T14:03:29.503472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 240 -> 240 2025-04-07T14:03:29.503704Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-07T14:03:29.504617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:03:29.504679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:03:29.504692Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-07T14:03:29.505751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2025-04-07T14:03:29.505850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:03:29.505899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:03:29.506610Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7490571609605002724:2353], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-07T14:03:29.506629Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.506665Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.506764Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7490571588130165183:2191], Recipient [1:7490571588130165296:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.506777Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 
2025-04-07T14:03:29.509080Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-07T14:03:29.509740Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-07T14:03:29.509751Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-07T14:03:29.509823Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-07T14:03:29.509892Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7490571605310034973:2194], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-07T14:03:29.509906Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-07T14:03:29.509919Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.509928Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.509986Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-04-07T14:03:29.510008Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1744034608578964 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:29.510053Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-04-07T14:03:29.511833Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-04-07T14:03:29.511859Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.514215Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7490571609605002733:2354], Recipient [1:7490571588130165296:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-07T14:03:29.514240Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.514277Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.514480Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7490571588130165183:2191], Recipient [1:7490571588130165296:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-07T14:03:29.514501Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-07T14:03:29.514926Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:6417 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS 
SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037889 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037889 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanReso... (TRUNCATED) 2025-04-07T14:03:29.939836Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T14:03:29.940938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:03:30.141033Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-04-07T14:03:24.341806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571589574174019:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.341925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011d5/r3tmp/tmphF07Xt/pdisk_1.dat 2025-04-07T14:03:25.259617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.367611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.367719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.368289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:25.424969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27909, node 1 2025-04-07T14:03:26.641322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.641357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.641365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.641463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21516 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:28.299223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.344172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.352642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.361341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:28.361570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.361581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:03:28.373067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.375005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.375034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:03:28.376420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.402027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:03:28.408899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.409057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.409096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:03:28.409181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:03:28.409213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:03:28.409252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:03:28.411444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:03:28.411498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:03:28.411513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.411586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:21516 2025-04-07T14:03:28.612572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.612852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.612872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.612935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:03:28.613029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:03:28.613044Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:03:28.621415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:user-1@builtin 2025-04-07T14:03:28.621579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.621811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.622229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:03:28.622280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:03:28.622295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:03:28.622368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2025-04-07T14:03:28.678732Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7490571606754043954:2296], Recipient [1:7490571593869141687:2198]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-07T14:03:28.678784Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-07T14:03:28.678802Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.678815Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.678927Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-04-07T14:03:28.679056Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744034608678586) 2025-04-07T14:03:28.679526Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744034608678586 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-07T14:03:28.679662Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-07T14:03:28.684569Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:03:28.685515Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608678586&action=1" } } } 2025-04-07T14:03:28.685646Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.685709Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.704320Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.705037Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571606754043963:2297], Recipient [1:7490571593869141687:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608678586&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-07T14:03:28.705064Z node 1 :CMS_TENANTS ... 2025-04-07T14:03:29.558459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.558464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.558500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:9 2025-04-07T14:03:29.558508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.558514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.558548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2025-04-07T14:03:29.558552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.558557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.558604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2025-04-07T14:03:29.558609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.558619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.558650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2025-04-07T14:03:29.558655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.558660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.558691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2025-04-07T14:03:29.558707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 
2025-04-07T14:03:29.558717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.557251Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found - using supplied 72075186224037889 2025-04-07T14:03:29.557370Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found - using supplied 72075186224037894 2025-04-07T14:03:29.557420Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found - using supplied 72075186224037891 2025-04-07T14:03:29.557454Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found - using supplied 72075186224037897 2025-04-07T14:03:29.557496Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-07T14:03:29.557526Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found - using supplied 72075186224037896 2025-04-07T14:03:29.557557Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found - using supplied 72075186224037893 2025-04-07T14:03:29.557587Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037890 2025-04-07T14:03:29.557616Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037895 2025-04-07T14:03:29.557669Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037892 2025-04-07T14:03:29.562428Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-07T14:03:29.562459Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-07T14:03:29.562474Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-07T14:03:29.562256Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:03:29.564414Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-04-07T14:03:29.564540Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7490571611049011876:2198], Recipient [1:7490571593869141687:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.564572Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.564609Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.564643Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.564697Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-07T14:03:29.564719Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-07T14:03:29.566073Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037889 because of ERROR 
2025-04-07T14:03:29.566151Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037894 because of ERROR 2025-04-07T14:03:29.566182Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037891 because of ERROR 2025-04-07T14:03:29.584628Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037897 2025-04-07T14:03:29.585625Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-07T14:03:29.585668Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037893 2025-04-07T14:03:29.585708Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-07T14:03:29.585737Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-07T14:03:29.585757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-07T14:03:29.595622Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037896 2025-04-07T14:03:29.595754Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-04-07T14:03:29.595800Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037895 2025-04-07T14:03:29.615887Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-07T14:03:29.615934Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-07T14:03:29.615951Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-07T14:03:29.616018Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037892 2025-04-07T14:03:29.654737Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-07T14:03:29.660362Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571611049011986:2350], Recipient [1:7490571593869141687:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609522022&action=2" } UserToken: "" } 2025-04-07T14:03:29.663208Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:29.663409Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609522022&action=2" } } 2025-04-07T14:03:29.665018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.665060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.665637Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:29.665660Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.665667Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.665674Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.665724Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744034609522022 2025-04-07T14:03:29.665743Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744034609522022 issue=AccessDenied: Access denied for request 2025-04-07T14:03:29.665776Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744034609522022 issue=AccessDenied: Access denied for request 2025-04-07T14:03:29.665785Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-07T14:03:29.665873Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744034609522022 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-07T14:03:29.680692Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-07T14:03:29.680739Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.721222Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571611049012028:2352], Recipient [1:7490571593869141687:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609522022&action=2" } UserToken: "" } 2025-04-07T14:03:29.721253Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:29.721425Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609522022&action=2" ready: true status: SUCCESS } } 2025-04-07T14:03:29.732239Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T14:03:29.732426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-04-07T14:03:29.353846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:03:29.354242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:29.354486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001685/r3tmp/tmpjEnmp9/pdisk_1.dat 2025-04-07T14:03:30.379278Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:562:2488] Exhausted 2025-04-07T14:03:30.379444Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:562:2488] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-04-07T14:03:30.379496Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:562:2488] Finish 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-04-07T14:03:29.353859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:03:29.354240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:29.354486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001696/r3tmp/tmp4FrvBQ/pdisk_1.dat 2025-04-07T14:03:30.523315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T14:03:30.537081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.556808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:03:30.580882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:30.581043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.593667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:30.601370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:30.601723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.601794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:30.601903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:03:30.601944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:03:30.608895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.608984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:30.609028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:03:30.609625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.609665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:30.609715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:03:30.609771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T14:03:30.622406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:03:30.623073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:03:30.623293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:03:30.631703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:30.631773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T14:03:30.631831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:30.836992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T14:03:30.837071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:30.929978Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:03:30.941100Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:03:30.941352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:30.941476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:30.952990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:31.086607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:31.086786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:03:31.086847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:03:31.087126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:03:31.087193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:03:31.087355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:03:31.087434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:03:31.088531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:31.088578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:03:31.092871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:31.092941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T14:03:31.093055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:03:31.093101Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T14:03:31.093217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:03:31.093252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:03:31.093294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:03:31.093351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:03:31.093396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:03:31.093432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:03:31.093523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:03:31.093570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T14:03:31.093600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:03:31.096197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:03:31.096306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:03:31.096343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T14:03:31.096387Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:31.096440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:03:31.096552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T14:03:31.096597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T14:03:31.105382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T14:03:31.128882Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:03:31.128967Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:03:31.132891Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] 
Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:03:31.156998Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:03:31.157680Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:03:31.157862Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:03:31.158283Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:03:31.158461Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057 ... 025-04-07T14:03:32.168933Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037889 state Ready 2025-04-07T14:03:32.169009Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:03:32.169121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:32.169164Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:03:32.169237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:03:32.169279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-07T14:03:32.169316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-04-07T14:03:32.169378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715658 2025-04-07T14:03:32.169429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-07T14:03:32.169457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:03:32.169484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-04-07T14:03:32.169569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-07T14:03:32.170003Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-07T14:03:32.170069Z node 1 :TX_PROXY DEBUG: Actor# [1:780:2641] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-07T14:03:32.171161Z node 1 :TX_PROXY DEBUG: Actor# [1:780:2641] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:03:32.171236Z node 1 :TX_PROXY DEBUG: Actor# [1:780:2641] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-04-07T14:03:32.171883Z node 1 :TX_PROXY DEBUG: Actor# [1:780:2641] Handle TEvDescribeSchemeResult 
Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-07T14:03:32.176452Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:790:2645], serverId# [1:791:2646], sessionId# [0:0:0] 2025-04-07T14:03:32.181721Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:03:32.181925Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:03:32.182178Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:03:32.182372Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-04-07T14:03:32.182471Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:03:32.184926Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvGetProxyServicesRequest 2025-04-07T14:03:32.185035Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:796:2647] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T14:03:32.185400Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:797:2651], serverId# [1:798:2652], sessionId# [0:0:0] 2025-04-07T14:03:32.227684Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:796:2647] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:03:32.227803Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:03:32.227917Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:796:2647] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:03:32.228021Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:03:32.228197Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:792:2647] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_OltpDelete [GOOD] >> KqpService::RangeCache-UseCache [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] >> KqpOlapBlobsSharing::ChangeSchemaAndSplit [FAIL] >> KqpQueryService::ExecuteRetryQuery [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpOlapSparsed::SwitchingStandalone [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> KqpQueryService::ReadDatashardAndColumnshard ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/service/unittest >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] Test command err: Trying to start YDB, gRPC: 62598, MsgBus: 11058 2025-04-07T14:03:34.975636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633858424094:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107c/r3tmp/tmpAfvIY4/pdisk_1.dat 2025-04-07T14:03:35.593822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.593911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.596418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.612511Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62598, node 1 2025-04-07T14:03:35.681624Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.681651Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:36.065238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.065266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.065295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.065397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11058 TClient is connected to server localhost:11058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.941528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.075115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.095344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.313050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.528641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.615413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.400391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651038294817:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.400472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633858424094:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.976053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.482787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.508497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.535608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.559426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.582202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.655866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659628229941:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.655946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.656182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659628229946:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.659254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.668589Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:03:40.668796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659628229948:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 28695, MsgBus: 12053 2025-04-07T14:03:42.468737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571665733184791:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.468780Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107c/r3tmp/tmp9U5R0L/pdisk_1.dat 2025-04-07T14:03:42.563584Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28695, node 2 2025-04-07T14:03:42.615199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.615278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.625392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:42.665252Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.665280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.665287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.665374Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12053 TClient is connected to server localhost:12053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:43.103822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.113664Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:43.129331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.202494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:43.377602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.474970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:45.598318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571678618088383:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.598432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.636874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.667106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.695789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.733134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.774247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.842967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.883176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571678618088881:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.883278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.883422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571678618088886:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.887242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:45.901694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571678618088888:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:47.469047Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571665733184791:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:47.469127Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28772, MsgBus: 30561 2025-04-07T14:03:48.100940Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571690980842743:2051];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107c/r3tmp/tmpX7tjl8/pdisk_1.dat 2025-04-07T14:03:48.151380Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:48.217442Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:48.246997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:48.247074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28772, node 3 2025-04-07T14:03:48.249747Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:48.287444Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:48.287468Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:48.287480Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:48.287582Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30561 TClient is connected to server localhost:30561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:48.688645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:48.701256Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:48.712798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.801253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.966043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:49.044823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.183205Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571703865746346:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.183296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.218896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.256187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.290977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.328758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.367369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.413014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.483248Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571703865746841:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.483344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.483565Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571703865746846:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.487635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:51.500883Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T14:03:51.501093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571703865746848:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: Trying to start YDB, gRPC: 3923, MsgBus: 2251 2025-04-07T14:03:34.977051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630757035392:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.977154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001130/r3tmp/tmpIBVDqp/pdisk_1.dat 2025-04-07T14:03:35.661645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.661732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.693994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.694215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3923, node 1 2025-04-07T14:03:36.056826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.056939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2251 TClient is connected to server localhost:2251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.084548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.110674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.119155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.302654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.536200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.641741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.747298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571647936906149:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.747393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.008714Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630757035392:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:40.008997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.481214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.512931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.549239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.577326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.615363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.693145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656526841247:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.693208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.693492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656526841252:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.698460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.711783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571656526841254:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.609138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.826171Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:41.826385Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVmYTQyNTItMmQxOWU3Y2ItNjcwNTYwYTktZDk4ZGQ4Zjg=, ActorId: [1:7490571660821809005:2490], ActorState: ExecuteState, TraceId: 01jr890tbn69s0nvb5pq3x5ebq, Create QueryResponse for error on request, msg: 2025-04-07T14:03:41.918437Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-07T14:03:41.927754Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571660821809137:2515], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:41.928064Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRmZjA0YjctYjA3ZjM3YzktZDIzYTYxMjAtMmNkZTA3Njk=, ActorId: [1:7490571660821809131:2514], ActorState: ExecuteState, TraceId: 01jr890tew4prf3yrs8g18efwn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:41.941996Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571660821809145:2519], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:41.942179Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGNkYzEzNGYtYWRmN2NmN2YtMTQ5OWFlM2UtNzZhNTBiYg==, ActorId: [1:7490571660821809143:2518], ActorState: ExecuteState, TraceId: 01jr890tfc4a87m6bkmsr0fqvt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:41.998596Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571660821809168:2527], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:41.998775Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM5MmNkZTItZjMzNGQ3MWMtNzg0NjQwZmUtMmI2NzliMTU=, ActorId: [1:7490571660821809166:2526], ActorState: ExecuteState, TraceId: 01jr890th06exsd2nqj0exxx8k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:42.011464Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571665116776472:2531], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:42.011619Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQxNGExNTktZDM4MGJlMzMtYmE1YzUyMDItZTMxZGNmNTA=, ActorId: [1:7490571665116776470:2530], ActorState: ExecuteState, TraceId: 01jr890thkeybj3vf359q1v5tw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:42.033081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... do unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.104287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.171610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571709900454277:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.171682Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.171829Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571709900454282:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.175080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:52.186598Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571709900454284:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:53.308626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.440088Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-07T14:03:53.440249Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Yzg4YjNmMTEtYWFjYjZjNDUtYmVmMGU1YzItYTMxMDI2NTk=, ActorId: [3:7490571714195421999:2477], ActorState: ExecuteState, TraceId: 01jr8915pje5ejmc3dqrka7re9, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034632228, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629974, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629652, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632137, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632193, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034631962, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632018, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629757, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034633390, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629498, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-07T14:03:53.485958Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-07T14:03:53.486078Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Yzg4YjNmMTEtYWFjYjZjNDUtYmVmMGU1YzItYTMxMDI2NTk=, ActorId: [3:7490571714195421999:2477], ActorState: ExecuteState, TraceId: 01jr8915qzd9ge6nyss1b62yyc, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034632228, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629974, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629652, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632137, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632193, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034631962, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632018, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629757, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034633390, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629498, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-07T14:03:53.515435Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:53.515579Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Yzg4YjNmMTEtYWFjYjZjNDUtYmVmMGU1YzItYTMxMDI2NTk=, ActorId: [3:7490571714195421999:2477], ActorState: ExecuteState, TraceId: 01jr8915s09b8dqrkwpe8ewwjr, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:53.537510Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:53.537675Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Yzg4YjNmMTEtYWFjYjZjNDUtYmVmMGU1YzItYTMxMDI2NTk=, ActorId: [3:7490571714195421999:2477], ActorState: ExecuteState, TraceId: 01jr8915snb2r37jcrjqgwx63y, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034632228, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629974, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629652, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632137, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632193, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034631962, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632018, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629757, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034632088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034633390, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034629498, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-07T14:03:53.584719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.605865Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571692720582894:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:53.605935Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 30698, MsgBus: 23351 2025-04-07T14:03:34.976897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630676306324:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.976985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001141/r3tmp/tmpfAssis/pdisk_1.dat 2025-04-07T14:03:35.675579Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:03:35.678738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.678833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.686282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30698, node 1 2025-04-07T14:03:36.060851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.060879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.060892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.061308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23351 TClient is connected to server localhost:23351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.831907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.853592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.877806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.166230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.393207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.467806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.566871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571647856177067:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.566983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.977008Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630676306324:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.977078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.478866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.499905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.520206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.557799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.579866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.656702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656446112162:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.656834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.657877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656446112167:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.662744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.674697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571656446112169:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 26908, MsgBus: 8439 2025-04-07T14:03:43.405242Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571669258825296:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.405288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001141/r3tmp/tmpyaMi8a/pdisk_1.dat 2025-04-07T14:03:43.594271Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.611755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.611838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.613322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26908, node 2 2025-04-07T14:03:43.687894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.687915Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.687923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.688023Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8439 TClient is connected to server localhost:8439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.191696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.196681Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:46.653921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682143727808:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.654015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.662341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.728903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682143727908:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.729001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.732827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682143727914:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.754636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.764031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571682143727916:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 12584, MsgBus: 4873 2025-04-07T14:03:48.035596Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571692144469208:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.035657Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001141/r3tmp/tmpy7eZQY/pdisk_1.dat 2025-04-07T14:03:48.132562Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:48.193441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:48.193533Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:48.194670Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12584, node 3 2025-04-07T14:03:48.265208Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:48.265228Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:48.265236Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:48.265389Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4873 TClient is connected to server localhost:4873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:48.790777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.821081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.894673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:49.103047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:49.191835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.286990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705029372796:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.287075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.317731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.346579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.379998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.419676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.457701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.533296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.617907Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705029373302:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.617980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.618383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705029373308:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.622424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:51.636270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571705029373310:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:53.036300Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571692144469208:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:53.036360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] Test command err: Trying to start YDB, gRPC: 24047, MsgBus: 25702 2025-04-07T14:03:34.975682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633085253096:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00114b/r3tmp/tmpdtPGCr/pdisk_1.dat 2025-04-07T14:03:35.643431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.671004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.671110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.672640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24047, node 1 2025-04-07T14:03:36.062825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.062844Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.062850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.062941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25702 TClient is connected to server localhost:25702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.824056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:38.290284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650265122779:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.298157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.298637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650265122791:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.334265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:38.344452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571650265122793:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:39.975928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633085253096:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.534602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-07T14:03:40.740533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.822467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T14:03:40.838578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.525415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.645400Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-07T14:03:41.660996Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571663150025264:2385], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:41.661174Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzEzYTI0ZjUtZTdhMjhmZDUtOTQzMTM0MWUtOTNkZWE1Zg==, ActorId: [1:7490571663150025262:2384], ActorState: ExecuteState, TraceId: 01jr890t6h95y0vsd3by6a1brj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:41.700780Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571663150025274:2390], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:41.700942Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY2YzNlZjgtMWNjOWEwYTUtZjFjMGJhOWYtYzEyZGIwNDE=, ActorId: [1:7490571663150025272:2389], ActorState: ExecuteState, TraceId: 01jr890t7s1d0kwh26r6nv5v3f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11640, MsgBus: 23334 2025-04-07T14:03:42.395433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571665849233087:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.395481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00114b/r3tmp/tmpYaecWI/pdisk_1.dat 2025-04-07T14:03:42.502392Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:42.522803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.522900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.524528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11640, node 2 2025-04-07T14:03:42.567717Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.567738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.567745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.567845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23334 TClient is connected to server localhost:23334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:42.994041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:43.001340Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:45.178699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571678734135594:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.178784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571678734135602:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.178841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.183419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:45.191249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571678734135608:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:45.426864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27790, MsgBus: 6710 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00114b/r3tmp/tmpWxSAeh/pdisk_1.dat 2025-04-07T14:03:46.702469Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:46.710609Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:46.733559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:46.733647Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:46.735411Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27790, node 3 2025-04-07T14:03:46.814630Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:46.814654Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:46.814665Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:46.814762Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6710 TClient is connected to server localhost:6710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:47.314912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.322109Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:47.335404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.428144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:47.608055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.687180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:49.921299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571694987059294:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.921399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.964237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.994199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.032100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.070038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.143927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.192147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.286759Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571699282027093:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.286837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.286891Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571699282027098:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.291951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:50.305235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571699282027100:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:51.495132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.620751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.669911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.073914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.278980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.382611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.515113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.773799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.944586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 6289, MsgBus: 62101 2025-04-07T14:03:34.975421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571631205814178:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001161/r3tmp/tmpAvncnl/pdisk_1.dat 2025-04-07T14:03:35.763985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.764617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.764700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.767800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6289, node 1 2025-04-07T14:03:36.059531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.059551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-07T14:03:36.059556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.059632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62101 TClient is connected to server localhost:62101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.933068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.063135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.202968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.374025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.455190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.545718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648385684925:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.545803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975667Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571631205814178:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.491453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.536431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.565743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.595068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.641586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.726917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656975620028:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.726978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.727216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656975620033:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.729920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.736912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571656975620035:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.716366Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDVmMGY3YjgtZTA0NDNiNjItYzEyMTJmYmItOTZhMDQwMzE=, ActorId: [1:7490571661270587665:2471], ActorState: ExecuteState, TraceId: 01jr890t80e9jwvcas1hbyxywg, Create QueryResponse for error on request, msg: 2025-04-07T14:03:41.800281Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJhMGRkMjgtYWQwMDI3ZTktN2M2NmQ1YS02YjllNjA5Ng==, ActorId: [1:7490571661270587724:2480], ActorState: ExecuteState, TraceId: 01jr890tay76pc2mek4m62sg37, Create QueryResponse for error on request, msg: 2025-04-07T14:03:41.867719Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDExOGYzNjUtN2QwMzgyNmYtZWFkODJmMzgtZTViZjk5YzY=, ActorId: [1:7490571661270587784:2492], ActorState: ExecuteState, TraceId: 01jr890td0engarjdt699h63qa, Create QueryResponse for error on request, msg: 2025-04-07T14:03:42.048469Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjhkNjQ5Y2EtYWNlODQ2ZTAtZDIzYTgzNC03ZjE0ZjE1ZA==, ActorId: [1:7490571665565555174:2509], ActorState: ExecuteState, TraceId: 01jr890tjpan901yv7y4vydfzs, Create QueryResponse for error on request, msg: 2025-04-07T14:03:42.121870Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJmYmVlNDYtZWM1ZjMwZmUtMWEwOGY5OGQtZjEzZWUxYTg=, ActorId: [1:7490571665565555247:2521], ActorState: ExecuteState, TraceId: 01jr890tmx4bcpma59zdbdxaek, Create QueryResponse for error on request, msg: 2025-04-07T14:03:42.279865Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTY3NjU5ZTMtZWQzMTk2ODYtNzJkYmE5NTktMzA1NTZhMzk=, ActorId: [1:7490571665565555413:2545], ActorState: ExecuteState, TraceId: 01jr890tsz18fmqdnd8r412vxt, Create QueryResponse for error on request, msg: 2025-04-07T14:03:42.341178Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTRmMTE1Yi1jOGM5ZTA2OC03YzJiOTJhMy0yNzQ0NmIwYQ==, ActorId: [1:7490571665565555467:2554], ActorState: ExecuteState, TraceId: 01jr890tvr5aqt5kq4jvsemkpj, Create QueryResponse for error on request, msg: 2025-04-07T14:03:42.358089Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzZhNGFmZDgtY2U2YWQ5NjgtZjMzZTVlYTEtYmJhYmI3NDY=, ActorId: [1:7490571665565555503:2558], ActorState: ExecuteState, TraceId: 01jr890twbdkekct8mzpqg3bn6, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6012, MsgBus: 63309 2025-04-07T14:03:43.453812Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571671767881208:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.453853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001161/r3tmp/tmpVElcD2/pdisk_1.dat 2025-04-07T14:03:43.613170Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.629306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.629393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.630912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6012, node 2 2025-04-07T14:03:43.677972Z node 2 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.677995Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.678003Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.678092Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63309 TClient is connected to server localhost:63309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ... 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.652622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.718007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.721035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.781807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.837935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.910245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.991458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.065835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.117711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.176685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.248882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.336758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.445044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.594851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.597116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10018, MsgBus: 26208 2025-04-07T14:03:50.535150Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571702040556606:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:50.535286Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001161/r3tmp/tmpsAopGj/pdisk_1.dat 2025-04-07T14:03:50.634499Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:50.645163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:50.645241Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:50.652238Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10018, node 3 2025-04-07T14:03:50.725190Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:50.725215Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:50.725227Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:50.725359Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26208 TClient is connected to server localhost:26208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:51.310002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.316732Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:51.327093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.428296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.627187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:51.709646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.205353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571719220427479:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.205452Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.254524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.289295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.320322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.346920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.376750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.449956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.541358Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571719220427979:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.541439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.541878Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571719220427984:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.545799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:54.559188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571719220427986:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:55.539273Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571702040556606:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:55.539320Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:55.602170Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571723515395599:2464], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-04-07T14:03:55.604337Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTdkNzFjMjgtZGNhMWFhYTMtYzkxMDZjOGUtODQzZDViY2I=, ActorId: [3:7490571723515395592:2460], ActorState: ExecuteState, TraceId: 01jr8917sv4te5sm7nq77gycpk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16834, MsgBus: 7050 2025-04-07T14:03:34.975362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630542109152:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ed/r3tmp/tmpEixFpL/pdisk_1.dat 2025-04-07T14:03:35.635414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.635495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.637489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16834, node 1 2025-04-07T14:03:35.685312Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.685332Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.689093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:36.058503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.058530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.058537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.058646Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7050 TClient is connected to server localhost:7050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:37.143698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.156192Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:38.706199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571647721978836:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.706308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.986930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630542109152:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.986976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.468039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.606218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.606434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.606673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.606786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.606885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.606979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.607067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.607161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.607265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.607356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.607460Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.607563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571656311913564:2314];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.614245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.614322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.614555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.614653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.614741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.614842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.614926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.615019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.615104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.615198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.615297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.615376Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571656311913555:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.643085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571656311913560:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.643140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571656311913560:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.643316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571656311913560:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.643418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571656311913560:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14: ... , doublechecking } Trying to start YDB, gRPC: 28878, MsgBus: 10466 2025-04-07T14:03:43.136532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571672585558412:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.136603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ed/r3tmp/tmpUARpPw/pdisk_1.dat 2025-04-07T14:03:43.298781Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.307861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.307933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.309811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28878, node 2 2025-04-07T14:03:43.467263Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.467286Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.467295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.467375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10466 TClient is connected to server localhost:10466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:03:43.918765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.395637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685470460921:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.395847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.409270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.629818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.872884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685470462221:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.872972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.873288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685470462226:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.876924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.890182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571685470462228:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:03:47.596418Z node 2 :KQP_EXECUTER WARN: ActorId: [2:7490571689765429849:2441] TxId: 281474976715664. Ctx: { TraceId: 01jr890zw76dxv445kwk640e1z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzhmYWNmNWEtNzExZjE0YmUtNjQxYjhmZmItNTFhYTYzZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event: NYql::NDq::TEvDqCompute::TEvState, at state: FinalizeState 2025-04-07T14:03:48.136076Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571672585558412:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.136129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25745, MsgBus: 62709 2025-04-07T14:03:49.622076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571694969541145:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:49.622129Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ed/r3tmp/tmpewPdYH/pdisk_1.dat 2025-04-07T14:03:49.725819Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:49.725906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:49.726217Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:49.737994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25745, node 3 2025-04-07T14:03:49.793250Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:49.793277Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:49.793287Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:49.793413Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62709 TClient is connected to server localhost:62709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:03:50.317343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.884820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571707854443648:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.884921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.903324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.124186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.339275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571712149412250:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.339358Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.339645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571712149412255:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.342572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:03:53.350712Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571712149412257:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:03:54.622566Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571694969541145:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:54.622618Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 19689, MsgBus: 65127 2025-04-07T14:03:34.975380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632231573760:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111c/r3tmp/tmpXeEjUx/pdisk_1.dat 2025-04-07T14:03:35.717167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.728803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.728878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.735792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19689, node 1 2025-04-07T14:03:36.063895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.063911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.063920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.063979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65127 TClient is connected to server localhost:65127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.915824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:36.967548Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.983335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.134104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.391188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.479115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.796352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571649411444541:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.796454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975762Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632231573760:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.480830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.504612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.529356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.557754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.627309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.681500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658001379640:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.681559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.681587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658001379645:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.684085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.695991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658001379647:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 8677, MsgBus: 2130 2025-04-07T14:03:43.398111Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571672538217235:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111c/r3tmp/tmp6NxoiM/pdisk_1.dat 2025-04-07T14:03:43.426037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:43.512571Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.544642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.551478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.556236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8677, node 2 2025-04-07T14:03:43.622644Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.622661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.622668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.622770Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2130 TClient is connected to server localhost:2130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.088071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:44.125188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:44.206334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:44.351906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.477184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:46.505877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685423120632:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.505976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.543621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.575047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.620200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.674960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.710682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.776209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.824110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685423121130:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.824220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.824467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685423121135:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.827906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.838942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571685423121137:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:47.857681Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2VjYzFlZS01OTYzZGQ5YS0zZGExZmE5Zi00YTVkYTI4OQ==, ActorId: [2:7490571689718088740:2460], ActorState: ReadyState, TraceId: 01jr8910867jfcga3yvg3j0dx8, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 26761, MsgBus: 13282 2025-04-07T14:03:48.625346Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571694626572383:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.625401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111c/r3tmp/tmpOIZoqA/pdisk_1.dat 2025-04-07T14:03:48.756062Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:48.768378Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:48.768467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:48.770019Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26761, node 3 2025-04-07T14:03:48.821034Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:48.821059Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:48.821067Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:48.821180Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13282 TClient is connected to server localhost:13282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:49.325292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:49.331743Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:51.886305Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571707511474888:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.886397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.886790Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571707511474900:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.891111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:51.906542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571707511474902:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:52.010506Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTRiN2U3NzgtOTc2NGQ0YjgtYWMxNTUzZTEtNmY0ZWZjNmY=, ActorId: [3:7490571707511474886:2301], ActorState: ExecuteState, TraceId: 01jr8911q2ds8xbppch23aqxbp, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 19646, MsgBus: 8738 2025-04-07T14:03:34.975239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571634474769904:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e5/r3tmp/tmpL7Yrlj/pdisk_1.dat 2025-04-07T14:03:35.796450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.796545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.806619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.831652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19646, node 1 2025-04-07T14:03:36.067526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.067550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.067555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.067638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8738 TClient is connected to server localhost:8738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.205261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.221513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.235556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.386691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.600395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.683770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:38.947787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651654640675:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.947892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.977709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571634474769904:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.977759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.479165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.501755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.529564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.562487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.600994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.656379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660244575768:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.656452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.656864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660244575773:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.660493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.672595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571660244575775:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 5313, MsgBus: 9810 2025-04-07T14:03:43.354076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571669943511135:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.354131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e5/r3tmp/tmplmpBOw/pdisk_1.dat 2025-04-07T14:03:43.492096Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.531715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.531801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.537588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5313, node 2 2025-04-07T14:03:43.637900Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.637923Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.637929Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.638019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9810 TClient is connected to server localhost:9810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.121253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.130821Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:46.629497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682828413645:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.629586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.629992Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682828413657:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.634235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.645165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571682828413659:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:46.783529Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T14:03:46.788348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-07T14:03:47.019987Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-07T14:03:47.026700Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571687123381265:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:47.026926Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTJmZTk4YmEtZDhjNTBiY2UtZGY5OGI3MC04MTdiOTFkNQ==, ActorId: [2:7490571682828413626:2301], ActorState: ExecuteState, TraceId: 01jr890ze8dzpr3zf229pz03av, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:47.069528Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571687123381290:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:47.069872Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTQ3ZjkwNWEtYWZmNjNhY2UtOTNmYTBhNi1jNmFkZDgwOQ==, ActorId: [2:7490571687123381286:2341], ActorState: ExecuteState, TraceId: 01jr890zfk8n5zmh7bcnvmdj2z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16063, MsgBus: 32535 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e5/r3tmp/tmpJFbEXb/pdisk_1.dat 2025-04-07T14:03:47.792963Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:47.837009Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:47.857929Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:47.858008Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:47.861002Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16063, node 3 2025-04-07T14:03:47.905321Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:47.905344Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:47.905350Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:47.905443Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32535 TClient is connected to server localhost:32535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:48.393862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.401558Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:48.413588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:48.476455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.662105Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:48.756192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.115945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705993736017:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.116024Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.173790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.208984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.246495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.282424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.317457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.395283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.439514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705993736510:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.439601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.439643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571705993736515:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.442811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:51.461160Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571705993736517:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:52.643760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.646164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.648228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 25319, MsgBus: 13995 2025-04-07T14:03:34.975536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633779452190:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001127/r3tmp/tmpdZtETw/pdisk_1.dat 2025-04-07T14:03:35.662205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.673190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.673272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.675379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25319, node 1 2025-04-07T14:03:36.066155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.066174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.066179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.066247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13995 TClient is connected to server localhost:13995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.795312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.872158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.124319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:37.350787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.463951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.483864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650959322943:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.483938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.976107Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633779452190:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.976160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.476645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.499799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.562334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.584597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.610220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.686022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659549258057:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.686107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.686173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659549258062:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.689267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.703743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659549258064:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 9759, MsgBus: 19126 2025-04-07T14:03:42.487184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571668041564784:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.487218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001127/r3tmp/tmppjNZfc/pdisk_1.dat 2025-04-07T14:03:42.588082Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9759, node 2 2025-04-07T14:03:42.628093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.628174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.636470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:42.676838Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.676859Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.676870Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.676960Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19126 TClient is connected to server localhost:19126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:43.129259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.146740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.200826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:43.346627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.437984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:45.519402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571680926468354:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.524344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Ro ... itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.491400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.550991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.605655Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571703508710673:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.605730Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.605786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571703508710678:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.611344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:51.620098Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571703508710680:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:52.734582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.924503Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571686328839361:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:52.924569Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:53.060241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.077806Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODVlMzEwYzctNzFiYTZlZGEtYzljODUxOGUtNWRiYzY0YTI=, ActorId: [3:7490571707803678375:2474], ActorState: ExecuteState, TraceId: 01jr891540fk96xhqwtj59hd8y, Create QueryResponse for error on request, msg: 2025-04-07T14:03:53.193083Z node 3 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-07T14:03:53.392283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.647168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.932384Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571712098646175:2557], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:53.933879Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGQ0YzEyMTYtZTU4ZTc5ZWQtYWJjYWMyNWEtNTVkY2ZlYTY=, ActorId: [3:7490571712098646024:2534], ActorState: ExecuteState, TraceId: 01jr8915w5ayker0c0w6ccj4sq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:54.026046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.137822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.571664Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715697, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:54.571823Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTdjNDgzZGQtYWJkYmQ5MmUtODY2M2NmODQtNDM0NWI2ZQ==, ActorId: [3:7490571716393613774:2614], ActorState: ExecuteState, TraceId: 01jr8916sm122nt9xj7r12j4st, Create QueryResponse for error on request, msg: 2025-04-07T14:03:54.602263Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:54.602407Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGI2ZjViYjEtZTcyNDBhNGMtNzZhMGEzNjMtYWFkMzUyZGI=, ActorId: [3:7490571716393613799:2621], ActorState: ExecuteState, TraceId: 01jr8916tmbqe0220t4nx0qnyv, Create QueryResponse for error on request, msg: 2025-04-07T14:03:54.986203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T14:03:55.113393Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715705, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-07T14:03:55.113515Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjE0ZjgzOGUtNjk3NGNkMWMtYzFmN2FkN2UtYWU2MWViZjc=, ActorId: [3:7490571716393613886:2646], ActorState: ExecuteState, TraceId: 01jr89172f9twt23p2daa6f6yd, Create QueryResponse for error on request, msg: 2025-04-07T14:03:55.244955Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571720688581352:2686], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:03:55.246600Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWZlM2RjM2QtOTkzYzM0MzMtN2FhNmIwZjAtMmZlMDk5NDY=, ActorId: [3:7490571720688581349:2684], ActorState: ExecuteState, TraceId: 01jr8917ew0z1ehnhd3gzf0qxt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:03:55.493055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T14:03:56.084902Z node 3 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [3:7490571724983548889:2739], owner: [3:7490571703508710142:2369], statement id: 1 2025-04-07T14:03:56.085214Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2YxZmJmYjYtYmU3N2VmOTYtODJjZWM5ZTQtYmRjZjI3MGI=, ActorId: [3:7490571724983548887:2738], ActorState: ExecuteState, TraceId: 01jr89189d5w2x92v5zsryn9a7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:03:56.267947Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571724983548936:2757], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:03:56.268188Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGU4ZmEzMGEtYWY1M2FlN2ItZDFjN2JmOTgtZWFlMzlmYQ==, ActorId: [3:7490571724983548915:2749], ActorState: ExecuteState, TraceId: 01jr8918ct0q3xcrvxaxy71dks, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:03:56.319802Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-07T14:03:56.418673Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571724983549053:2780], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:03:56.418889Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTYwOTFjY2ItZjIwMTI4NDgtY2RiMzA4NTAtZDhjYmMwZDE=, ActorId: [3:7490571724983548963:2766], ActorState: ExecuteState, TraceId: 01jr8918g64d67trrcc5245rzw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 15333, MsgBus: 27898 2025-04-07T14:03:34.975337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632799033241:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001082/r3tmp/tmpitYQyr/pdisk_1.dat 2025-04-07T14:03:35.711524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.720617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.720852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.722911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15333, node 1 2025-04-07T14:03:36.056592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.056774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27898 TClient is connected to server localhost:27898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.804902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:38.313475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571649978902928:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.313568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975743Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632799033241:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.468207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.598270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.598417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.598460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.598530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.598625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.598699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.598706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.598783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.598815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.598873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.598901Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.598948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.598996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.599010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.599084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.599282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.599288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.599348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.599385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.599401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571658568837655:2313];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.599466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.599547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.600040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.600165Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571658568837651:2311];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.632780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571658568837663:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.632836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571658568837663:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.633038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571658568837663:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.633131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571658568837663:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.633219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571658568837663:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.633299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=7207518622 ... te;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.886564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891502Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891618Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.891714Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:46.911254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571681928729053:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.911352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.911579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571681928729058:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.915399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.925686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571681928729060:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:03:47.109414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"0;"}}]}; 2025-04-07T14:03:47.118903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"1;"}}]}; 2025-04-07T14:03:47.127725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:47.128111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:47.592531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"0;"}}]}; 2025-04-07T14:03:47.614170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"1;"}}]}; 2025-04-07T14:03:47.629530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:47.631872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:03:47.662414Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571686223696772:2515], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-04-07T14:03:47.664112Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q2ZDMzMTgtOTIzMDcyMzYtMTNiNmZhNjMtNWRkZjkyOWY=, ActorId: [2:7490571686223696770:2514], ActorState: ExecuteState, TraceId: 01jr89101n4nrrhp7792x1fkv2, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 2025-04-07T14:03:48.305167Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571669043826118:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.308769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 11203, MsgBus: 12932 2025-04-07T14:03:53.568517Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571714566429653:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:53.568584Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001082/r3tmp/tmpWXC4b5/pdisk_1.dat 2025-04-07T14:03:53.703968Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:53.729811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:53.729902Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:53.731145Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11203, node 3 2025-04-07T14:03:53.812704Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:53.812725Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:53.812733Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:53.812828Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12932 TClient is connected to server localhost:12932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:54.297871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.310768Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:56.801696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571727451332156:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:56.801789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:56.820791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:56.912059Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571727451332257:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:56.912147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:56.912296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571727451332262:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:56.915401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:03:56.924617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571727451332264:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:03:58.566952Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571714566429653:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:58.567038Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 22386, MsgBus: 25986 2025-04-07T14:03:34.975874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632984985925:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.976026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b8/r3tmp/tmp6MpK8R/pdisk_1.dat 2025-04-07T14:03:35.685759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.685851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.693691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.774773Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22386, node 1 2025-04-07T14:03:36.057655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.057678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.057691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.057782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25986 TClient is connected to server localhost:25986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.827644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:36.838458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.872612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.103022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.270413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:03:37.337182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.717220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650164856685:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.717318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.992687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632984985925:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.992747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.481791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.510157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.541924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.585024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.654011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.726874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658754791793:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.726937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.726943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658754791798:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.729264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.736427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658754791800:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } took: 8.040110s took: 8.054149s took: 8.061254s took: 8.055849s took: 8.070941s took: 8.070998s took: 8.077694s took: 8.078544s took: 8.080403s took: 8.115743s Trying to start YDB, gRPC: 64158, MsgBus: 22628 2025-04-07T14:03:50.662717Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571700973275943:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:50.662769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b8/r3tmp/tmpw9uGHA/pdisk_1.dat 2025-04-07T14:03:50.875362Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64158, node 2 2025-04-07T14:03:50.970577Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:50.970599Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:50.970605Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:50.970705Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:03:51.012611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:51.012744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:51.013925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22628 TClient is connected to server localhost:22628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:51.496133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.509493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.596364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:51.769126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.863635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.194642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571718153146825:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.194728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.231288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.260403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.300319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.324853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.354156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.390454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:54.447942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571718153147318:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.448111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571718153147323:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.448144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:54.451424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:54.462527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571718153147325:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:55.664987Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571700973275943:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:55.665042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 7.086810s took: 7.088143s took: 7.087882s took: 7.088317s took: 7.089388s took: 7.089950s took: 7.090229s took: 7.089922s took: 7.089879s took: 7.090108s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 23939, MsgBus: 17066 2025-04-07T14:03:34.975699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632530280458:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c7/r3tmp/tmptZE1Yt/pdisk_1.dat 2025-04-07T14:03:35.615183Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.667788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.667877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.669449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23939, node 1 2025-04-07T14:03:36.061838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.063180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.063197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.063282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17066 TClient is connected to server localhost:17066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:36.827478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.841252Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.873854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.126090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.302015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.387535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:38.474015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571649710151215:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.474186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.980298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632530280458:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.980354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.486908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.514080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.542856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.574507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.643355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.687578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658300086314:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.687650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.687900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658300086319:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.692148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.708093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658300086321:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.706648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.708108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.710132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:43.995667Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624003, txId: 281474976710703] shutting down Trying to start YDB, gRPC: 29876, MsgBus: 64543 2025-04-07T14:03:45.509720Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571681725512164:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:45.509791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c7/r3tmp/tmpubzrTf/pdisk_1.dat 2025-04-07T14:03:45.634737Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:45.668053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:45.668153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:45.669768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29876, node 2 2025-04-07T14:03:45.746981Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:45.747008Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:45.747017Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:45.747118Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64543 TClient is connected to server localhost:64543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:46.249641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:46.256700Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:46.271071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 720575940 ... d: 72057594046644480 2025-04-07T14:03:48.996066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.037610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.087089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.143991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571698905383531:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.144070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.144432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571698905383536:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.148818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:49.202757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571698905383538:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:50.271345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.281442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.282878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.512780Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571681725512164:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:50.512837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:52.742698Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034632760, txId: 281474976715707] shutting down 2025-04-07T14:03:53.000905Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034633033, txId: 281474976715710] shutting down Trying to start YDB, gRPC: 17829, MsgBus: 14274 2025-04-07T14:03:53.872380Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571713521784208:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:53.872421Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c7/r3tmp/tmpUZBVKk/pdisk_1.dat 2025-04-07T14:03:53.976095Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:53.994773Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:53.994856Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:53.996092Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17829, node 3 2025-04-07T14:03:54.041226Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:54.041252Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:54.041259Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:54.041374Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14274 TClient is connected to server localhost:14274 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:54.559921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.576502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.657943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.831581Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:54.913373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.618490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571730701655077:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.618571Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.648592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.677593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.707162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.735679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.766451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.809235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:57.852049Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571730701655572:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.852121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.852126Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571730701655577:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.854917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:57.864654Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571730701655579:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:58.865494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.867586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.869143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.872424Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571713521784208:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:58.872511Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:01.088619Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034641125, txId: 281474976715705] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 65161, MsgBus: 1782 2025-04-07T14:03:34.974666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633351968992:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.974722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001139/r3tmp/tmpUQot0X/pdisk_1.dat 2025-04-07T14:03:35.629603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.630252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.630337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.666511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65161, node 1 2025-04-07T14:03:36.058894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.058918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.058923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.059007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1782 TClient is connected to server localhost:1782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.057880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.090414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:37.242756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.418471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.507722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.487763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650531839721:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.487878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.974753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633351968992:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.974824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.480747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.505302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.548399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.579732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.653487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.701184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659121774833:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.701283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.701396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659121774838:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.704711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.716636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659121774840:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.731336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3565, MsgBus: 23838 2025-04-07T14:03:42.667798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571668416132055:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.667891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001139/r3tmp/tmpZp5rGX/pdisk_1.dat 2025-04-07T14:03:42.772573Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3565, node 2 2025-04-07T14:03:42.803569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.803644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.809441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:42.817000Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.817024Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.817030Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.817127Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23838 TClient is connected to server localhost:23838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:43.269072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.274896Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:43.284052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:43.358108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.526797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.584609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:45.918641Z node 2 :KQP_WORKLOAD_SERVICE WARN: ... 6Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.006193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.080974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.124065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.201235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.273149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.317526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685596003471:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.317607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.317942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685596003476:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.321061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.331521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571685596003478:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:47.392206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-07T14:03:47.491159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:47.667726Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571668416132055:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:47.667792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27593, MsgBus: 18385 2025-04-07T14:03:48.331769Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571694596962660:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.332114Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001139/r3tmp/tmp6fLIfW/pdisk_1.dat 2025-04-07T14:03:48.468772Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:48.512012Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:48.512102Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:48.513309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27593, node 3 2025-04-07T14:03:48.581176Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:48.581198Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:48.581204Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:48.581294Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18385 TClient is connected to server localhost:18385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:49.066870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:49.076519Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:49.086662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:49.161748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.331965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:49.406069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.826419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571707481866247:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.826508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:51.873030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.913788Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.946543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.975158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.011914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.093997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.142275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571711776834041:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.142405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.142858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571711776834047:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.146967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:52.160332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571711776834049:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:53.294030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.329442Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571694596962660:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:53.331391Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 21265, MsgBus: 24242 2025-04-07T14:03:34.975139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571634328277429:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001069/r3tmp/tmpzN24YF/pdisk_1.dat 2025-04-07T14:03:35.617006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.617133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.637422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.637669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21265, node 1 2025-04-07T14:03:35.682257Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.682279Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:36.057171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.057207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.057216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.057305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24242 TClient is connected to server localhost:24242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.892400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.918944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.152535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.412247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.528860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.341083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651508148167:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.341177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975388Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571634328277429:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.480258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.505058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.534274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.564561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.633208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.717694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660098083297:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.717752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.717989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660098083302:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.720987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.727331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571660098083304:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 9350, MsgBus: 17206 2025-04-07T14:03:43.393667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571669704492865:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001069/r3tmp/tmpebJNMy/pdisk_1.dat 2025-04-07T14:03:43.428194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:43.513551Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.535800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.535875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.537465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9350, node 2 2025-04-07T14:03:43.670991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.671028Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.671037Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.671115Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17206 TClient is connected to server localhost:17206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.166725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.173756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:44.192231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.263483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:44.470890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.556083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:46.688800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682589396293:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.688885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.729712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.767739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.798561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.829465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.859685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.898145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.954938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682589396787:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.955010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.955109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682589396792:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.957974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.969114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571682589396794:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:47.883231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 10 Trying to start YDB, gRPC: 8629, MsgBus: 6299 2025-04-07T14:03:49.194467Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571694672448615:2134];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001069/r3tmp/tmpvRRXNh/pdisk_1.dat 2025-04-07T14:03:49.293119Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:49.383044Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:49.383138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:49.384527Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:49.399083Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8629, node 3 2025-04-07T14:03:49.469185Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:49.469208Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:49.469215Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:49.469316Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6299 TClient is connected to server localhost:6299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:50.008580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:50.016885Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:50.034519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:50.113490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:50.280070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:50.344457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:52.722728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571707557352105:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.722833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:52.753911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.789180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.824003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.857328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.909246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.945242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.003762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571711852319893:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.003864Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.004275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571711852319898:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.008916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:53.018984Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571711852319900:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:54.194734Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571694672448615:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:54.194829Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 27511, MsgBus: 62546 2025-04-07T14:02:23.564220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571326166553380:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:23.564258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c3d/r3tmp/tmpA8w4AR/pdisk_1.dat 2025-04-07T14:02:24.254647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27511, node 1 2025-04-07T14:02:24.376828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:24.377269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:24.396356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:24.409232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:24.409279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:24.409287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:24.409396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62546 TClient is connected to server localhost:62546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:25.242630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:27.748357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571343346423196:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:27.754241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:02:27.772354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:02:28.225395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:28.225613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:28.225907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:28.226256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:28.226394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:28.226510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:28.226641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:28.226732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:28.226780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:28.226809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:28.226903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:28.226996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:28.227012Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:28.227106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:28.227115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:28.227207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:28.227240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7490571343346423669:2312];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:28.227301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:28.227398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:28.227492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:28.227611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:28.227767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:28.227878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:28.227979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571343346423689:2332];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:28.288008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:28.288070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:28.288298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:28.288402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:28.288497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:28.288583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:28.288971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571343346423693:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11; ... Kikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.304344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[1:7490571343346423680:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037948;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.304559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7490571343346423692:2334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.304838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[1:7490571343346423683:2326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.305577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571347641391259:2364];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.305756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490571347641391259:2364];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.308994Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034632000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:53.310624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7490571347641391252:2362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037905;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.310850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037947;self_id=[1:7490571343346423682:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.311013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[1:7490571343346423678:2321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.311364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7490571347641391252:2362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.311522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[1:7490571343346423682:2325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.311660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[1:7490571343346423678:2321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.314462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7490571343346423671:2314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.314699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571347641391190:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.314874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571347641391248:2360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037900;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; count: 14000 2025-04-07T14:03:53.315284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7490571343346423671:2314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.315456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7490571347641391190:2345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.315591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7490571347641391248:2360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.316796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571347641391192:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037921;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.316998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7490571347641391192:2346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037921;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317159Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571347641391409:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490571347641391409:2371];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7490571343346423694:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7490571343346423694:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571347641391228:2355];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.317991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571343346423602:2308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571343346423687:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490571347641391228:2355];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571343346423602:2308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571347641391224:2352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7490571343346423685:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037949;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571347641391311:2367];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.319996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571343346423687:2330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 
2025-04-07T14:03:53.320203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571347641391224:2352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.320344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7490571343346423685:2328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037949;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.320516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571347641391311:2367];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.320767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[1:7490571343346423681:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037943;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.320939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[1:7490571343346423681:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037943;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.321104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7490571347641391203:2348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:03:53.321212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7490571347641391203:2348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled;
Timing: checkTable took 4 seconds
WAIT_COMPACTION: 192
WAIT_COMPACTION: 256
WAIT_COMPACTION: 256
WAIT_COMPACTION: 256
WAIT_COMPACTION: 256
WAIT_COMPACTION: 256
Timing: wait took 6 seconds
====================================
QUERY:
SELECT count(*) as count, FROM `/Root/olapTable` WHERE field == 'abcde'
RESULT:
2025-04-07T14:04:00.406132Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034639000, txId: 18446744073709551615] shutting down
count: 12546
====================================
QUERY:
SELECT count(*) as count, FROM `/Root/olapTable`
RESULT:
2025-04-07T14:04:01.425505Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034640000, txId: 18446744073709551615] shutting down
count: 14000
Timing: checkTable took 2 seconds
Timing: wait took 0 seconds
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 26061, MsgBus: 20659 2025-04-07T14:03:34.975582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632020414281:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112e/r3tmp/tmpbg8qm7/pdisk_1.dat 2025-04-07T14:03:35.815826Z node 1 :IMPORT WARN: Table
profiles were not loaded 2025-04-07T14:03:35.820992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.821086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.824010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26061, node 1 2025-04-07T14:03:36.060113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.060132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.060140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.060224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20659 TClient is connected to server localhost:20659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.149180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.167818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.180834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.347356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.606805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.714919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.853526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571649200285028:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.853634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975682Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632020414281:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.527440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.558034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.591945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.626553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.694531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.767464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571657790220141:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.767549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.767749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571657790220146:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.770670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.778006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571657790220148:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.965320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571662085187827:2480], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:03:41.965403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571662085187825:2478], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:03:41.965470Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490571662085187826:2479], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZWVlNTcxMzAtYWI0NGY1NzEtOTcwNjBiMWUtNDJmZDY5NWE=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:03:41.965566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490571662085187826:2479], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZWVlNTcxMzAtYWI0NGY1NzEtOTcwNjBiMWUtNDJmZDY5NWE=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-07T14:03:41.965641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:03:41.965748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7490571662085187823:2477]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-07T14:03:41.965853Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVlNTcxMzAtYWI0NGY1NzEtOTcwNjBiMWUtNDJmZDY5NWE=, ActorId: [1:7490571662085187823:2477], ActorState: ExecuteState, TraceId: 01jr890tga2rdvfe0x0q7ny4z6, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-07T14:03:41.966067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7490571662085187823:2477]: Pool another_pool_id not found Trying to start YDB, gRPC: 9518, MsgBus: 17074 2025-04-07T14:03:43.400911Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571669858818226:2237];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112e/r3tmp/tmppW7re5/pdisk_1.dat 2025-04-07T14:03:43.413863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:43.474616Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9518, node 2 2025-04-07T14:03:43.511594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.511697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.525410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:43.589202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.589229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.589235Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.589329Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClie ... hemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.692347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.722103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.758320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.863553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682743722099:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.863660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.864796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571682743722104:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.870709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.882306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571682743722106:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:48.069045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:48.079555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-04-07T14:03:48.384805Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571669858818226:2237];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.384857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:48.651728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.103196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:1, at schemeshard: 72057594046644480 2025-04-07T14:03:49.633333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.147924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.619992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T14:03:52.632971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715717:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.116489s: status = SUCCESS, issues = 2025-04-07T14:03:53.774151Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzlmN2QwMGEtZGUzNDRkZjYtNzY3NWNjMWMtZjA2NmU1YTg=, ActorId: [2:7490571712808494862:2798], ActorState: ExecuteState, TraceId: 01jr89161b1y19rxygh7q56fcc, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 22265, MsgBus: 20646 2025-04-07T14:03:55.392165Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571724554783714:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:55.392204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112e/r3tmp/tmpJYbsdj/pdisk_1.dat 2025-04-07T14:03:55.498151Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:55.522826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:55.522942Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:55.525586Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22265, node 3 2025-04-07T14:03:55.601849Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:55.601887Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:55.601894Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:55.601979Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20646 TClient is connected to server localhost:20646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:56.072406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:56.086429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:56.166888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:56.330701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:56.396813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:58.548771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571737439687313:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:58.548857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:58.578931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.606254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.635511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.679336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.708192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.737437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:58.774171Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571737439687806:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:58.774247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:58.774278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571737439687811:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:58.777258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:58.786083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571737439687813:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 24557, MsgBus: 11048 2025-04-07T14:03:34.975653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571634200943407:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111b/r3tmp/tmpOOW6Hl/pdisk_1.dat 2025-04-07T14:03:35.743566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.754893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.754973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.770744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24557, node 1 2025-04-07T14:03:36.061234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.061252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.061264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.061340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11048 TClient is connected to server localhost:11048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.059634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.073394Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.079550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.239024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.437428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.536713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.814318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651380814178:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.814438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571634200943407:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.479126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.500594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.578375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.613145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.680783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.718058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659970749279:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.718111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.718217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659970749284:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.720935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.727289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659970749286:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.661149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 120 Trying to start YDB, gRPC: 8385, MsgBus: 12851 2025-04-07T14:03:50.063469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571701565434230:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111b/r3tmp/tmp6Ymv48/pdisk_1.dat 2025-04-07T14:03:50.136607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:50.198663Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:50.208399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:50.215194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:50.220861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8385, node 2 2025-04-07T14:03:50.292603Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:50.292624Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:50.292637Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:50.292772Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12851 TClient is connected to server localhost:12851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:50.796885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:50.817655Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:50.835260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:50.921882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:51.135289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:51.226063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.507453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571714450337666:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.507506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.562105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.595987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.633521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.715519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.758590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.841033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:53.926442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571714450338171:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.926507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.926675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571714450338176:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:53.930799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:53.943216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571714450338178:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:54.996736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:55.032631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:55.065359Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571701565434230:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:55.065405Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:55.079092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23797, MsgBus: 18683 2025-04-07T14:03:57.852639Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571730844576057:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:57.852756Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111b/r3tmp/tmp6RjDp4/pdisk_1.dat 2025-04-07T14:03:57.943375Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:57.966271Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:57.966336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:57.967804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23797, node 3 2025-04-07T14:03:58.010004Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:58.010028Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:58.010037Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:58.010172Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18683 TClient is connected to server localhost:18683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:58.444465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:58.463098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:58.541384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:58.688797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:58.745721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:00.841889Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571743729479643:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.841982Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.911569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.939479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.968874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.004650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.044804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.086842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.162092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571748024447442:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:01.162182Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:01.162481Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571748024447447:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:01.165924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:01.174223Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571748024447449:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2025-04-07T14:03:24.349784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571587533318456:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.349909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011cf/r3tmp/tmp57UiJV/pdisk_1.dat 2025-04-07T14:03:25.294009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.326995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.327106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.363023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:25.376928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 23019, node 1 2025-04-07T14:03:26.634671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.634697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.634708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.634833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:28.297719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.341947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.343438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.360852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:28.361095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.361110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:03:28.372146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.376090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.376122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:03:28.379465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.397037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.397086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.397445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:03:28.400835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:03:28.401194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:03:28.401237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:03:28.401284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:03:28.403437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:03:28.403521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:03:28.403542Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.403620Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:03:28.560874Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7490571604713188372:2296], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-07T14:03:28.560932Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-07T14:03:28.560962Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.560983Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.561130Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-07T14:03:28.561271Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744034608560777) 2025-04-07T14:03:28.561753Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744034608560777 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-07T14:03:28.561924Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-07T14:03:28.564990Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:03:28.565813Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608560777&action=1" } } } 2025-04-07T14:03:28.565948Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.566004Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.566135Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.581037Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-07T14:03:28.581191Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T14:03:28.585636Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-07T14:03:28.585693Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.585753Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7490571604713188380:2194], Recipient 
[1:7490571591828286114:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.585780Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.585792Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.585801Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.585841Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-07T14:03:28.595941Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-07T14:03:28.596032Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-07T14:03:28.602374Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:28.602409Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.602418Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.602425Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.602471Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-07T14:03:28.602493Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744034608560777 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:28.603424Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571604713188383:2297], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608560777&action=1" } UserToken: "" } 2025-04-07T14:03:28.603452Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.605500Z node 1 :CMS_TENANT ... 
e 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.280966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.281019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:9 2025-04-07T14:03:29.281025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.281033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.281071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2025-04-07T14:03:29.281127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.281141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.281183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2025-04-07T14:03:29.281192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.281199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.281231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2025-04-07T14:03:29.281236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.281241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.281269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2025-04-07T14:03:29.281274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.281282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.284936Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-07T14:03:29.284982Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.285012Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:29.285138Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:29.287400Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-07T14:03:29.287441Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-07T14:03:29.287472Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, 
TabletId: 72075186224037897 not found 2025-04-07T14:03:29.289137Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-07T14:03:29.289226Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-07T14:03:29.288923Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:03:29.294420Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-07T14:03:29.294451Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-07T14:03:29.294467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-07T14:03:29.294480Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-07T14:03:29.294495Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-07T14:03:29.294513Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-07T14:03:29.295043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-07T14:03:29.299157Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-07T14:03:29.299253Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7490571609008156321:2194], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.299286Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.299304Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.299316Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.299350Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-07T14:03:29.299372Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-07T14:03:29.299942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.299968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.334014Z node 1 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 request from unregistered ownerId# 5 error in TLogWrite for ownerId# 5 ownerRound# 5 lsn# 159 PDiskId# 1 2025-04-07T14:03:29.334067Z node 1 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 request 
from unregistered ownerId# 5 error in TLogWrite for ownerId# 5 ownerRound# 5 lsn# 160 PDiskId# 1 2025-04-07T14:03:29.352809Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571587533318456:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:29.352867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:29.383331Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571609008156434:2359], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609242827&action=2" } UserToken: "" } 2025-04-07T14:03:29.383371Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:29.383568Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609242827&action=2" } } 2025-04-07T14:03:29.385722Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:29.385747Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.385757Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.385764Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.385813Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744034609242827 2025-04-07T14:03:29.385827Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744034609242827 issue= 2025-04-07T14:03:29.385841Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744034609242827 issue= 2025-04-07T14:03:29.385851Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-07T14:03:29.385945Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744034609242827 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:29.408784Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-07T14:03:29.408874Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.450934Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571609008156486:2362], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609242827&action=2" } UserToken: "" } 2025-04-07T14:03:29.450974Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:29.451153Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609242827&action=2" ready: true status: SUCCESS } } 2025-04-07T14:03:29.461677Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7490571609008156489:2364], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-07T14:03:29.461708Z node 1 :CMS_TENANTS TRACE: StateWork, processing event 
TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.461861Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-07T14:03:29.464445Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7490571609008156492:2365], Recipient [1:7490571591828286114:2194]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-07T14:03:29.464473Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-07T14:03:29.464648Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-07T14:03:29.468527Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T14:03:29.468700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:03:29.669283Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; |90.9%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SessionBusyRetryOperation [GOOD] Test command err: Trying to start YDB, gRPC: 4317, MsgBus: 22982 2025-04-07T14:03:34.974948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571631698979554:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00115b/r3tmp/tmppzGCQP/pdisk_1.dat 2025-04-07T14:03:35.740022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.740102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.744224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4317, node 1 2025-04-07T14:03:35.792509Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.794231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.798480Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:36.059790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.059820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.059827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.059908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22982 TClient is connected to server localhost:22982 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.154634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.182189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.193575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.350657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.553471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.634814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:38.758717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648878850293:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.758796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571631698979554:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.520859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.545959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.575452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.598732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.663628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.698459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571657468785393:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.698531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.698701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571657468785398:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.702246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.711334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571657468785400:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.701349Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-04-07T14:03:41.701434Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-04-07T14:03:41.701486Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-07T14:03:41.701512Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-07T14:03:41.701534Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-07T14:03:41.701560Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-07T14:03:41.701623Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-04-07T14:03:41.701654Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-04-07T14:03:41.701690Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiZWE0ZGQtMTljYzhmYzQtOTZjMWJlOGItOGZkNGI1ZjE=, ActorId: [1:7490571661763753009:2464], ActorState: ExecuteState, TraceId: 01jr890t7j0sk2bc2a93q4bfxz, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 3497, MsgBus: 10043 2025-04-07T14:03:43.381063Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571671085868180:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.381810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00115b/r3tmp/tmpw1jgn3/pdisk_1.dat 
2025-04-07T14:03:43.564530Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.601982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.602062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.603962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3497, node 2 2025-04-07T14:03:43.693176Z node 2 :NET_CLASSIF ... node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 18 2025-04-07T14:03:48.314073Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 19 2025-04-07T14:03:48.314559Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 20 2025-04-07T14:03:48.314964Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 21 2025-04-07T14:03:48.316179Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 22 2025-04-07T14:03:48.316220Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 23 2025-04-07T14:03:48.316245Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 24 2025-04-07T14:03:48.318692Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U2NDZkYTAtZjRlYTg5Y2QtNjdlNTJmNWYtYmJiOGM2ZjU=, ActorId: [2:7490571692560707227:2479], ActorState: ExecuteState, TraceId: 01jr8910pqc88nq5qk4jye9xb7, Reply query error, msg: Pending previous query completion proxyRequestId: 25 2025-04-07T14:03:48.382502Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571671085868180:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:48.382558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:48.431274Z node 2 
:KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 28 2025-04-07T14:03:48.431339Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 29 2025-04-07T14:03:48.432149Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 30 2025-04-07T14:03:48.434945Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 31 2025-04-07T14:03:48.452463Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 32 2025-04-07T14:03:48.452532Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 33 2025-04-07T14:03:48.458095Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0Mzc2ZGEtZWMxNWI0ZTEtMmJjZTAyYjgtZDg4ZDFmOTY=, ActorId: [2:7490571692560707264:2493], ActorState: ExecuteState, TraceId: 01jr8910tc5cq9qzbatdaz3ck0, Reply query error, msg: Pending previous query completion proxyRequestId: 34 2025-04-07T14:03:48.565283Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 37 2025-04-07T14:03:48.565351Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 38 2025-04-07T14:03:48.565378Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 39 2025-04-07T14:03:48.565404Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 40 2025-04-07T14:03:48.565430Z node 2 
:KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 41 2025-04-07T14:03:48.565456Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdmYzQwMmEtZTc0NTUyZjItNmU5YTY2MmUtNjcwZjFlN2Q=, ActorId: [2:7490571692560707321:2509], ActorState: ExecuteState, TraceId: 01jr8910yc87vqscpyz5zfa5r0, Reply query error, msg: Pending previous query completion proxyRequestId: 42 2025-04-07T14:03:48.662645Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEwNDAzNzYtNmVjZWEyMWItZjc3ZmM2ODgtYzI0MTg5MTc=, ActorId: [2:7490571692560707374:2522], ActorState: ExecuteState, TraceId: 01jr89111n40rmed2kptzw7pkh, Reply query error, msg: Pending previous query completion proxyRequestId: 45 2025-04-07T14:03:48.662716Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEwNDAzNzYtNmVjZWEyMWItZjc3ZmM2ODgtYzI0MTg5MTc=, ActorId: [2:7490571692560707374:2522], ActorState: ExecuteState, TraceId: 01jr89111n40rmed2kptzw7pkh, Reply query error, msg: Pending previous query completion proxyRequestId: 46 2025-04-07T14:03:48.666001Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEwNDAzNzYtNmVjZWEyMWItZjc3ZmM2ODgtYzI0MTg5MTc=, ActorId: [2:7490571692560707374:2522], ActorState: ExecuteState, TraceId: 01jr89111n40rmed2kptzw7pkh, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2025-04-07T14:03:48.666951Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEwNDAzNzYtNmVjZWEyMWItZjc3ZmM2ODgtYzI0MTg5MTc=, ActorId: [2:7490571692560707374:2522], ActorState: ExecuteState, TraceId: 01jr89111n40rmed2kptzw7pkh, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2025-04-07T14:03:48.667300Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEwNDAzNzYtNmVjZWEyMWItZjc3ZmM2ODgtYzI0MTg5MTc=, ActorId: [2:7490571692560707374:2522], ActorState: ExecuteState, TraceId: 01jr89111n40rmed2kptzw7pkh, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-04-07T14:03:48.792086Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2NhZmRhNTYtYWRhNDRlODEtNDRjMjI1NmEtMjdkZWQ1NDg=, ActorId: [2:7490571692560707407:2535], ActorState: ExecuteState, TraceId: 01jr89115b8a3k9t9xh07gh2q0, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-07T14:03:48.793019Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2NhZmRhNTYtYWRhNDRlODEtNDRjMjI1NmEtMjdkZWQ1NDg=, ActorId: [2:7490571692560707407:2535], ActorState: ExecuteState, TraceId: 01jr89115b8a3k9t9xh07gh2q0, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-07T14:03:48.793477Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2NhZmRhNTYtYWRhNDRlODEtNDRjMjI1NmEtMjdkZWQ1NDg=, ActorId: [2:7490571692560707407:2535], ActorState: ExecuteState, TraceId: 01jr89115b8a3k9t9xh07gh2q0, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-07T14:03:48.795404Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2NhZmRhNTYtYWRhNDRlODEtNDRjMjI1NmEtMjdkZWQ1NDg=, ActorId: [2:7490571692560707407:2535], ActorState: ExecuteState, TraceId: 01jr89115b8a3k9t9xh07gh2q0, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-07T14:03:48.898185Z node 2 
:KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQ0MjgwNzYtMzJkNGRkNTMtMTQxZDE5NDctY2RjY2E4ZDI=, ActorId: [2:7490571692560707455:2546], ActorState: ExecuteState, TraceId: 01jr89118t6cae7y1hrk962j72, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-07T14:03:48.898726Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQ0MjgwNzYtMzJkNGRkNTMtMTQxZDE5NDctY2RjY2E4ZDI=, ActorId: [2:7490571692560707455:2546], ActorState: ExecuteState, TraceId: 01jr89118t6cae7y1hrk962j72, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-07T14:03:48.899031Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQ0MjgwNzYtMzJkNGRkNTMtMTQxZDE5NDctY2RjY2E4ZDI=, ActorId: [2:7490571692560707455:2546], ActorState: ExecuteState, TraceId: 01jr89118t6cae7y1hrk962j72, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-04-07T14:03:49.001976Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhlNWRkNGYtMzY3M2RmNDYtOTRlMDFlYjAtYzBkYTdkNTc=, ActorId: [2:7490571692560707500:2555], ActorState: ExecuteState, TraceId: 01jr8911c9amdxxtwwj2kb2cmb, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-07T14:03:49.002047Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhlNWRkNGYtMzY3M2RmNDYtOTRlMDFlYjAtYzBkYTdkNTc=, ActorId: [2:7490571692560707500:2555], ActorState: ExecuteState, TraceId: 01jr8911c9amdxxtwwj2kb2cmb, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-07T14:03:49.132765Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRlYzRkODUtZTE4NzIwZGQtN2JiODgxOWUtNTY1Yjc5NDg=, ActorId: [2:7490571696855674831:2565], ActorState: ExecuteState, TraceId: 01jr8911g658nw2tw8pspbdjg7, Reply query error, msg: Pending previous query completion proxyRequestId: 67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 5692, MsgBus: 15618 2025-04-07T14:03:34.976063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630276753682:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.976201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d6/r3tmp/tmprtqxOU/pdisk_1.dat 2025-04-07T14:03:35.806376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.814217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.814425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.817014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5692, node 1 2025-04-07T14:03:36.056953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.057051Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:15618 TClient is connected to server localhost:15618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.980675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.073427Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.086671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.252371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.416158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.490027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.620503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571647456624436:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.620583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.986405Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630276753682:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.986496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.484809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.511088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.536841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.563286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.616152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.696246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656046559537:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.696334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.696519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656046559542:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.700307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.710177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571656046559544:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11397, MsgBus: 28093 2025-04-07T14:03:42.507745Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571668239071842:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.509315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d6/r3tmp/tmpvjTzPs/pdisk_1.dat 2025-04-07T14:03:42.597632Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:42.612371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.612455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.614238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11397, node 2 2025-04-07T14:03:42.656488Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.656509Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.656515Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.656588Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28093 TClient is connected to server localhost:28093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:03:43.092777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:43.101849Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:43.121023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.196709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:43.359775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:43.460453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:45.713058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571681123 ... suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.791777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.823910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.899535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.928940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.997956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:46.098005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685418943151:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.098080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.098600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571685418943156:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:46.101439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:46.113872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571685418943158:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:47.189913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:47.191016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:47.192412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:47.512549Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571668239071842:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:47.512606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28900, MsgBus: 22732 2025-04-07T14:03:56.712758Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571725051800505:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:56.712863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d6/r3tmp/tmpMjolJt/pdisk_1.dat 2025-04-07T14:03:56.796118Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:56.862587Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:56.862690Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:56.864058Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28900, node 3 2025-04-07T14:03:56.909555Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:56.909587Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:56.909595Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:56.909730Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22732 TClient is connected to server localhost:22732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:57.392518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.411348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.485874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.634375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.735683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:00.108525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571742231671402:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.108714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.128499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.152083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.177833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.205471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.233181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.303768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:00.341255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571742231671902:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.341396Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571742231671907:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.341396Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:00.344521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:00.352171Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571742231671909:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:01.396834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.398455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.399712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:01.713884Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571725051800505:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:01.713940Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-04-07T14:03:24.341715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571591430329839:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.341754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011d2/r3tmp/tmpYAKq6N/pdisk_1.dat 2025-04-07T14:03:25.389918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.430674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:25.437651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.437766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.450029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25273, node 1 2025-04-07T14:03:26.633326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.633351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.633357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.633498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15720 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:28.298029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.345475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.347136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.359843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:28.360091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.360192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:03:28.373136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.377505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.377550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:03:28.381414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.401977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.402215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:03:28.409656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.409808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.409857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:03:28.409939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:03:28.409969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:03:28.410009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:03:28.412523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:03:28.412561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:03:28.412579Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.412685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:03:28.567576Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7490571608610199757:2296], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-07T14:03:28.567623Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-07T14:03:28.567649Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.567662Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.567758Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-07T14:03:28.567888Z node 1 :CMS_TENANTS DEBUG: Add tenant 
/Root/users/user-1 (txid = 1744034608564684) 2025-04-07T14:03:28.568326Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744034608564684 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-07T14:03:28.568494Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-07T14:03:28.581117Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:03:28.581871Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608564684&action=1" } } } 2025-04-07T14:03:28.582016Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.582082Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.582218Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.582590Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-07T14:03:28.582709Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T14:03:28.596203Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-07T14:03:28.596258Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.596323Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7490571608610199765:2191], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.596338Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.596350Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.596357Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.596403Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-07T14:03:28.596431Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-07T14:03:28.596473Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-07T14:03:28.598678Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571608610199774:2297], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { 
id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608564684&action=1" } UserToken: "" } 2025-04-07T14:03:28.598701Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.598863Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608564684&action=1" } } 2025-04-07T14:03:28.601503Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:28.601541Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.601549Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.601557Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.601626Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-07T14 ... 594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:03:28.780365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:03:28.780375Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:03:28.783918Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715660 2025-04-07T14:03:28.785662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608827, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.785705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 281474976715660:0 HandleReply TEvOperationPlan, step: 1744034608827, at schemeshard: 72057594046644480 2025-04-07T14:03:28.785760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 134 2025-04-07T14:03:28.788278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 281474976715660:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.788360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 134 -> 135 2025-04-07T14:03:28.793843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.794084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.794142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715660:0 ProgressState 2025-04-07T14:03:28.794166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 135 -> 240 2025-04-07T14:03:28.795364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:03:28.795399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:03:28.795411Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T14:03:28.795576Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:03:28.795596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:03:28.795604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:03:28.810024Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:03:28.810159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:03:28.810217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:03:28.816151Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2025-04-07T14:03:28.816171Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-07T14:03:28.818983Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-07T14:03:28.819076Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7490571608610199955:2191], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-07T14:03:28.819099Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-07T14:03:28.819114Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.819121Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.819158Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-07T14:03:28.819183Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744034608719119 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:28.819223Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744034608719119 issue= 2025-04-07T14:03:28.825958Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571608610200064:2305], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" } UserToken: "" } 2025-04-07T14:03:28.825981Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.826102Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" } } 2025-04-07T14:03:28.832830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:28.832859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:28.833078Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits 
Complete /Root/users/user-1 2025-04-07T14:03:28.833124Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-07T14:03:28.833136Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.833370Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7490571591430330080:2192], Recipient [1:7490571595725297529:2191]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-07T14:03:28.833382Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-07T14:03:28.833399Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.833406Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.833428Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-07T14:03:28.833450Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744034608719119 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:28.835872Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-07T14:03:28.835918Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.835939Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.836032Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.836378Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-07T14:03:28.836788Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-07T14:03:28.840179Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-07T14:03:28.840257Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7490571608610200184:2191], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:28.853123Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:28.853167Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.853178Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.853242Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-07T14:03:28.853272Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-07T14:03:28.892497Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:28.892535Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.892555Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.892563Z node 1 :CMS_TENANTS TRACE: 
TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.892675Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744034608719119 2025-04-07T14:03:28.892689Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744034608719119 issue= 2025-04-07T14:03:28.892701Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744034608719119 issue= 2025-04-07T14:03:28.892713Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-07T14:03:28.901626Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744034608719119 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:28.906943Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571608610200199:2307], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" } UserToken: "" } 2025-04-07T14:03:28.906968Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.907113Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" } } 2025-04-07T14:03:28.917275Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-07T14:03:28.917351Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.973852Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7490571608610200209:2310], Recipient [1:7490571595725297529:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" } UserToken: "" } 2025-04-07T14:03:28.973879Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-07T14:03:28.974039Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608719119&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapClickbench::ClickBenchSmoke [GOOD] Test command err: Trying to start YDB, gRPC: 31377, MsgBus: 27811 2025-04-07T14:01:13.881594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571025348274993:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:13.881646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f0b/r3tmp/tmpzOKs7t/pdisk_1.dat 2025-04-07T14:01:14.525734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:14.527264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:14.527351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:14.531032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 31377, node 1 2025-04-07T14:01:14.665308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:14.665332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:14.665344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:14.665425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27811 TClient is connected to server localhost:27811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:15.367696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:15.426529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:15.551074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.551385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.551785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.551922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.552030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.552151Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.552262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.552381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.552537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.552647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.552774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.552882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571033938210189:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.641313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.641387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.641650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.641762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.641858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.641968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.642069Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.642162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.642728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:15.642847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:15.642925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:15.643026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571033938210193:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:15.681288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:15.681365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:15.681603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:15.681720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:15.681844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:15.681957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:15.682055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:15.682174Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571033938210194:2292];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:15.682296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74905710339382101 ... .cpp:45;event=normalization_finished; 2025-04-07T14:01:44.109646Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:01:44.109732Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:01:44.109844Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:01:44.109885Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:01:44.110060Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:01:44.110106Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:01:44.110197Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:01:44.110239Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:01:44.110305Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:01:44.110342Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:01:44.110389Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:01:44.110428Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:01:44.110709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:01:44.110755Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:01:44.110938Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:01:44.110989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:01:44.111114Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:01:44.111158Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:01:44.111321Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:01:44.111364Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:01:44.111477Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:01:44.111512Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17120;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=17568;columns=105; REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AdvEngineID, COUNT(*) as c FROM `/Root/benchTable` WHERE AdvEngineID != 0 GROUP BY AdvEngineID ORDER BY c DESC 2025-04-07T14:01:49.250141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2097:3241], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:49.250301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2108:3247], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:49.250598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:01:49.258789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:01:49.908115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2114:3250], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:01:50.679683Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr88xcbq2zvrspr1rykd4eev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzUwMjg2NjAtNWUwMDJiZjktMWJmNTM2ZS1lNTUyZWZiMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, avg(ResolutionWidth), COUNT(DISTINCT UserID) FROM `/Root/benchTable` GROUP BY RegionID ORDER BY c DESC LIMIT 10 2025-04-07T14:02:09.140789Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr88xwm7dftcncagt9dn38fg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZjYTE4NDUtZGEyMWM1OTQtZmY1ZjQzN2UtMzgyYzI1YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:02:19.414199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:19.414282Z node 2 :IMPORT WARN: Table profiles were not loaded REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2025-04-07T14:02:29.137525Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr88yjmp5mk23rgqg783b47f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGY1OGMzZDktMzcyNzE4MDAtYzljZTVhZDYtNDliNTY2MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchEngineID, SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10; 2025-04-07T14:02:47.976521Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr88z57p9wdr5c5cvtgrk8j1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjliNTA2MGMtNDdjNjExMzgtMjQyNjE0M2MtNTBhZTg1NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, MIN(URL), MIN(Title), COUNT(*) AS c, COUNT(DISTINCT UserID) FROM `/Root/benchTable` WHERE Title LIKE '%Google%' AND URL NOT LIKE '%.google.%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2025-04-07T14:03:05.943515Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr88zp00bbk8n08kyxrb6yny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2YzNmMzNjctZGQxMjhiYy1hNDQ0MDg2Zi1hNGU0ZTUwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.UseLlvm = "false"; PRAGMA Kikimr.OptUseFinalizeByKey; SELECT TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst, COUNT(*) AS PageViews FROM `/Root/benchTable` WHERE CounterID = 62 AND EventDate >= Date('2013-07-01') AND EventDate <= Date('2013-07-31') AND IsRefresh == 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, IF (SearchEngineID = 0 AND AdvEngineID = 0, Referer, '') AS Src, URL AS Dst ORDER BY PageViews DESC LIMIT 10; 2025-04-07T14:03:23.582843Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jr8907hq1wbr42w0rq11c7qc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODIxMGRmZmYtOWJmYmRlZGEtZWJmNGQ4ZjQtMTIxMWExNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-04-07T14:03:24.397018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571589622354212:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.397795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011ea/r3tmp/tmpebs1uW/pdisk_1.dat 2025-04-07T14:03:25.445705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.447589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.447696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.448910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:25.471509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17484, node 1 2025-04-07T14:03:26.636811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.636841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.636855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.636995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:28.297249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.367450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.368700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.370863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:28.371054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.371067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:03:28.373633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.373662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:03:28.375513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.384450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.397265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608435, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.397304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.397805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:03:28.399973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.400156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.400207Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:03:28.400317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:03:28.400352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:03:28.400402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:03:28.403515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:03:28.403565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:03:28.403580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.403652Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:03:28.557496Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7490571606802224220:2296], Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-07T14:03:28.557540Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-07T14:03:28.557570Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.557595Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.557749Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-07T14:03:28.557864Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744034608556821) 2025-04-07T14:03:28.558383Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744034608556821 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-07T14:03:28.558523Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-07T14:03:28.562495Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:03:28.563064Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608556821&action=1" } } } 2025-04-07T14:03:28.566136Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:28.566221Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-07T14:03:28.566384Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-07T14:03:28.566504Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7490571606802224220:2296], Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608556821&action=1" } UserToken: "" } 2025-04-07T14:03:28.566522Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-04-07T14:03:28.596439Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7490571606802224220:2296] 2025-04-07T14:03:28.596582Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034608556821&action=1" } } 
2025-04-07T14:03:28.596719Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-07T14:03:28.596888Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T14:03:28.601358Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-07T14:03:28.601413Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.601485Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7490571606802224228:2195], Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.601502Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-07T14:03:28.601515Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.601524Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:28.601564Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-07T14:03:28.601605Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-07T14:03:28.601662Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-07T14:03:28.603992Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:28.604022Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:28.604030Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tena ... 
7594037968897 shardIdx 72057594046644480:7 2025-04-07T14:03:29.192100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:4 2025-04-07T14:03:29.192445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:10 2025-04-07T14:03:29.192534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-07T14:03:29.192605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:9 2025-04-07T14:03:29.192656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2025-04-07T14:03:29.192752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2025-04-07T14:03:29.192816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2025-04-07T14:03:29.192863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192880Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.192930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2025-04-07T14:03:29.192937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-04-07T14:03:29.192946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-04-07T14:03:29.193811Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-07T14:03:29.193888Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7490571611097192052:2195], Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.193912Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-07T14:03:29.193927Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.193935Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.193963Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-07T14:03:29.193990Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-07T14:03:29.209137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-07T14:03:29.209176Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-07T14:03:29.217038Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-07T14:03:29.217067Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-07T14:03:29.217074Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.217081Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-07T14:03:29.217126Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744034609118534 2025-04-07T14:03:29.217889Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744034609118534 issue= 2025-04-07T14:03:29.222667Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744034609118534 issue= 2025-04-07T14:03:29.222697Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-07T14:03:29.222769Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744034609118534 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-07T14:03:29.230093Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:03:29.328332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-07T14:03:29.328368Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-07T14:03:29.328389Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-07T14:03:29.328456Z node 1 :HIVE WARN: 
HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-04-07T14:03:29.328544Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2025-04-07T14:03:29.328575Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037895 2025-04-07T14:03:29.328600Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-07T14:03:29.328613Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-07T14:03:29.328627Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-07T14:03:29.328640Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-07T14:03:29.328656Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037892 2025-04-07T14:03:29.333742Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-04-07T14:03:29.333796Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037894 2025-04-07T14:03:29.333827Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-07T14:03:29.333854Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037893 2025-04-07T14:03:29.333949Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037897 2025-04-07T14:03:29.333978Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037896 2025-04-07T14:03:29.396808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571589622354212:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:29.405345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:29.487157Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-07T14:03:29.487792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.487819Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7490571611097191953:2333]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744034609118534&action=2" ready: true status: SUCCESS } } 2025-04-07T14:03:29.487838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:03:29.487906Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-07T14:03:29.500448Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7490571611097192221:2335], 
Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-07T14:03:29.500477Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-07T14:03:29.500623Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-07T14:03:29.506176Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7490571611097192224:2336], Recipient [1:7490571593917321999:2195]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-07T14:03:29.506206Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-07T14:03:29.506373Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-07T14:03:29.513332Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-07T14:03:29.513463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMerge [FAIL] Test command err: Trying to start YDB, gRPC: 9635, MsgBus: 21687 2025-04-07T14:01:48.562520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571178784472809:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:48.562797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e37/r3tmp/tmpWc85HY/pdisk_1.dat 2025-04-07T14:01:49.084420Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:49.106582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:49.106683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9635, node 1 2025-04-07T14:01:49.108359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:01:49.165192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:49.165213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:49.165219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:49.165290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21687 TClient is connected to server localhost:21687 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:49.810702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:49.829990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:50.277148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:53.545889Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571178784472809:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:53.545950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:55.822024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:55.822246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:55.833572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:55.833712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:55.833790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:55.833884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:55.833956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:55.834042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:55.834136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:55.834226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:55.834293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:55.834367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571208849247318:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:55.853124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:55.853197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:55.853403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:55.853549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:55.853652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:55.853745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:55.853839Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:55.853936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:55.854038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:55.854152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:55.854311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:55.854433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571208849246445:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:55.873193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:55.873260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:55.873399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:55.873498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:55.873602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:55.873709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571208849247476:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:55.873817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:749057120884 ... 
ion_start;last_saved_id=16; 2025-04-07T14:03:37.835510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038418;self_id=[1:7490571638346050697:12780];tablet_id=72075186224038418;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.837962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[1:7490571638346050661:12778];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.882187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038612;self_id=[1:7490571638346051082:12868];tablet_id=72075186224038612;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.895493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038126;self_id=[1:7490571638346050972:12847];tablet_id=72075186224038126;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.921606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038633;self_id=[1:7490571638346051128:12878];tablet_id=72075186224038633;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.930982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038562;self_id=[1:7490571642641018781:12965];tablet_id=72075186224038562;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.956057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038619;self_id=[1:7490571638346051239:12902];tablet_id=72075186224038619;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.971048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038356;self_id=[1:7490571638346051111:12876];tablet_id=72075186224038356;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:37.991914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038446;self_id=[1:7490571638346051453:12955];tablet_id=72075186224038446;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.008056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7490571638346051415:12946];tablet_id=72075186224038053;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.020446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;self_id=[1:7490571638346051223:12901];tablet_id=72075186224037989;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.037829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038145;self_id=[1:7490571642641018842:12971];tablet_id=72075186224038145;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.050532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038639;self_id=[1:7490571638346051018:12859];tablet_id=72075186224038639;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.077482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038501;self_id=[1:7490571638346051139:12879];tablet_id=72075186224038501;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.085360Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038785;self_id=[1:7490571638346051299:12917];tablet_id=72075186224038785;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.116940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038883;self_id=[1:7490571638346051298:12916];tablet_id=72075186224038883;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.147290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038531;self_id=[1:7490571642641018912:12993];tablet_id=72075186224038531;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.183366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038842;self_id=[1:7490571638346051383:12937];tablet_id=72075186224038842;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.207388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038557;self_id=[1:7490571642641018927:12994];tablet_id=72075186224038557;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.224792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038388;self_id=[1:7490571638346051311:12924];tablet_id=72075186224038388;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.260134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038261;self_id=[1:7490571642641018814:12969];tablet_id=72075186224038261;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.277714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038576;self_id=[1:7490571642641018890:12989];tablet_id=72075186224038576;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.293291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038279;self_id=[1:7490571638346051426:12947];tablet_id=72075186224038279;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.316709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038463;self_id=[1:7490571642641018975:13001];tablet_id=72075186224038463;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.333929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038277;self_id=[1:7490571642641018786:12968];tablet_id=72075186224038277;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.406490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038482;self_id=[1:7490571642641019051:13026];tablet_id=72075186224038482;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.425534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038280;self_id=[1:7490571642641019068:13027];tablet_id=72075186224038280;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.450978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038137;self_id=[1:7490571642641019201:13060];tablet_id=72075186224038137;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.460530Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038704;self_id=[1:7490571642641018819:12970];tablet_id=72075186224038704;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.487966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038676;self_id=[1:7490571638346051354:12929];tablet_id=72075186224038676;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.493985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038289;self_id=[1:7490571642641019149:13047];tablet_id=72075186224038289;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.514152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038599;self_id=[1:7490571642641018955:12996];tablet_id=72075186224038599;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.533361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038302;self_id=[1:7490571642641019041:13025];tablet_id=72075186224038302;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.545082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038331;self_id=[1:7490571642641019096:13036];tablet_id=72075186224038331;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.576824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038236;self_id=[1:7490571642641018930:12995];tablet_id=72075186224038236;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.594280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7490571642641019100:13037];tablet_id=72075186224038035;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.615470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7490571642641019189:13059];tablet_id=72075186224038060;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:38.634166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038329;self_id=[1:7490571642641019124:13039];tablet_id=72075186224038329;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:39.052164Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr88zyce4jtvy3wzkvcc0zhy", SessionId: ydb://session/3?node_id=1&id=MzhlYzIwYzktM2E1OTI4OWItODM5NjIzMTMtZGRiZjJhNzU=, Slow query, duration: 24.499785s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n SELECT\n COUNT(*)\n FROM `/Root/olapStore/olapTable`\n ", parameters: 0b 2025-04-07T14:03:39.052525Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034618000, txId: 18446744073709551615] shutting down [[0u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17AC162F 4. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:580: Execute_ @ 0x17AC162F 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FBCCB133D8F 18. ??:0: ?? @ 0x7FBCCB133E3F 19. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 27868, MsgBus: 20777 2025-04-07T14:03:34.974557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633255673849:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.974633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010de/r3tmp/tmpURWm84/pdisk_1.dat 2025-04-07T14:03:35.621778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.658967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.659091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.665860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27868, node 1 2025-04-07T14:03:36.056533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056557Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.056645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20777 TClient is connected to server localhost:20777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.874960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.898000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:38.343841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650435543541:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.343931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650435543533:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.344084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.349394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:38.358935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571650435543547:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:39.974603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633255673849:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.974663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.507420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-07T14:03:41.330019Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571663320445654:2334], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:03:41.331047Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjhkNTg5NC04OTY4NmRiNi00NGRlNmRlMS05ZThlMGU5YQ==, ActorId: [1:7490571663320445652:2333], ActorState: ExecuteState, TraceId: 01jr890svk9xjxj1de79s2tpv3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:03:41.376153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19373, MsgBus: 20171 2025-04-07T14:03:42.406122Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571668367493776:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.406214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010de/r3tmp/tmpF2bO6X/pdisk_1.dat 2025-04-07T14:03:42.515851Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:42.541206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.541284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.543107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19373, node 2 2025-04-07T14:03:42.585060Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.585106Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.585113Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.585205Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20171 TClient is connected to server localhost:20171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:03:42.960072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:42.969421Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:45.431756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571681252396292:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.431820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571681252396270:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.432039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:45.437414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:45.463508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571681252396299:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:45.604932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-07T14:03:45.787042Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571681252396494:2328], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:03:45.788091Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmU2NGNkNWEtMzdkZDIwNi00MGJhZDljMS1lNDUzM2QyOQ==, ActorId: [2:7490571681252396492:2327], ActorState: ExecuteState, TraceId: 01jr890y7b5z8ewyqgp0bx4nwp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:03:45.814704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:45.847772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29673, MsgBus: 17665 2025-04-07T14:03:46.865588Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571682760399803:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:46.866150Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010de/r3tmp/tmpHPMdxq/pdisk_1.dat 2025-04-07T14:03:46.996158Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:47.005068Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:47.005152Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:47.006625Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29673, node 3 2025-04-07T14:03:47.087733Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:47.087762Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:47.087785Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:47.087876Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17665 TClient is connected to server localhost:17665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:47.461133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.473438Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:47.487940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.583673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:47.736469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:47.820816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.704408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571695645303398:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.704514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:49.754642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.812213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.847057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.887121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:49.956236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.002029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.098811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571699940271197:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.098895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.099121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571699940271202:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:50.103137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:50.115709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571699940271204:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:03:51.302428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.610114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:51.697485Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmIxZTM5ZmUtNjYwMzQ3ZDktNjkyYjg4MzYtODY1ZDg0YmY=, ActorId: [3:7490571704235239096:2496], ActorState: ExecuteState, TraceId: 01jr8913zh93ezwfnv0g8y7tvj, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017 2025-04-07T14:03:51.866528Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571682760399803:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:51.866605Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-04-07T14:03:24.340936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571590998067361:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:24.340997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0011de/r3tmp/tmpJmjDhd/pdisk_1.dat 2025-04-07T14:03:25.274525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:25.371582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:25.371690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:25.377677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:03:25.378203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27351, node 1 2025-04-07T14:03:26.633337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:26.633364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:26.633379Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:26.633486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:28.297338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.338237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.339003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.360971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:03:28.361195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.361212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:03:28.377133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:28.379522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.379543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:03:28.383127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.403970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608449, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.404009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:03:28.404276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:03:28.406400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.406543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.406585Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:03:28.406671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:03:28.406704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:03:28.406740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:03:28.409612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:03:28.409657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:03:28.409672Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:03:28.409744Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:03:28.612337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/users, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.612478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.612504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/users/user-1, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.612873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.612894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.623918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/users/user-1 2025-04-07T14:03:28.624107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.627688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.627984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:1 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.627996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 2 -> 3 2025-04-07T14:03:28.628224Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.629565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.629596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.629613Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:03:28.629800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.629817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.629828Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:03:28.629945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.629960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.629968Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2025-04-07T14:03:28.630335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:03:28.633827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.633852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 3 -> 128 2025-04-07T14:03:28.635902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.639529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608687, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.639576Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744034608687, at schemeshard: 72057594046644480 2025-04-07T14:03:28.639676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T14:03:28.639792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:1, at tablet# 72057594046644480 2025-04-07T14:03:28.639895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2025-04-07T14:03:28.641838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.642043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:03:28.642084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2025-04-07T14:03:28.642300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 1/2 2025-04-07T14:03:28.642440Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:03:28.642495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 2/2 2025-04-07T14:03:28.642518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:03:28.642544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2025-04-07T14:03:28.642561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 3, subscribers: 1 2025-04-07T14:03:28.645310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.645345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.645359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:03:28.645549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.645568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.645577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T14:03:28.645693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:03:28.645713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:03:28.645722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:03:28.645753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T14:03:28.653303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/users/user-1, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.653533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:03:28.653551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.656171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/users/user-1 2025-04-07T14:03:28.656288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:03:28.656300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T14:03:28.657832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:03:28.660094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:03:28.660116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2025-04-07T14:03:28.669505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:03:28.683797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034608722, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:03:28.683830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715659:0, at tablet# 72057594046644480 2025-04-07T14:03:28.683999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T14:03:28.715311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:03:28.715459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046644480 2025-04-07T14:03:28.715502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:03:28.715568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:03:28.715601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:03:28.715642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T14:03:28.716464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:03:28.716495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:03:28.716509Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-07T14:03:28.716575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |90.9%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> KqpService::SwitchCache+UseCache >> KqpQueryService::ExecuteQueryPg >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 62742, MsgBus: 12962 2025-04-07T14:03:34.975554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630414436273:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00110b/r3tmp/tmp3LvFeT/pdisk_1.dat 2025-04-07T14:03:35.685758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.685837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.687217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.744919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62742, node 1 2025-04-07T14:03:36.059421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.059440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.059446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.059514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12962 TClient is connected to server localhost:12962 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.855321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:36.938692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:37.134600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.363912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.463989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.707024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571647594307020:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.707112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.000376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630414436273:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:40.000541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.542005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.574877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.627132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.662519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.697815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.768136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656184242126:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.768202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.768271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571656184242131:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.770976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.782564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571656184242133:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 14473, MsgBus: 14048 2025-04-07T14:03:43.602853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571671439128338:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:43.663571Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00110b/r3tmp/tmp4VYdCm/pdisk_1.dat 2025-04-07T14:03:43.752461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:43.784436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:43.785846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:43.790360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14473, node 2 2025-04-07T14:03:43.853200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:43.853222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:43.853229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:43.853322Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14048 TClient is connected to server localhost:14048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.323418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.334635Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:44.346467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.424307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:44.656552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.741843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:46.962860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571684324031771:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... =RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.142456Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.142542Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:04.142554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:04.142559Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:04.142568Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:04.142645Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:04.142663Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:04.142830Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.142853Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.142966Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:04.142989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:04.143097Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:04.143119Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:04.143158Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:04.143174Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:04.143273Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:04.143291Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:04.143328Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:04.143366Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:04.143401Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:04.143417Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:04.143442Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:04.143462Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:04.143728Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:04.143754Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:04.143840Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:04.143861Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:04.143957Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:04.143977Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:04.144082Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.144099Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.144147Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2025-04-07T14:04:04.144160Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:04.144574Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:04.144598Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:04.144653Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:04.144699Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:04.144806Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:04.144823Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:04.144884Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:04.144904Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:04.144941Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:04.144957Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:04.144973Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:04.144984Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:04.145174Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:04.145196Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:04.145299Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:04.145333Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:04.145417Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:04.145451Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:04.145569Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.145587Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.145649Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:04.145664Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> KqpQueryService::TableSink_Htap+withOltpSink >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpQueryService::DdlUser |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> KqpQueryServiceScripts::ValidateScript >> KqpQueryService::ReplaceIntoWithDefaultValue |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDocumentApi::RestrictWrite >> KqpDataIntegrityTrails::Upsert+LogEnabled >> KqpDataIntegrityTrails::Upsert-LogEnabled >> KqpQueryService::ExecStats >> KqpService::Shutdown >> 
KqpQueryServiceScripts::ExecuteScriptStatsProfile |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpQueryService::SessionFromPoolError |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |91.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::ChangeSchemaAndSplit [FAIL] Test command err: Trying to start YDB, gRPC: 16353, MsgBus: 61868 2025-04-07T14:02:07.756973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571258151536209:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:07.757134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cb1/r3tmp/tmpr8tfTy/pdisk_1.dat 2025-04-07T14:02:09.003813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:09.003912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:09.020790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:09.031358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:02:09.031896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 16353, node 1 2025-04-07T14:02:09.602622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:09.602645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:09.602654Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:09.602747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61868 TClient is connected to server localhost:61868 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:11.641465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:11.728375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:12.628009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:12.722104Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571258151536209:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:12.722172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:20.015635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.015927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.016134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.016257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.016368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.016488Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.016606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.016747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.016855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:20.017005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.017118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:20.017218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7490571309691147401:2317];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:20.093705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.093774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.093942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.094048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.094158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.094272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:20.094427Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:20.094528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:20.096166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:20.096318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:20.096459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:20.096562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571309691147455:2319];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:20.113135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:20.113212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:20.113420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:20.113892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:20.114072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:20.114237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571309691147402:2318];tablet_id=72075186224037919;proces ... 
art;last_saved_id=16; 2025-04-07T14:03:56.634512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;self_id=[1:7490571717713120322:13120];tablet_id=72075186224038693;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.665917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038602;self_id=[1:7490571717713120362:13127];tablet_id=72075186224038602;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.681984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038613;self_id=[1:7490571717713120323:13121];tablet_id=72075186224038613;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.707412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038478;self_id=[1:7490571717713120404:13133];tablet_id=72075186224038478;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.734773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038238;self_id=[1:7490571717713120299:13118];tablet_id=72075186224038238;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.746424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;self_id=[1:7490571722008087901:13170];tablet_id=72075186224038692;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.771159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038542;self_id=[1:7490571717713120293:13117];tablet_id=72075186224038542;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.809153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038575;self_id=[1:7490571717713120534:13153];tablet_id=72075186224038575;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.842740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038853;self_id=[1:7490571717713120377:13129];tablet_id=72075186224038853;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.868729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038486;self_id=[1:7490571717713120502:13146];tablet_id=72075186224038486;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.871621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038599;self_id=[1:7490571717713120485:13145];tablet_id=72075186224038599;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.903812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038342;self_id=[1:7490571717713120458:13138];tablet_id=72075186224038342;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.912856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038499;self_id=[1:7490571717713120447:13137];tablet_id=72075186224038499;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.932166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038551;self_id=[1:7490571717713120428:13134];tablet_id=72075186224038551;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.971348Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038755;self_id=[1:7490571722008087904:13171];tablet_id=72075186224038755;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:56.974609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038897;self_id=[1:7490571722008087944:13173];tablet_id=72075186224038897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.005511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038788;self_id=[1:7490571722008088019:13195];tablet_id=72075186224038788;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.013693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7490571717713120508:13148];tablet_id=72075186224038053;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.037544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038684;self_id=[1:7490571717713120544:13154];tablet_id=72075186224038684;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.048369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038889;self_id=[1:7490571722008088060:13204];tablet_id=72075186224038889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.077158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038137;self_id=[1:7490571722008087905:13172];tablet_id=72075186224038137;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.079166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038777;self_id=[1:7490571722008087973:13183];tablet_id=72075186224038777;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.120763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038766;self_id=[1:7490571722008087882:13165];tablet_id=72075186224038766;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.125606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038597;self_id=[1:7490571722008088086:13212];tablet_id=72075186224038597;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.164091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038654;self_id=[1:7490571722008088031:13196];tablet_id=72075186224038654;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.177827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038166;self_id=[1:7490571722008088159:13234];tablet_id=72075186224038166;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.201018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038783;self_id=[1:7490571722008088201:13245];tablet_id=72075186224038783;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.219153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038387;self_id=[1:7490571722008088304:13283];tablet_id=72075186224038387;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.247941Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038365;self_id=[1:7490571722008088148:13233];tablet_id=72075186224038365;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.284570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038875;self_id=[1:7490571722008088403:13304];tablet_id=72075186224038875;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.301667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038426;self_id=[1:7490571722008088271:13274];tablet_id=72075186224038426;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.334752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038217;self_id=[1:7490571722008088190:13244];tablet_id=72075186224038217;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.345582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038479;self_id=[1:7490571722008088318:13285];tablet_id=72075186224038479;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.379433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038450;self_id=[1:7490571722008088360:13294];tablet_id=72075186224038450;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.385983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038433;self_id=[1:7490571722008088461:13320];tablet_id=72075186224038433;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.422333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;self_id=[1:7490571722008088391:13303];tablet_id=72075186224038728;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.425050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038660;self_id=[1:7490571722008088333:13286];tablet_id=72075186224038660;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:57.467536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490571722008088449:13319];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:03:58.314892Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr890gmvc8w0cb8wma30xx67", SessionId: ydb://session/3?node_id=1&id=MjAxMTcyZWMtN2I5ZTNkMTMtNjA4OWUzNzYtNjY2YmZhMmU=, Slow query, duration: 25.213754s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n SELECT\n COUNT(*)\n FROM `/Root/olapStore/olapTable`\n ", parameters: 0b 2025-04-07T14:03:58.315500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034637000, txId: 18446744073709551615] shutting down [[9974u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[9974u]]|[[20000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABC328 4. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:528: Execute_ @ 0x17ABC328 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F5A23523D8F 18. ??:0: ?? @ 0x7F5A23523E3F 19. ??:0: ?? @ 0x15851028 >> KqpYql::NonStrictDml >> KqpYql::RefSelect |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] Test command err: Trying to start YDB, gRPC: 20348, MsgBus: 27886 2025-04-07T14:03:34.975366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632547923685:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112b/r3tmp/tmpeOMyrE/pdisk_1.dat 2025-04-07T14:03:35.744405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.766768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.766845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.776403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20348, node 1 2025-04-07T14:03:36.062601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.062627Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.062646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.062714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27886 TClient is connected to server localhost:27886 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.932855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.969133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.982536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.159970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.344110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:03:37.428691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:03:38.623219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571649727794421:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.623337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632547923685:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.479222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.501333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.530610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.557584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.580311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.665408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658317729518:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.665521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.666045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658317729524:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.669239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.678726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658317729526:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.588116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.589911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.591616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:50.732144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:03:50.732169Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 8512, MsgBus: 5398 2025-04-07T14:03:52.467246Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571711262393562:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:52.467329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112b/r3tmp/tmpACK8zC/pdisk_1.dat 2025-04-07T14:03:52.594659Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:52.632455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:52.632552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:52.637219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8512, node 2 2025-04-07T14:03:52.730788Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:52.730813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:52.730819Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:52.730930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5398 TClient is connected to server localhost:5398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:03:53.276739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:53.285867Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:03:53.296962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 wait ... OperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.688109Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.707794Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.724009Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.739144Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.794935Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.819116Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.834205Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.849952Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:58.881715Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-07T14:03:59.059015Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: a213197c-7829d264-198c1cda-52939a9e, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZjczMzUwNmUtNGMxOGE2NTUtYzRkNTFmOTYtODgyOTljZGI=, TxId: 2025-04-07T14:03:59.069172Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a213197c-7829d264-198c1cda-52939a9e, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 11298, MsgBus: 12116 2025-04-07T14:04:00.228702Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571743972073603:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:00.228766Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112b/r3tmp/tmpX9h2WG/pdisk_1.dat 2025-04-07T14:04:00.330920Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:00.348433Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:00.348527Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:00.350088Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11298, node 3 2025-04-07T14:04:00.386857Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:00.386885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:00.386897Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:00.387022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12116 TClient is connected to server localhost:12116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:00.832830Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:00.850026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:00.923742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:01.111963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:01.184226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:03.438550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571756856977207:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:03.438612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:03.480748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.508844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.537570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.566716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.596758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.669289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:03.711394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571756856977704:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:03.711480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:03.711641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571756856977709:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:03.714908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:03.723906Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571756856977711:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:04.758796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:04.759933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:04.761248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:05.296693Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571743972073603:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:05.303368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:07.606865Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 95f792e7-ff8ff2a9-1c1ea3dd-9fff5579, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=3&id=NzA5ODhjOWEtZjE3ZjdlOWYtZWQ0ZjliMzEtZTViMjc0NzA=, TxId: 2025-04-07T14:04:07.643545Z node 3 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 95f792e7-ff8ff2a9-1c1ea3dd-9fff5579, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=3&id=OTJhZmY3YjQtNmVhYjE4NDYtZGJkN2Q0NTUtM2E3MWY0N2M=, TxId: 2025-04-07T14:04:07.681406Z node 3 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 95f792e7-ff8ff2a9-1c1ea3dd-9fff5579, reply NOT_FOUND, issues: {
: Error: No such execution } >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming >> KqpService::ToDictCache-UseCache [GOOD] |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::UpdatePk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 27561, MsgBus: 2728 2025-04-07T14:03:34.975322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571630916347916:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001113/r3tmp/tmpmO5UXb/pdisk_1.dat 2025-04-07T14:03:35.730133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.730257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.735399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.790034Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27561, node 1 2025-04-07T14:03:36.056533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.056676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2728 TClient is connected to server localhost:2728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.868741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.516482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648096217634:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.516486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648096217630:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.516544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648096217633:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.516647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648096217635:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.516782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571648096217610:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.516860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.520011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:38.531972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571648096217642:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:38.531972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571648096217643:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:38.532027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571648096217644:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:38.532045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571648096217645:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:03:39.975298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571630916347916:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 5.009173s took: 5.014677s took: 5.019099s took: 5.019890s took: 0.741005s took: 0.741559s took: 0.741584s took: 0.743682s took: 0.721129s took: 0.721630s took: 0.724655s took: 0.739596s took: 0.878524s took: 0.880308s took: 0.883258s took: 0.883653s took: 0.770635s took: 0.773001s took: 0.773849s took: 0.773332s took: 0.746454s took: 0.746659s took: 0.749230s took: 0.766794s took: 0.860917s took: 0.862803s took: 0.865384s took: 0.867082s took: 0.773368s took: 0.775348s took: 0.776603s took: 0.777821s took: 0.817258s took: 0.817146s took: 0.818810s took: 0.822016s took: 0.902375s took: 0.905198s took: 0.910976s took: 0.921337s took: 0.778465s took: 0.778402s took: 0.781987s took: 0.783129s 2025-04-07T14:03:50.704254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:03:50.704281Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.757958s took: 0.756833s took: 0.757211s took: 0.760496s took: 0.940036s took: 0.943056s took: 0.944202s took: 0.946750s took: 0.802763s took: 0.805839s took: 0.805926s took: 0.806584s took: 0.842885s took: 0.842881s took: 0.843743s took: 0.841022s took: 0.790194s took: 0.790308s took: 0.790140s took: 0.791663s Trying to start YDB, gRPC: 65005, MsgBus: 20721 2025-04-07T14:03:55.023931Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571722629953268:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:55.023994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001113/r3tmp/tmpTetqC0/pdisk_1.dat 2025-04-07T14:03:55.135547Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:55.154471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:55.154556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:55.157747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65005, node 2 2025-04-07T14:03:55.206936Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:55.206959Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:55.206967Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:55.207068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20721 TClient is connected to server localhost:20721 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:55.666049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:57.930792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571731219888506:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.930823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571731219888504:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.930855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571731219888502:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.930897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571731219888482:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.930905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571731219888505:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.931045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:57.933787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:03:57.942482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571731219888515:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:57.942484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571731219888516:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:57.942534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571731219888517:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:03:57.942581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571731219888514:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } took: 3.049375s took: 3.049875s took: 3.050645s took: 3.051442s took: 0.679214s took: 0.680134s took: 0.680121s took: 0.680122s 2025-04-07T14:04:00.025876Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571722629953268:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:00.025983Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.683611s took: 0.684090s took: 0.684939s took: 0.686187s took: 0.638298s took: 0.638962s took: 0.639222s took: 0.639732s took: 0.715983s took: 0.716686s took: 0.716677s took: 0.717356s took: 0.747551s took: 0.748806s took: 0.748920s took: 0.749961s took: 0.720400s took: 0.722063s took: 0.723173s took: 0.722844s took: 0.670959s took: 0.671517s took: 0.674891s took: 0.678645s took: 0.696917s took: 0.697898s took: 0.698832s took: 0.699261s took: 0.510709s took: 0.511474s took: 0.512048s took: 0.512130s took: 0.657198s took: 0.657768s took: 0.657879s took: 0.658966s took: 0.894158s took: 0.895921s took: 0.898737s took: 0.900332s took: 1.021175s took: 1.022032s took: 1.025566s took: 1.158674s took: 0.962049s took: 0.965347s took: 0.965053s took: 0.968571s took: 1.061743s took: 1.062750s took: 1.069046s took: 1.076956s 2025-04-07T14:04:10.124927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:10.124953Z node 2 :IMPORT WARN: Table profiles were not loaded took: 0.997396s took: 0.996497s took: 0.996109s took: 1.000530s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 16919, MsgBus: 6839 2025-04-07T14:03:34.976860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633663261465:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.976942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f8/r3tmp/tmptDt5EO/pdisk_1.dat 2025-04-07T14:03:35.714142Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.717281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.717359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.719584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16919, node 1 2025-04-07T14:03:36.060494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.060510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.060514Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.060590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6839 TClient is connected to server localhost:6839 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.835735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.417270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650843131151:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.417368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.976937Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633663261465:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.977005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.468020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.598323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.598575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.598815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.598907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.599005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.599115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.599221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.599325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.599431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.599522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.599623Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.599712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571659433065892:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.602332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.602372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.602589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.602687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.602791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.602898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:40.603010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:40.603127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:40.603222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:03:40.603302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:03:40.603388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:03:40.603473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7490571659433065883:2312];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:03:40.647584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490571659433065888:2314];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:40.647860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490571659433065888:2314];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:40.648056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490571659433065888:2314];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:40.648139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490571659433065888:2314];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:40.648225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490571659433065888:2314];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:40.648301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=7207518622403 ... torePortionFromChunks;id=10; 2025-04-07T14:04:04.351768Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:04.351964Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:04.351997Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:04.352145Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:04.352179Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:04.352360Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.352390Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.352495Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:04.352521Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:04.357108Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:04.357167Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:04.357294Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:04.357360Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:04.357554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:04.357592Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:04.357704Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:04.357756Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:04.357826Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:04.357852Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:04.357893Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:04.357927Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:04.358224Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:04.358257Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:04.358416Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:04.358445Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:04.358586Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:04.358615Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:04.358806Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:04.358835Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:04.358944Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:04.358969Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:04.409131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:04:04.456308Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571760753308193:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:04.456368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:04.456456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571760753308198:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:04.459026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-07T14:04:04.466025Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571760753308200:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-07T14:04:05.632495Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571743573437325:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:05.636365Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:07.379432Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037909;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:07.379949Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;local_tx_no=27;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037906;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:07.381450Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037905;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 3 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:10.056161Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;local_tx_no=41;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037909;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 20, 40 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:10.060195Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037901;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 30 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:10.060621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037900;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 10 ] 
;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> KqpQueryService::ReadManyRanges >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpUpsert >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpScripting::ScanQueryInvalid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] Test command err: Trying to start YDB, gRPC: 24776, MsgBus: 8665 2025-04-07T14:02:20.989479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571314087029075:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:20.989525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c51/r3tmp/tmp361iHi/pdisk_1.dat 2025-04-07T14:02:21.699953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:21.700038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:21.708494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:21.710946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24776, node 1 2025-04-07T14:02:21.962059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:21.962080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:21.962088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:21.962171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8665 TClient is connected to server localhost:8665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:22.909401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:02:23.666365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:25.992777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571314087029075:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:25.992840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:02:31.132374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:31.133743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:31.133999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:31.134168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:31.134286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:31.134409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:31.134526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:31.134692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:31.134833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:31.134975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:31.135084Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:31.135241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571357036704671:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:31.136828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:31.136863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:31.137041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:31.137136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:31.137235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:31.137342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:31.137433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:31.137517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:31.137605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:31.137687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:31.137793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:31.137891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7490571357036704667:2315];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:31.206089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:31.206778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:31.206914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:31.207033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:31.207127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:31.207210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:31.207326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571357036704674:2321];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormali ... 
ization_start;last_saved_id=16; 2025-04-07T14:04:00.087838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7490571734993919536:12605];tablet_id=72075186224038020;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.100551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7490571734993919596:12618];tablet_id=72075186224038000;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.119731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038442;self_id=[1:7490571734993919525:12604];tablet_id=72075186224038442;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.135269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038840;self_id=[1:7490571734993919500:12598];tablet_id=72075186224038840;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.148817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038322;self_id=[1:7490571734993919418:12586];tablet_id=72075186224038322;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.167490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038708;self_id=[1:7490571734993919580:12616];tablet_id=72075186224038708;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.210134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038667;self_id=[1:7490571734993919714:12646];tablet_id=72075186224038667;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.247570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038738;self_id=[1:7490571734993919388:12582];tablet_id=72075186224038738;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.265717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038351;self_id=[1:7490571734993919569:12615];tablet_id=72075186224038351;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.281768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038448;self_id=[1:7490571734993919777:12658];tablet_id=72075186224038448;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.306988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038488;self_id=[1:7490571734993919644:12631];tablet_id=72075186224038488;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.311817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;self_id=[1:7490571734993919893:12687];tablet_id=72075186224038269;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.346893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7490571734993919669:12636];tablet_id=72075186224038063;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.388410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038874;self_id=[1:7490571734993919701:12645];tablet_id=72075186224038874;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.406822Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038783;self_id=[1:7490571734993919869:12685];tablet_id=72075186224038783;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.427277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[1:7490571734993919742:12655];tablet_id=72075186224037992;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.441251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038541;self_id=[1:7490571734993919762:12657];tablet_id=72075186224038541;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.467190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038375;self_id=[1:7490571734993919632:12630];tablet_id=72075186224038375;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.488983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038630;self_id=[1:7490571734993919924:12695];tablet_id=72075186224038630;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.510772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038717;self_id=[1:7490571734993919844:12679];tablet_id=72075186224038717;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.528613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038481;self_id=[1:7490571734993919975:12706];tablet_id=72075186224038481;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.549698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038213;self_id=[1:7490571739288887408:12745];tablet_id=72075186224038213;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.568412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038518;self_id=[1:7490571734993919866:12684];tablet_id=72075186224038518;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.585728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038299;self_id=[1:7490571734993920062:12731];tablet_id=72075186224038299;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.615349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038216;self_id=[1:7490571739288887506:12760];tablet_id=72075186224038216;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.622821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038507;self_id=[1:7490571734993919987:12707];tablet_id=72075186224038507;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.661182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[1:7490571734993920034:12727];tablet_id=72075186224038002;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.709238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038190;self_id=[1:7490571739288887464:12753];tablet_id=72075186224038190;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.717570Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038674;self_id=[1:7490571739288887441:12748];tablet_id=72075186224038674;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.746689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038519;self_id=[1:7490571739288887430:12747];tablet_id=72075186224038519;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.767716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7490571739288887495:12759];tablet_id=72075186224038006;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.788016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038170;self_id=[1:7490571739288887398:12743];tablet_id=72075186224038170;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.799090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038530;self_id=[1:7490571739288887536:12770];tablet_id=72075186224038530;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.842217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038462;self_id=[1:7490571739288887558:12775];tablet_id=72075186224038462;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.881269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038157;self_id=[1:7490571739288887596:12783];tablet_id=72075186224038157;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.910725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038564;self_id=[1:7490571739288887556:12774];tablet_id=72075186224038564;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.945295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038631;self_id=[1:7490571739288887651:12800];tablet_id=72075186224038631;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:00.968547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038508;self_id=[1:7490571739288887623:12791];tablet_id=72075186224038508;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:01.702467Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr890k6x9qz186wrzsww52dq", SessionId: ydb://session/3?node_id=1&id=ZTBmNzRkMzAtOGFkYTk2OGMtMTE3YTA2YjEtOTZmYmI0Y2Y=, Slow query, duration: 25.618079s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n SELECT\n COUNT(*)\n FROM `/Root/olapStore/olapTable`\n ", parameters: 0b 2025-04-07T14:04:01.703067Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034639000, txId: 18446744073709551615] shutting down [[2u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[2u]]|[[3u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABE84F 4. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:545: Execute_ @ 0x17ABE84F 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F32EE7B2D8F 18. ??:0: ?? @ 0x7F32EE7B2E3F 19. ??:0: ?? @ 0x15851028 >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled [GOOD] >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlSecret >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::SessionFromPoolSuccess >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] Test command err: Trying to start YDB, gRPC: 28653, MsgBus: 11855 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00113a/r3tmp/tmpEKdZhg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28653, node 1 TClient is connected to server localhost:11855 TClient is connected to server localhost:11855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... 
waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] Test command err: Trying to start YDB, gRPC: 4954, MsgBus: 20275 2025-04-07T14:03:34.975381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571634275088629:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106e/r3tmp/tmpKK1zFZ/pdisk_1.dat 2025-04-07T14:03:35.611441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.658240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.658345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.660043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4954, node 1 2025-04-07T14:03:35.686933Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:35.686988Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:03:36.058799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.058829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.058833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.058911Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20275 TClient is connected to server localhost:20275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.022535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.033996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.042891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.188630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.344992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.431185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.559753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651454959384:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.559862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571634275088629:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.454441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.489421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.515039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.572033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.598835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.633713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.682469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660044894480:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.682524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.682652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571660044894485:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.685423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.693624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571660044894487:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.651400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.654847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.659651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:44.025923Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:03:44.026797Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624066, txId: 281474976710705] shutting down 2025-04-07T14:03:44.230182Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:03:44.231110Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624269, txId: 281474976710708] shutting down 2025-04-07T14:03:44.521339Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:03:44.521562Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490571677224765292:2806] TxId: 281474976710712. Ctx: { TraceId: 01jr890wr38c22kqczfc4fm341, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhiZjJjNzQtMmIwMTk0ZTItYjFhZmUwMTgtOWExMThmZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:03:44.522296Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzhiZjJjNzQtMmIwMTk0ZTItYjFhZmUwMTgtOWExMThmZDM=, ActorId: [1:7490571677224765267:2806], ActorState: ExecuteState, TraceId: 01jr890wr38c22kqczfc4fm341, Create QueryResponse for error on request, msg: 2025-04-07T14:03:44.522593Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624549, txId: 281474976710711] shutting down 2025-04-07T14:03:44.523126Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571677224765297:2811], TxId: 281474976710712, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr890wr38c22kqczfc4fm341. SessionId : ydb://session/3?node_id=1&id=NzhiZjJjNzQtMmIwMTk0ZTItYjFhZmUwMTgtOWExMThmZDM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571677224765292:2806], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:03:44.524502Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571677224765296:2810], TxId: 281474976710712, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzhiZjJjNzQtMmIwMTk0ZTItYjFhZmUwMTgtOWExMThmZDM=. TraceId : 01jr890wr38c22kqczfc4fm341. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571677224765292:2806], status: ABORTED, reason: {
: Error: Terminate execution } Trying to start YDB, gRPC: 29386, MsgBus: 9546 2025-04-07T14:03:45.880915Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571679944927230:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:45.881040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106e/r3tmp/tmpVzJLZP/pdisk_1.dat 2025-04-07T14:03:45.981288Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:46.010338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:46.010430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:46.012613Z nod ... |idx:1092|>,<|idx:1093|>,<|idx:1094|>,<|idx:1095|>,<|idx:1096|>,<|idx:1097|>,<|idx:1098|>,<|idx:1099|>,<|idx:1100|>,<|idx:1101|>,<|idx:1102|>,<|idx:1103|>,<|idx:1104|>,<|idx:1105|>,<|idx:1106|>,<|idx:1107|>,<|idx:1108|>,<|idx:1109|>,<|idx:1110|>,<|idx:1111|>,<|idx:1112|>,<|idx:1113|>,<|idx:1114|>,<|idx:1115|>,<|idx:1116|>,<|idx:1117|>,<|idx:1118|>,<|idx:1119|>,<|idx:1120|>,<|idx:1121|>,<|idx:1122|>,<|idx:1123|>,<|idx:1124|>,<|idx:1125|>,<|idx:1126|>,<|idx:1127|>,<|idx:1128|>,<|idx:1129|>,<|idx:1130|>,<|idx:1131|>,<|idx:1132|>,<|idx:1133|>,<|idx:1134|>,<|idx:1135|>,<|idx:1136|>,<|idx:1137|>,<|idx:1138|>,<|idx:1139|>,<|idx:1140|>,<|idx:1141|>,<|idx:1142|>,<|idx:1143|>,<|idx:1144|>,<|idx:1145|>,<|idx:1146|>,<|idx:1147|>,<|idx:1148|>,<|idx:1149|>,<|idx:1150|>,<|idx:1151|>,<|idx:1152|>,<|idx:1153|>,<|idx:1154|>,<|idx:1155|>,<|idx:1156|>,<|idx:1157|>,<|idx:1158|>,<|idx:1159|>,<|idx:1160|>,<|idx:1161|>,<|idx:1162|>,<|idx:1163|>,<|idx:1164|>,<|idx:1165|>,<|idx:1166|>,<|idx:1167|>,<|idx:1168|>,<|idx:1169|>,<|idx:1170|>,<|idx:1171|>,<|idx:1172|>,<|idx:1173|>,<|idx:1174|>,<|idx:1175|>,<|idx:1176|>,<|idx:1177|>,<|idx:1178|>,<|idx:1179|>,<|idx:1180|>,<|idx:1181|>,<|idx:1182|>,<|idx:1183|>,<|idx:1184|>,<|idx:1185|>,<|idx:1186|>,<|idx:1187|>,<|idx:1188|>,<|idx:1189|>,<|idx:1190|>,<|idx:1191|>,<|idx:1192|>,<|idx:1193|>,<|idx:1194|>,<|idx:1195|>,<|idx:1196|>,<|idx:1197|>,<|idx:1198|>,<|idx:1199|>,<|idx:1200|>,<|idx:1201|>,<|idx:1202|>,<|idx:1203|>,<|idx:1204|>,<|idx:1205|>,<|idx:1206|>,<|idx:1207|>,<|idx:1208|>,<|idx:1209|>,<|idx:1210|>,<|idx:1211|>,<|idx:1212|>,<|idx:1213|>,<|idx:1214|>,<|idx:1215|>,<|idx:1216|>,<|idx:1217|>,<|idx:1218|>,<|idx:1219|>,<|idx:1220|>,<|idx:1221|>,<|idx:1222|>,<|idx:1223|>,<|idx:1224|>,<|idx:1225|>,<|idx:1226|>,<|idx:1227|>,<|idx:1228|>,<|idx:1229|>,<|idx:1230|>,<|idx:1231|>,<|idx:1232|>,<|idx:1233|>,<|idx:1234|>,<|idx:1235|>,<|idx:1236|>,<|idx:1237|>,<|idx:1238|>,<|idx:1239|>,<|idx:1240|>,<|idx:1241|>,<|idx:1242|>,<|idx:1243|>,<|idx:1244|>,<|idx:1245|>,<|idx:1246|>,<|idx:1247|>,<|idx:1248|>,<|idx:1249|>,<|idx:1250|>,<|idx:1251|>,<|idx:1252|>,<|idx:1253|>,<|idx:1254|>,<|idx:1255|>,<|idx:1256|>,<|idx:1257|>,<|idx:1258|>,<|idx:1259|>,<|idx:1260|>,<|idx:1261|>,<|idx:1262|>,<|idx:1263|>,<|idx:1264|>,<|idx:1265|>,<|idx:1266|>,<|idx:1267|>,<|idx:1268|>,<|idx:1269|>,<|idx:1270|>,<|idx:1271|>,<|idx:1272|>,<|idx:1273|>,<|idx:1274|>,<|idx:1275|>,<|idx:1276|>,<|idx:1277|>,<|idx:1278|>,<|idx:1279|>,<|idx:1280|>,<|idx:1281|>,<|idx:1282|>,<|idx:1283|>,<|idx:1284|>,<|idx:1285|>,<|idx:1286|>,<|idx:1287|>,<|idx:1288|>,<|idx:1289|>,<
|idx:1290|>,<|idx:1291|>,<|idx:1292|>,<|idx:1293|>,<|idx:1294|>,<|idx:1295|>,<|idx:1296|>,<|idx:1297|>,<|idx:1298|>,<|idx:1299|>,<|idx:1300|>,<|idx:1301|>,<|idx:1302|>,<|idx:1303|>,<|idx:1304|>,<|idx:1305|>,<|idx:1306|>,<|idx:1307|>,<|idx:1308|>,<|idx:1309|>,<|idx:1310|>,<|idx:1311|>,<|idx:1312|>,<|idx:1313|>,<|idx:1314|>,<|idx:1315|>,<|idx:1316|>,<|idx:1317|>,<|idx:1318|>,<|idx:1319|>,<|idx:1320|>,<|idx:1321|>,<|idx:1322|>,<|idx:1323|>,<|idx:1324|>,<|idx:1325|>,<|idx:1326|>,<|idx:1327|>,<|idx:1328|>,<|idx:1329|>,<|idx:1330|>,<|idx:1331|>,<|idx:1332|>,<|idx:1333|>,<|idx:1334|>,<|idx:1335|>,<|idx:1336|>,<|idx:1337|>,<|idx:1338|>,<|idx:1339|>,<|idx:1340|>,<|idx:1341|>,<|idx:1342|>,<|idx:1343|>,<|idx:1344|>,<|idx:1345|>,<|idx:1346|>,<|idx:1347|>,<|idx:1348|>,<|idx:1349|>,<|idx:1350|>,<|idx:1351|>,<|idx:1352|>,<|idx:1353|>,<|idx:1354|>,<|idx:1355|>,<|idx:1356|>,<|idx:1357|>,<|idx:1358|>,<|idx:1359|>,<|idx:1360|>,<|idx:1361|>,<|idx:1362|>,<|idx:1363|>,<|idx:1364|>,<|idx:1365|>,<|idx:1366|>,<|idx:1367|>,<|idx:1368|>,<|idx:1369|>,<|idx:1370|>,<|idx:1371|>,<|idx:1372|>,<|idx:1373|>,<|idx:1374|>,<|idx:1375|>,<|idx:1376|>,<|idx:1377|>,<|idx:1378|>,<|idx:1379|>,<|idx:1380|>,<|idx:1381|>,<|idx:1382|>,<|idx:1383|>,<|idx:1384|>,<|idx:1385|>,<|idx:1386|>,<|idx:1387|>,<|idx:1388|>,<|idx:1389|>,<|idx:1390|>,<|idx:1391|>,<|idx:1392|>,<|idx:1393|>,<|idx:1394|>,<|idx:1395|>,<|idx:1396|>,<|idx:1397|>,<|idx:1398|>,<|idx:1399|>,<|idx:1400|>,<|idx:1401|>,<|idx:1402|>,<|idx:1403|>,<|idx:1404|>,<|idx:1405|>,<|idx:1406|>,<|idx:1407|>,<|idx:1408|>,<|idx:1409|>,<|idx:1410|>,<|idx:1411|>,<|idx:1412|>,<|idx:1413|>,<|idx:1414|>,<|idx:1415|>,<|idx:1416|>,<|idx:1417|>,<|idx:1418|>,<|idx:1419|>,<|idx:1420|>,<|idx:1421|>,<|idx:1422|>,<|idx:1423|>,<|idx:1424|>,<|idx:1425|>,<|idx:1426|>,<|idx:1427|>,<|idx:1428|>,<|idx:1429|>,<|idx:1430|>,<|idx:1431|>,<|idx:1432|>,<|idx:1433|>,<|idx:1434|>,<|idx:1435|>,<|idx:1436|>,<|idx:1437|>,<|idx:1438|>,<|idx:1439|>,<|idx:1440|>,<|idx:1441|>,<|idx:1442|>,<|idx:1443|>,<|idx:1444|>,<|idx:1445|>,<|idx:1446|>,<|idx:1447|>,<|idx:1448|>,<|idx:1449|>,<|idx:1450|>,<|idx:1451|>,<|idx:1452|>,<|idx:1453|>,<|idx:1454|>,<|idx:1455|>,<|idx:1456|>,<|idx:1457|>,<|idx:1458|>,<|idx:1459|>,<|idx:1460|>,<|idx:1461|>,<|idx:1462|>,<|idx:1463|>,<|idx:1464|>,<|idx:1465|>,<|idx:1466|>,<|idx:1467|>,<|idx:1468|>,<|idx:1469|>,<|idx:1470|>,<|idx:1471|>,<|idx:1472|>,<|idx:1473|>,<|idx:1474|>,<|idx:1475|>,<|idx:1476|>,<|idx:1477|>,<|idx:1478|>,<|idx:1479|>,<|idx:1480|>,<|idx:1481|>,<|idx:1482|>,<|idx:1483|>,<|idx:1484|>,<|idx:1485|>,<|idx:1486|>,<|idx:1487|>,<|idx:1488|>,<|idx:1489|>,<|idx:1490|>,<|idx:1491|>,<|idx:1492|>,<|idx:1493|>,<|idx:1494|>,<|idx:1495|>,<|idx:1496|>,<|idx:1497|>,<|idx:1498|>,<|idx:1499|>,<|idx:1500|>,<|idx:1501|>,<|idx:1502|>,<|idx:1503|>,<|idx:1504|>,<|idx:1505|>,<|idx:1506|>,<|idx:1507|>,<|idx:1508|>,<|idx:1509|>,<|idx:1510|>,<|idx:1511|>,<|idx:1512|>,<|idx:1513|>,<|idx:1514|>,<|idx:1515|>,<|idx:1516|>,<|idx:1517|>,<|idx:1518|>,<|idx:1519|>,<|idx:1520|>,<|idx:1521|>,<|idx:1522|>,<|idx:1523|>,<|idx:1524|>,<|idx:1525|>,<|idx:1526|>,<|idx:1527|>,<|idx:1528|>,<|idx:1529|>,<|idx:1530|>,<|idx:1531|>,<|idx:1532|>,<|idx:1533|>,<|idx:1534|>,<|idx:1535|>,<|idx:1536|>,<|idx:1537|>,<|idx:1538|>,<|idx:1539|>,<|idx:1540|>,<|idx:1541|>,<|idx:1542|>,<|idx:1543|>,<|idx:1544|>,<|idx:1545|>,<|idx:1546|>,<|idx:1547|>,<|idx:1548|>,<|idx:1549|>,<|idx:1550|>,<|idx:1551|>,<|idx:1552|>,<|idx:1553|>,<|idx:1554|>,<|idx:1555|>,<|idx:1556|>,<|idx:1557|>,<|idx:1558|>,<|idx:1559|>,<|idx:1560|>,<|idx:1561|>,<|idx:1562|>,<|idx:1
563|>,<|idx:1564|>,<|idx:1565|>,<|idx:1566|>,<|idx:1567|>,<|idx:1568|>,<|idx:1569|>,<|idx:1570|>,<|idx:1571|>,<|idx:1572|>,<|idx:1573|>,<|idx:1574|>,<|idx:1575|>,<|idx:1576|>,<|idx:1577|>,<|idx:1578|>,<|idx:1579|>,<|idx:1580|>,<|idx:1581|>,<|idx:1582|>,<|idx:1583|>,<|idx:1584|>,<|idx:1585|>,<|idx:1586|>,<|idx:1587|>,<|idx:1588|>,<|idx:1589|>,<|idx:1590|>,<|idx:1591|>,<|idx:1592|>,<|idx:1593|>,<|idx:1594|>,<|idx:1595|>,<|idx:1596|>,<|idx:1597|>,<|idx:1598|>,<|idx:1599|>,<|idx:1600|>,<|idx:1601|>,<|idx:1602|>,<|idx:1603|>,<|idx:1604|>,<|idx:1605|>,<|idx:1606|>,<|idx:1607|>,<|idx:1608|>,<|idx:1609|>,<|idx:1610|>,<|idx:1611|>,<|idx:1612|>,<|idx:1613|>,<|idx:1614|>,<|idx:1615|>,<|idx:1616|>,<|idx:1617|>,<|idx:1618|>,<|idx:1619|>,<|idx:1620|>,<|idx:1621|>,<|idx:1622|>,<|idx:1623|>,<|idx:1624|>,<|idx:1625|>,<|idx:1626|>,<|idx:1627|>,<|idx:1628|>,<|idx:1629|>,<|idx:1630|>,<|idx:1631|>,<|idx:1632|>,<|idx:1633|>,<|idx:1634|>,<|idx:1635|>,<|idx:1636|>,<|idx:1637|>,<|idx:1638|>,<|idx:1639|>,<|idx:1640|>,<|idx:1641|>,<|idx:1642|>,<|idx:1643|>,<|idx:1644|>,<|idx:1645|>,<|idx:1646|>,<|idx:1647|>,<|idx:1648|>,<|idx:1649|>,<|idx:1650|>,<|idx:1651|>,<|idx:1652|>,<|idx:1653|>,<|idx:1654|>,<|idx:1655|>,<|idx:1656|>,<|idx:1657|>,<|idx:1658|>,<|idx:1659|>,<|idx:1660|>,<|idx:1661|>,<|idx:1662|>,<|idx:1663|>,<|idx:1664|>,<|idx:1665|>,<|idx:1666|>,<|idx:1667|>,<|idx:1668|>,<|idx:1669|>,<|idx:1670|>,<|idx:1671|>,<|idx:1672|>,<|idx:1673|>,<|idx:1674|>,<|idx:1675|>,<|idx:1676|>,<|idx:1677|>,<|idx:1678|>,<|idx:1679|>,<|idx:1680|>,<|idx:1681|>,<|idx:1682|>,<|idx:1683|>,<|idx:1684|>,<|idx:1685|>,<|idx:1686|>,<|idx:1687|>,<|idx:1688|>,<|idx:1689|>,<|idx:1690|>,<|idx:1691|>,<|idx:1692|>,<|idx:1693|>,<|idx:1694|>,<|idx:1695|>,<|idx:1696|>,<|idx:1697|>,<|idx:1698|>,<|idx:1699|>,<|idx:1700|>,<|idx:1701|>,<|idx:1702|>,<|idx:1703|>,<|idx:1704|>,<|idx:1705|>,<|idx:1706|>,<|idx:1707|>,<|idx:1708|>,<|idx:1709|>,<|idx:1710|>,<|idx:1711|>,<|idx:1712|>,<|idx:1713|>,<|idx:1714|>,<|idx:1715|>,<|idx:1716|>,<|idx:1717|>,<|idx:1718|>,<|idx:1719|>,<|idx:1720|>,<|idx:1721|>,<|idx:1722|>,<|idx:1723|>,<|idx:1724|>,<|idx:1725|>,<|idx:1726|>,<|idx:1727|>,<|idx:1728|>,<|idx:1729|>,<|idx:1730|>,<|idx:1731|>,<|idx:1732|>,<|idx:1733|>,<|idx:1734|>,<|idx:1735|>,<|idx:1736|>,<|idx:1737|>,<|idx:1738|>,<|idx:1739|>,<|idx:1740|>,<|idx:1741|>,<|idx:1742|>,<|idx:1743|>,<|idx:1744|>,<|idx:1745|>,<|idx:1746|>,<|idx:1747|>,<|idx:1748|>,<|idx:1749|>,<|idx:1750|>,<|idx:1751|>,<|idx:1752|>,<|idx:1753|>,<|idx:1754|>,<|idx:1755|>,<|idx:1756|>,<|idx:1757|>,<|idx:1758|>,<|idx:1759|>,<|idx:1760|>,<|idx:1761|>,<|idx:1762|>,<|idx:1763|>,<|idx:1764|>,<|idx:1765|>,<|idx:1766|>,<|idx:1767|>,<|idx:1768|>,<|idx:1769|>,<|idx:1770|>,<|idx:1771|>,<|idx:1772|>,<|idx:1773|>,<|idx:1774|>,<|idx:1775|>,<|idx:1776|>,<|idx:1777|>,<|idx:1778|>,<|idx:1779|>,<|idx:1780|>,<|idx:1781|>,<|idx:1782|>,<|idx:1783|>,<|idx:1784|>,<|idx:1785|>,<|idx:1786|>,<|idx:1787|>,<|idx:1788|>,<|idx:1789|>,<|idx:1790|>,<|idx:1791|>,<|idx:1792|>,<|idx:1793|>,<|idx:1794|>,<|idx:1795|>,<|idx:1796|>,<|idx:1797|>,<|idx:1798|>,<|idx:1799|>,<|idx:1800|>,<|idx:1801|>,<|idx:1802|>,<|idx:1803|>,<|idx:1804|>,<|idx:1805|>,<|idx:1806|>,<|idx:1807|>,<|idx:1808|>,<|idx:1809|>,<|idx:1810|>,<|idx:1811|>,<|idx:1812|>,<|idx:1813|>,<|idx:1814|>,<|idx:1815|>,<|idx:1816|>,<|idx:1817|>,<|idx:1818|>,<|idx:1819|>,<|idx:1820|>,<|idx:1821|>,<|idx:1822|>,<|idx:1823|>,<|idx:1824|>,<|idx:1825|>,<|idx:1826|>,<|idx:1827|>,<|idx:1828|>,<|idx:1829|>,<|idx:1830|>,<|idx:1831|>,<|idx:1832|>,<|idx:1833|>,<|idx:1834|>,<|idx:1835|>,<|idx:1836|>,
<|idx:1837|>,<|idx:1838|>,<|idx:1839|>,<|idx:1840|>,<|idx:1841|>,<|idx:1842|>,<|idx:1843|>,<|idx:1844|>,<|idx:1845|>,<|idx:1846|>,<|idx:1847|>,<|idx:1848|>,<|idx:1849|>,<|idx:1850|>,<|idx:1851|>,<|idx:1852|>,<|idx:1853|>,<|idx:1854|>,<|idx:1855|>,<|idx:1856|>,<|idx:1857|>,<|idx:1858|>,<|idx:1859|>,<|idx:1860|>,<|idx:1861|>,<|idx:1862|>,<|idx:1863|>,<|idx:1864|>,<|idx:1865|>,<|idx:1866|>,<|idx:1867|>,<|idx:1868|>,<|idx:1869|>,<|idx:1870|>,<|idx:1871|>,<|idx:1872|>,<|idx:1873|>,<|idx:1874|>,<|idx:1875|>,<|idx:1876|>,<|idx:1877|>,<|idx:1878|>,<|idx:1879|>,<|idx:1880|>,<|idx:1881|>,<|idx:1882|>,<|idx:1883|>,<|idx:1884|>,<|idx:1885|>,<|idx:1886|>,<|idx:1887|>,<|idx:1888|>,<|idx:1889|>,<|idx:1890|>,<|idx:1891|>,<|idx:1892|>,<|idx:1893|>,<|idx:1894|>,<|idx:1895|>,<|idx:1896|>,<|idx:1897|>,<|idx:1898|>,<|idx:1899|>,<|idx:1900|>,<|idx:1901|>,<|idx:1902|>,<|idx:1903|>,<|idx:1904|>,<|idx:1905|>,<|idx:1906|>,<|idx:1907|>,<|idx:1908|>,<|idx:1909|>,<|idx:1910|>,<|idx:1911|>,<|idx:1912|>,<|idx:1913|>,<|idx:1914|>,<|idx:1915|>,<|idx:1916|>,<|idx:1917|>,<|idx:1918|>,<|idx:1919|>,<|idx:1920|>,<|idx:1921|>,<|idx:1922|>,<|idx:1923|>,<|idx:1924|>,<|idx:1925|>,<|idx:1926|>,<|idx:1927|>,<|idx:1928|>,<|idx:1929|>,<|idx:1930|>,<|idx:1931|>,<|idx:1932|>,<|idx:1933|>,<|idx:1934|>,<|idx:1935|>,<|idx:1936|>,<|idx:1937|>,<|idx:1938|>,<|idx:1939|>,<|idx:1940|>,<|idx:1941|>,<|idx:1942|>,<|idx:1943|>,<|idx:1944|>,<|idx:1945|>,<|idx:1946|>,<|idx:1947|>,<|idx:1948|>,<|idx:1949|>,<|idx:1950|>,<|idx:1951|>,<|idx:1952|>,<|idx:1953|>,<|idx:1954|>,<|idx:1955|>,<|idx:1956|>,<|idx:1957|>,<|idx:1958|>,<|idx:1959|>,<|idx:1960|>,<|idx:1961|>,<|idx:1962|>,<|idx:1963|>,<|idx:1964|>,<|idx:1965|>,<|idx:1966|>,<|idx:1967|>,<|idx:1968|>,<|idx:1969|>,<|idx:1970|>,<|idx:1971|>,<|idx:1972|>,<|idx:1973|>,<|idx:1974|>,<|idx:1975|>,<|idx:1976|>,<|idx:1977|>,<|idx:1978|>,<|idx:1979|>,<|idx:1980|>,<|idx:1981|>,<|idx:1982|>,<|idx:1983|>,<|idx:1984|>,<|idx:1985|>,<|idx:1986|>,<|idx:1987|>,<|idx:1988|>,<|idx:1989|>,<|idx:1990|>,<|idx:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-04-07T14:04:13.679570Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034653648, txId: 281474976716178] shutting down ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled [GOOD] Test command err: Trying to start YDB, gRPC: 23296, MsgBus: 20276 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001129/r3tmp/tmpnn2sHs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23296, node 1 TClient is connected to server localhost:20276 TClient is connected to server localhost:20276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpService::PatternCache >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::Tcl >> KqpQueryService::CloseConnection >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout >> KqpYql::TableConcat ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 25575, MsgBus: 15613 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fd1/r3tmp/tmpkkW4aA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25575, node 1 TClient is connected to server localhost:15613 TClient is connected to server localhost:15613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> KqpYql::UpdatePk [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 4860, MsgBus: 19429 2025-04-07T14:04:12.963190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571795621225950:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:12.963324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e0a/r3tmp/tmpumzW6Y/pdisk_1.dat 2025-04-07T14:04:13.481532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:13.481659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:13.483145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4860, node 1 2025-04-07T14:04:13.525461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:13.564821Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:13.580738Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:13.636405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:13.636446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:13.636459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:13.636647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19429 TClient is connected to server localhost:19429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:04:14.407160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.436869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:14.456168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.731859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.890551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.972591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.621756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571817096063979:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.651294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.732085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.823017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.860023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.902534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.938531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.988171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571795621225950:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:17.988495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:18.066725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.248654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571821391031785:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.248748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.249112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571821391031790:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.258724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:18.280889Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:04:18.281215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571821391031792:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::ReadManyShardsRange >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::TableSink_OlapDelete >> KqpYql::BinaryJsonOffsetNormal |91.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpYql::ScriptUdf [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_BadTransactions |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpService::SessionBusyRetryOperationSync [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 29360, MsgBus: 25147 2025-04-07T14:04:10.855533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571785715005085:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:10.855941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e25/r3tmp/tmp3p7qfq/pdisk_1.dat 2025-04-07T14:04:11.175133Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29360, node 1 2025-04-07T14:04:11.226681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:11.226786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:11.228680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:11.332110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:11.332134Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:11.332150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:11.332238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25147 TClient is connected to server localhost:25147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:11.944046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.996039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.244107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.451177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.588189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.745569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571802894875837:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:14.745676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.028610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.058598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.097120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.126464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.159536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.205482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.317140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571807189843637:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.317254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.317510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571807189843642:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.325830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:15.352916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571807189843644:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:15.860798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571785715005085:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:15.860880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 28988, MsgBus: 22226 2025-04-07T14:04:17.654456Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571817998526374:2134];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e25/r3tmp/tmpnYz6rp/pdisk_1.dat 2025-04-07T14:04:17.807921Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:17.915451Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:17.950223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:17.950309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:17.955506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28988, node 2 2025-04-07T14:04:18.104915Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:18.104941Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:18.104948Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:18.105042Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22226 TClient is connected to server localhost:22226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:18.902593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:18.913557Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:18.925989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.032271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:19.248546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.344643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:22.656887Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571817998526374:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:22.656964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:23.231205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571843768331748:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.231312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.255357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.346569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.394683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.439394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.531524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.619883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.693992Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571843768332253:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.694064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.694402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571843768332258:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.698537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:23.733598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571843768332260:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] >> KqpYql::JsonNumberPrecision [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SessionBusyRetryOperationSync [GOOD] Test command err: Trying to start YDB, gRPC: 9485, MsgBus: 27626 2025-04-07T14:04:08.612675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777940560852:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.612758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001044/r3tmp/tmplzOeX6/pdisk_1.dat 2025-04-07T14:04:09.103633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.112113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.112195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.116577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9485, node 1 2025-04-07T14:04:09.220297Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.220317Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.220334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.220412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27626 TClient is connected to server localhost:27626 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.782062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.813092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:09.857612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.003072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.197398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.295987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.985660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790825464437:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.021884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.286968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.328008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.379455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.440375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.484371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.528385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.663694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795120432236:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.663746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.663854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795120432241:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.667703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.680758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571795120432243:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.617307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571777940560852:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.617371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61041, MsgBus: 13676 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001044/r3tmp/tmpvFcF9a/pdisk_1.dat 2025-04-07T14:04:17.867552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:17.972358Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:17.992509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:17.992643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:17.996234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61041, node 2 2025-04-07T14:04:18.057193Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:18.057216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:18.057224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:18.057339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13676 TClient is connected to server localhost:13676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:18.979132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:18.988334Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:18.995688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:19.105102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.303220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.391185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:22.683478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571839065529600:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:22. ... ing previous query completion proxyRequestId: 14 2025-04-07T14:04:24.698103Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODdkNTI1MzEtYzRiOTk5MDAtYTQ5NGQ2MDgtNGE3Y2EwZjM=, ActorId: [2:7490571847655465036:2462], ActorState: ExecuteState, TraceId: 01jr89246c3cjnj0n1zs8bp952, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2025-04-07T14:04:24.871131Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 18 2025-04-07T14:04:24.871206Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 19 2025-04-07T14:04:24.907382Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 20 2025-04-07T14:04:24.907466Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 21 2025-04-07T14:04:24.907498Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 22 2025-04-07T14:04:24.907526Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 23 2025-04-07T14:04:24.907555Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 24 2025-04-07T14:04:24.907584Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwOWQyYmQtM2UzMTU3NjItNTAwZjA4NmQtMmU1N2QzYmI=, ActorId: [2:7490571847655465102:2482], ActorState: ExecuteState, TraceId: 01jr8924d6cjgwexn2yfwdkxt6, Reply query error, msg: Pending previous query completion proxyRequestId: 25 2025-04-07T14:04:25.031591Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 28 2025-04-07T14:04:25.031664Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: 
[2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 29 2025-04-07T14:04:25.031697Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 30 2025-04-07T14:04:25.031748Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 31 2025-04-07T14:04:25.031779Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 32 2025-04-07T14:04:25.031812Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 33 2025-04-07T14:04:25.031848Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBmNjM0OWMtYzM5YWFiMzItZjZkN2UxZmItYmJlNWEzOQ==, ActorId: [2:7490571851950432458:2498], ActorState: ExecuteState, TraceId: 01jr8924j62mrs7d9nwh5sqq1z, Reply query error, msg: Pending previous query completion proxyRequestId: 34 2025-04-07T14:04:25.161075Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: [2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 37 2025-04-07T14:04:25.162120Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: [2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 38 2025-04-07T14:04:25.162180Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: [2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 39 2025-04-07T14:04:25.162215Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: [2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 40 2025-04-07T14:04:25.162247Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: [2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 41 2025-04-07T14:04:25.162283Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFkYjhhMzAtMTc4YzkzZTctYjFjZjIyYWUtNzAxODhmZTE=, ActorId: 
[2:7490571851950432511:2512], ActorState: ExecuteState, TraceId: 01jr8924p8612rq3kde3r4h24j, Reply query error, msg: Pending previous query completion proxyRequestId: 42 2025-04-07T14:04:25.287342Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDk3NjhiNWUtYmMxYWI4NDAtYTY4NjdkNS0xZjBiYTIwOQ==, ActorId: [2:7490571851950432563:2525], ActorState: ExecuteState, TraceId: 01jr8924t6aq5x4qkaxrr9g077, Reply query error, msg: Pending previous query completion proxyRequestId: 45 2025-04-07T14:04:25.287433Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDk3NjhiNWUtYmMxYWI4NDAtYTY4NjdkNS0xZjBiYTIwOQ==, ActorId: [2:7490571851950432563:2525], ActorState: ExecuteState, TraceId: 01jr8924t6aq5x4qkaxrr9g077, Reply query error, msg: Pending previous query completion proxyRequestId: 46 2025-04-07T14:04:25.287480Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDk3NjhiNWUtYmMxYWI4NDAtYTY4NjdkNS0xZjBiYTIwOQ==, ActorId: [2:7490571851950432563:2525], ActorState: ExecuteState, TraceId: 01jr8924t6aq5x4qkaxrr9g077, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2025-04-07T14:04:25.287511Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDk3NjhiNWUtYmMxYWI4NDAtYTY4NjdkNS0xZjBiYTIwOQ==, ActorId: [2:7490571851950432563:2525], ActorState: ExecuteState, TraceId: 01jr8924t6aq5x4qkaxrr9g077, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2025-04-07T14:04:25.287541Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDk3NjhiNWUtYmMxYWI4NDAtYTY4NjdkNS0xZjBiYTIwOQ==, ActorId: [2:7490571851950432563:2525], ActorState: ExecuteState, TraceId: 01jr8924t6aq5x4qkaxrr9g077, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-04-07T14:04:25.427984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M4MGM2YTUtMzEyYTk3NWYtNjViYjg1MWItYTczYTE4MmE=, ActorId: [2:7490571851950432608:2537], ActorState: ExecuteState, TraceId: 01jr8924yj3kjx6fnr17tvz0xp, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-07T14:04:25.428068Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M4MGM2YTUtMzEyYTk3NWYtNjViYjg1MWItYTczYTE4MmE=, ActorId: [2:7490571851950432608:2537], ActorState: ExecuteState, TraceId: 01jr8924yj3kjx6fnr17tvz0xp, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-07T14:04:25.428100Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M4MGM2YTUtMzEyYTk3NWYtNjViYjg1MWItYTczYTE4MmE=, ActorId: [2:7490571851950432608:2537], ActorState: ExecuteState, TraceId: 01jr8924yj3kjx6fnr17tvz0xp, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-07T14:04:25.428164Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M4MGM2YTUtMzEyYTk3NWYtNjViYjg1MWItYTczYTE4MmE=, ActorId: [2:7490571851950432608:2537], ActorState: ExecuteState, TraceId: 01jr8924yj3kjx6fnr17tvz0xp, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-07T14:04:25.622554Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2UxNzBjM2ItYTY3MDkyZTItMTA1NzAyMGEtOGJjY2I3ZDU=, ActorId: [2:7490571851950432660:2548], ActorState: ExecuteState, TraceId: 01jr89254n6sp4cwe384t9z8a7, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-07T14:04:25.622635Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2UxNzBjM2ItYTY3MDkyZTItMTA1NzAyMGEtOGJjY2I3ZDU=, ActorId: 
[2:7490571851950432660:2548], ActorState: ExecuteState, TraceId: 01jr89254n6sp4cwe384t9z8a7, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-07T14:04:25.622668Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2UxNzBjM2ItYTY3MDkyZTItMTA1NzAyMGEtOGJjY2I3ZDU=, ActorId: [2:7490571851950432660:2548], ActorState: ExecuteState, TraceId: 01jr89254n6sp4cwe384t9z8a7, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-04-07T14:04:25.777409Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjcwNjA1YjYtMzkxYzFjMjktOTI1NjA5NzktZmQ2YmU5NGQ=, ActorId: [2:7490571851950432689:2559], ActorState: ExecuteState, TraceId: 01jr89259f1ds06gwrcnrpvhzy, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-07T14:04:25.785433Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjcwNjA1YjYtMzkxYzFjMjktOTI1NjA5NzktZmQ2YmU5NGQ=, ActorId: [2:7490571851950432689:2559], ActorState: ExecuteState, TraceId: 01jr89259f1ds06gwrcnrpvhzy, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-07T14:04:25.933884Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzhkZGE4N2YtOWM2YTRiZmItNmNlZDg1MTktZDZjYmUyZDU=, ActorId: [2:7490571851950432715:2568], ActorState: ExecuteState, TraceId: 01jr8925edcm58yf895wrzrdav, Reply query error, msg: Pending previous query completion proxyRequestId: 67 >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 26056, MsgBus: 24696 2025-04-07T14:04:10.820840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571786621492404:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:10.821098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e1c/r3tmp/tmpAXvKJQ/pdisk_1.dat 2025-04-07T14:04:11.229426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:11.243774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:11.243869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:11.248527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26056, node 1 2025-04-07T14:04:11.380412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:11.380446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:11.380452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:11.384835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24696 TClient is connected to server localhost:24696 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:11.962571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.976575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:11.984110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.223063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.413561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.556118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:14.480520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571803801363142:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:14.480651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:14.858056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.936743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.977586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.013085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.094983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.145479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.211433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571808096330942:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.211490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.211837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571808096330947:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:15.215122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:15.226292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571808096330949:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:15.823027Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571786621492404:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:15.823096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13392, MsgBus: 13636 2025-04-07T14:04:18.730518Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571819396975914:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:18.730568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e1c/r3tmp/tmpbjAWr2/pdisk_1.dat 2025-04-07T14:04:19.016052Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:19.060560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:19.060711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:19.065038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13392, node 2 2025-04-07T14:04:19.213561Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:19.213586Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:19.213593Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:19.213723Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13636 TClient is connected to server localhost:13636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:20.071984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:20.085269Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:20.099476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.156542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.341158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.437257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:23.732887Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571819396975914:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:23.732970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:24.811426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571845166781386:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.811543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.856141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.897319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.971477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.998465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.073895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.123206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.215442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571849461749190:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.215553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.216183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571849461749195:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.221028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:25.248962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571849461749198:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 14989, MsgBus: 25385 2025-04-07T14:03:34.975408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633819634774:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010cf/r3tmp/tmpIgOz8C/pdisk_1.dat 2025-04-07T14:03:35.661062Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14989, node 1 2025-04-07T14:03:35.679413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.679505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.685446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:36.056727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.056763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.056773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.056872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25385 TClient is connected to server localhost:25385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.070310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.087580Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:37.099783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.267842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.532511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.619672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.802941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650999505540:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.803033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.983445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633819634774:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.983497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.483129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.518583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.548634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.589647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.655057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.690587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659589440640:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.690654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.690824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659589440645:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.693923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.705152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659589440647:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.559210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.560654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.561810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.924902Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzAyMjU2NTYtYmQ2ZjA3NmItMjliOTRlZTktZjgxMjQ0NTA=, ActorId: [1:7490571663884408538:2486], ActorState: ExecuteState, TraceId: 01jr890t3f8mj15zt5fzy6ggem, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2752, MsgBus: 13750 2025-04-07T14:03:43.993541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571673017469676:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010cf/r3tmp/tmpRpczNJ/pdisk_1.dat 2025-04-07T14:03:44.083415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:03:44.115446Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:44.125981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:44.126061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:44.132164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2752, node 2 2025-04-07T14:03:44.204772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:44.204796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:44.204806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:44.204919Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13750 TClient is connected to server localhost:13750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:44.701537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.719253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:44.7976 ... pose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:06.557919Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:06.559209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:06.688896Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490571749526228503:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:06.688996Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:09.673791Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034649707, txId: 281474976715703] shutting down 2025-04-07T14:04:10.156103Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034650162, txId: 281474976715706] shutting down 2025-04-07T14:04:10.490168Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034650519, txId: 281474976715709] shutting down 2025-04-07T14:04:10.533152Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 9d982be3-1a3c43dd-7ce67823-b621731e, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=NTg0NjcxN2UtN2Q4MjlmNGQtMjIyOGYyZC1mYzI5NjliNA==, TxId: Trying to start YDB, gRPC: 28376, MsgBus: 6305 2025-04-07T14:04:11.642891Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490571791883796530:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:11.643330Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010cf/r3tmp/tmppt0gYa/pdisk_1.dat 2025-04-07T14:04:11.766200Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:11.788375Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:11.796554Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:11.805236Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28376, node 5 2025-04-07T14:04:11.933406Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:11.933438Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:11.933448Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:11.933595Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6305 TClient is connected to server localhost:6305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:12.802843Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.811992Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:12.828371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:12.944786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:13.185479Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:13.282399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.645109Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490571791883796530:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:16.645206Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:17.738168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490571817653602023:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.738277Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.807318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.859537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.928292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.995787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.106079Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.246099Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.347211Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490571821948569838:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.347338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.347598Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490571821948569843:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.357428Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:18.394433Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490571821948569845:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:19.805700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.807752Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.810150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.794017Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NGJjN2U3OS1mMmJlYjVkMC04NjQ2Y2ExNy03ZjI5MzU1MQ==, TxId: 2025-04-07T14:04:25.349854Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NGFkZmM0YzktNzQyNmVkZjItZWMwYzgxM2YtNTZlOGJjY2U=, TxId: 2025-04-07T14:04:25.593969Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-07T14:04:25.631949Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NWM3ZmJkNDItYTBlMDZjMS0xODcyMmU3ZS1iNzMyMWVjNA==, TxId: 2025-04-07T14:04:25.632103Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, check lease failed 2025-04-07T14:04:26.159584Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 1cec6b88-c91f3d02-6a671c15-8987cd5e, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=ZTgyNzJhM2MtMjIwNGNhYmEtOTdkYjUzOWEtODJiZjY2MDc=, TxId: |91.1%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::SystemView |91.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::MigrationLostMessage >> KqpScripting::EndOfQueryCommit >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode >> TCacheTest::MigrationCommon >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore >> KqpQueryService::ExecStatsAst [GOOD] >> KqpQueryService::ReadManyShardsRange [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::SeveralCTAS [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecStatsAst [GOOD] Test command err: Trying to start YDB, gRPC: 19667, MsgBus: 28923 2025-04-07T14:04:08.543154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571778853987838:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.548450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001014/r3tmp/tmpllemAd/pdisk_1.dat 2025-04-07T14:04:09.026205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.033028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.033095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.034823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19667, node 1 2025-04-07T14:04:09.123583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.123604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.123610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.123719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28923 TClient is connected to server localhost:28923 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.723374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.741794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:09.836946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.997421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.181946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.275664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.783107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571791738891259:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.783241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.118325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.198357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.242972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.282404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.311679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.358956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.453720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571796033859056:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.453788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.454071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571796033859061:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.458334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.489117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571796033859063:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.549704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571778853987838:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.549782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13804, MsgBus: 18209 2025-04-07T14:04:14.473392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571805416265314:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:14.473433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001014/r3tmp/tmpMTriNF/pdisk_1.dat 2025-04-07T14:04:14.742442Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13804, node 2 2025-04-07T14:04:14.844463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:14.844483Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:14.844490Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:14.844595Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:14.852258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:14.852340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:14.853927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18209 TClient is connected to server localhost:18209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:15.223129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:15.229750Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:15.261671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:15.368144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:15.614279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.685031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:18.689745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:74905718225 ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.854853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.883336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.951330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.052951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571826891103990:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.053056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.059756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571826891103995:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.072978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:19.097384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571826891103997:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:19.478807Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571805416265314:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:19.478863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14590, MsgBus: 11614 2025-04-07T14:04:22.555583Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571837965298293:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001014/r3tmp/tmpQ1z30Q/pdisk_1.dat 2025-04-07T14:04:22.782657Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:22.933380Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:22.933678Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:22.933754Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:22.955199Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14590, node 3 2025-04-07T14:04:23.147060Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:23.147079Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:23.147087Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:23.147189Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11614 TClient is connected to server localhost:11614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:24.019989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:24.026552Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:24.050225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.143641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.354106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.442495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:27.520829Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571837965298293:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:27.520891Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:28.164254Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571863735103631:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.164403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.183639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.263156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.315180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.402707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.457419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.540346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.589337Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571863735104126:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.589415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.589521Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571863735104131:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.593341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:28.607036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571863735104133:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:29.699137Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571868030071782:2471], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-04-07T14:04:29.699797Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYyNzAxNy01ZjI0ZTY1My1iYWNkZGE5Yi04ZTZjYzc0YQ==, ActorId: [3:7490571868030071780:2470], ActorState: ExecuteState, TraceId: 01jr8929362cz9szrtcbc4nhw0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:04:29.925464Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490571868030071811:2475], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NWQyNzgxNmUtN2UxZTk2MDgtOTUyZGVhZjItZmY1OTBhYzU=. TraceId : 01jr89294a57nb4q5k6hy0dtvs. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-04-07T14:04:29.958742Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWQyNzgxNmUtN2UxZTk2MDgtOTUyZGVhZjItZmY1OTBhYzU=, ActorId: [3:7490571868030071792:2475], ActorState: ExecuteState, TraceId: 01jr89294a57nb4q5k6hy0dtvs, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyShardsRange [GOOD] Test command err: Trying to start YDB, gRPC: 12070, MsgBus: 21749 2025-04-07T14:04:08.523373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777652104243:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.523413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001029/r3tmp/tmpN9iEai/pdisk_1.dat 2025-04-07T14:04:09.147718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.147829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.149971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12070, node 1 2025-04-07T14:04:09.180196Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:09.180220Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:09.199298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.245241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.245267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.245279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.245396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21749 TClient is connected to server localhost:21749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.819149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:09.830397Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:11.996002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790537006760:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.996158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.996448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790537006787:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.001501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.014259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571790537006789:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:04:12.371905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8517, MsgBus: 30194 2025-04-07T14:04:13.574161Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571797828349335:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.579120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001029/r3tmp/tmpjqefqk/pdisk_1.dat 2025-04-07T14:04:13.888513Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:13.907492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:13.907621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:13.909936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8517, node 2 2025-04-07T14:04:13.990436Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:13.990458Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:13.990465Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:13.990569Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30194 TClient is connected to server localhost:30194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:14.758087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:18.258123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571819303186299:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.258230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.321875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.574663Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571797828349335:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:18.577107Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:18.604970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571819303186856:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.605042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.607426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571819303186861:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:18.610889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:18.634670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571819303186863:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 24096, MsgBus: 7563 2025-04-07T14:04:22.900762Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571839195843607:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:23.002108Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001029/r3tmp/tmpe2l1LF/pdisk_1.dat 2025-04-07T14:04:23.333713Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:23.383798Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:23.383887Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:23.390421Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24096, node 3 2025-04-07T14:04:23.613361Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:23.613387Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:23.613395Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:23.613755Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7563 TClient is connected to server localhost:7563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:24.627281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:24.639917Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:27.836821Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571839195843607:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:27.844388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:29.173793Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869260615193:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.173897Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.193453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.331746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869260615410:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.331828Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.332076Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869260615415:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.336097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:29.361336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571869260615417:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 12775, MsgBus: 63040 2025-04-07T13:59:46.361567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570655125610398:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:59:46.362075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ff9/r3tmp/tmpKPpfre/pdisk_1.dat 2025-04-07T13:59:46.808978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:59:46.809053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:59:46.814537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T13:59:46.834174Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12775, node 1 2025-04-07T13:59:46.969244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:59:46.969264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:59:46.969271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:59:46.969359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63040 TClient is connected to server localhost:63040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:59:47.602661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T13:59:47.624712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T13:59:49.609404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490570668010512913:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.609533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:59:49.881116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T13:59:49.963460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T13:59:49.963667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T13:59:49.963889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T13:59:49.963994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T13:59:49.964083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T13:59:49.964166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T13:59:49.964253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T13:59:49.964339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T13:59:49.964422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T13:59:49.964507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T13:59:49.964590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T13:59:49.964692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570668010512981:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T13:59:49.967442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T13:59:49.967489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T13:59:49.967574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T13:59:49.967606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T13:59:49.967752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T13:59:49.967769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T13:59:49.967842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T13:59:49.967863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T13:59:49.967912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T13:59:49.967931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T13:59:49.967961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T13:59:49.967979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T13:59:49.968357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T13:59:49.968382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T13:59:49.968523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T13:59:49.968546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T13:59:49.968652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T13:59:49.968687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T13:59:49.968874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T13:59:49.968904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T13:59:49.969007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T13:59:49.9 ... gTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int32TypeE; 
[... FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488 entries repeat many more times, alternating T=N5arrow9Int32TypeE and T=N5arrow10StringTypeE; repeated entries elided ...] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=55496;columns=3;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 16146, MsgBus: 16220 2025-04-07T14:04:08.460591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571776704451850:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.460907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001048/r3tmp/tmpA4KbD7/pdisk_1.dat 2025-04-07T14:04:09.008581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.053669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.053775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.061468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16146, node 1 2025-04-07T14:04:09.305222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.305238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.305243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.305317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16220 TClient is connected to server localhost:16220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.103405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.113682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:10.141500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.378536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.663019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.767305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.905497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571793884322587:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.923130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.961276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.997544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.027765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.055786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.091259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.178571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.283145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798179290388:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.283221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.283399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798179290393:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.286762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.312543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571798179290395:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.456652Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571776704451850:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.456721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28202, MsgBus: 19027 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001048/r3tmp/tmpIP3EOS/pdisk_1.dat 2025-04-07T14:04:15.796847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:15.819185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:15.819259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:15.828317Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:15.836069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28202, node 2 2025-04-07T14:04:16.092697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.092727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.092732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.092828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19027 TClient is connected to server localhost:19027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:16.680542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.692713Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:16.699785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:04:16.839171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.083018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.215103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.209053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571827904550672:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.209132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.253572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.353414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.430769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.473884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.539938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.578288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.696650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571827904551180:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.696780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.697078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571827904551185:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.701337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:20.720323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571827904551187:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:22.727514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.734867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.736019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.363649Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034669384, txId: 281474976710735] shutting down >> KqpScripting::StreamExecuteYqlScriptData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 14989, MsgBus: 15937 2025-04-07T14:04:14.671513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571803983931235:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:14.671631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e05/r3tmp/tmpmXlcdB/pdisk_1.dat 2025-04-07T14:04:15.220552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:15.221239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:15.229799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:15.245184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14989, node 1 2025-04-07T14:04:15.433184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:15.433202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:15.433209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:15.433279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15937 TClient is connected to server localhost:15937 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:16.279522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.339168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.559490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:16.817047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:04:16.915835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.285766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571825458769297:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.285993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.531584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.579781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.600033Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571803983931235:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:19.600209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:19.609308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.638410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.670222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.720086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.795478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571825458769792:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.795555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.795564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571825458769797:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:19.799638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:19.808611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571825458769799:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:22.046065Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571834048704828:2475], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-07T14:04:22.047728Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDgxYTNmZDQtZWM1MGMzYTAtZTQ4MzA1ZWYtZThiZmI0ZDU=, ActorId: [1:7490571834048704826:2474], ActorState: ExecuteState, TraceId: 01jr8921kabw0n6ygkg7v0m6fq, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-07T14:04:22.363207Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571838343672200:2488], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2025-04-07T14:04:22.369334Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIyNjRhNzItNTA1Y2RhYzYtZGFhZmFmZWQtNDVlN2YxYTY=, ActorId: [1:7490571838343672198:2487], ActorState: ExecuteState, TraceId: 01jr8921xw54m4gnv88tvx8jsn, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 3684, MsgBus: 13224 2025-04-07T14:04:23.730696Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571844656173377:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e05/r3tmp/tmpPCFPLT/pdisk_1.dat 2025-04-07T14:04:23.808490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:23.947705Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:23.948149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:23.948220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:23.950008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3684, node 2 2025-04-07T14:04:24.016378Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:24.016402Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:24.016407Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:24.016500Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13224 TClient is connected to server localhost:13224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:24.642933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.666655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.735819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:25.019601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:25.120641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.714511Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571844656173377:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:28.715049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:28.754360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571866131011402:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.754436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:28.779043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.818195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.881638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.912069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.940505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:28.975229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.033168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571870425979193:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.033260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.033697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571870425979198:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.037468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:29.047938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571870425979200:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:30.412301Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034670259, txId: 281474976710671] shutting down >> TCacheTest::SystemView [GOOD] >> TCacheTest::SysLocks >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> TCacheTest::SysLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::Scripting [GOOD] Test command err: Trying to start YDB, gRPC: 9009, MsgBus: 5344 2025-04-07T14:04:08.573310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777975643675:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.573506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103d/r3tmp/tmpu5Oqe2/pdisk_1.dat 2025-04-07T14:04:09.229016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.229090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.232133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9009, node 1 2025-04-07T14:04:09.281988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.453118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.453145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.453151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.453223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5344 TClient is connected to server localhost:5344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.179633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:10.229128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:10.247746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.451958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.612840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.708673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.457619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795155514415:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.457737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.836848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.872901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.935489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.994956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.030878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.076059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.163581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799450482216:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.163653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.163871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799450482221:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.167119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.176488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:04:13.177092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571799450482223:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.572760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571777975643675:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.572881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:14.419216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.541826Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571803745449921:2472], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-07T14:04:14.542968Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDNmYmQ0YzYtMjVkMTNkNjItMTBmNDlkMzgtZGM2Y2RhMDc=, ActorId: [1:7490571803745449834:2461], ActorState: ExecuteState, TraceId: 01jr891t9day6aqybxdm95byz3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 1564, MsgBus: 8782 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103d/r3tmp/tmpLPK9iS/pdisk_1.dat 2025-04-07T14:04:15.968808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:15.992979Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:16.009814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:16.009888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:16.017797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1564, node 2 2025-04-07T14:04:16.225429Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.225449Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.225455Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.225530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8782 TClient is connected to server localhost:8782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:04:16.913272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:16.918651Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:1 ... 
rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.091364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.171888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.259770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.289154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.344273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.415875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571829409189040:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.415978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.416477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571829409189045:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.420182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:20.433386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571829409189047:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:22.616229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.771555Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571837999124069:2473], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-07T14:04:22.777597Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzRiZTQ0ZjYtYzQ1N2U3MWYtN2Y1NGVmNWMtNDBiMWU1NmI=, ActorId: [2:7490571837999123983:2463], ActorState: ExecuteState, TraceId: 01jr89229s93mrtd0ncvrxt256, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 27121, MsgBus: 30763 2025-04-07T14:04:24.153140Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571844986748971:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:24.153695Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103d/r3tmp/tmpxY3IdH/pdisk_1.dat 2025-04-07T14:04:24.423980Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:24.455537Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:24.455621Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:24.457350Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27121, node 3 2025-04-07T14:04:24.541318Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:24.541346Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:24.541355Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:24.541461Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30763 TClient is connected to server localhost:30763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:25.075317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:25.093245Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:25.114294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.208999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:25.426408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:25.528637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:29.153347Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571844986748971:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:29.153412Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:29.252486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571866461587149:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.252568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.304943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.334282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.390865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.419654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.447116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.521201Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.575149Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571866461587646:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.575216Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.575385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571866461587651:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.579733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:29.588908Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571866461587653:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:30.672417Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:4:61: Error: At function: KiAlterTable!
:4:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS [GOOD] Test command err: Trying to start YDB, gRPC: 11835, MsgBus: 31633 2025-04-07T14:04:08.637413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571778253412836:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.637566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001024/r3tmp/tmp59CFRA/pdisk_1.dat 2025-04-07T14:04:09.303374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.370623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.370725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.377588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11835, node 1 2025-04-07T14:04:09.632100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.632115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.632124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.632188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31633 TClient is connected to server localhost:31633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.319540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.342387Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:10.368108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:10.542409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.721965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.820081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.749518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795433283752:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.749634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.046369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.091555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.170097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.224028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.261516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.345841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.407444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799728251556:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.407511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.407687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799728251561:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.410917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.420277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571799728251563:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.641437Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571778253412836:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.642502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30407, MsgBus: 11065 2025-04-07T14:04:15.637438Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571810230524688:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:15.637480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001024/r3tmp/tmpH9V9wQ/pdisk_1.dat 2025-04-07T14:04:15.891837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:15.891923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:15.893781Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:15.902994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30407, node 2 2025-04-07T14:04:16.121146Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.121174Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.121180Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.121262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11065 TClient is connected to server localhost:11065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:16.834612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:16.845410Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:16.853538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.029658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.234129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.321582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.089035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571831705362882:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.089124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.145345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.184736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.225483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.308993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.396204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.499536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.637401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571831705363388:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.637451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.637804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571810230524688:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:20.637876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:20.638004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571831705363393:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.641633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:20.653170Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T14:04:20.653443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571831705363395:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 26848, MsgBus: 29111 2025-04-07T14:04:25.049133Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571852818726675:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:25.049180Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001024/r3tmp/tmp0YwuF4/pdisk_1.dat 2025-04-07T14:04:25.220848Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:25.238755Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:25.238840Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:25.241823Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26848, node 3 2025-04-07T14:04:25.309283Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:25.309312Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:25.309320Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:25.309459Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29111 TClient is connected to server localhost:29111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:26.034374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:26.045329Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:29.580840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869998596478:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.580945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.581473Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869998596490:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.587974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:29.600833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571869998596492:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:04:29.782701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-07T14:04:30.035313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.049447Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571852818726675:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:30.049522Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:30.208122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-04-07T14:04:31.444463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:31.444514Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:32.103396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-07T14:04:32.468880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:32.468926Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:32.508881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationDeletedPathNavigate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 6127, MsgBus: 10523 2025-04-07T14:04:08.131906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571778720670056:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.132199Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104f/r3tmp/tmpggdL9B/pdisk_1.dat 2025-04-07T14:04:08.628541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.628628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.678700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.678948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6127, node 1 2025-04-07T14:04:08.846735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.846758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.846763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.846840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10523 TClient is connected to server localhost:10523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.827538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.891672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.087130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.276010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.413255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:12.521404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795900540794:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.531113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.573042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.643550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.718188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.801269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.874601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.939229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.016154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571800195508597:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.016231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.016592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571800195508602:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.020361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.037368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571800195508604:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.123990Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571778720670056:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.124239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61103, MsgBus: 13416 2025-04-07T14:04:15.742646Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571808281831644:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104f/r3tmp/tmp9DanS5/pdisk_1.dat 2025-04-07T14:04:15.811476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:15.899351Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:15.929685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:15.929763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:15.934021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61103, node 2 2025-04-07T14:04:16.153168Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.153187Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.153193Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.153286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13416 TClient is connected to server localhost:13416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:16.910732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.935262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:17.030739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.189826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.390169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.525774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571829756669655:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.525869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:20.654092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.723992Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571808281831644:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:20.724061Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:20.754794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.811537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.893779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.947083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:21.020603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:21.138451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571834051637453:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:21.138546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:21.138962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571834051637458:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:21.143328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:21.492766Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T14:04:21.493217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571834051637460:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27559, MsgBus: 19459 2025-04-07T14:04:24.934777Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571848415516532:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:24.934809Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104f/r3tmp/tmpxfeHNV/pdisk_1.dat 2025-04-07T14:04:25.146818Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:25.146896Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:25.161907Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:25.163888Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27559, node 3 2025-04-07T14:04:25.348503Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:25.348527Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:25.348535Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:25.348655Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19459 TClient is connected to server localhost:19459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:04:25.972239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.990133Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:26.003204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:26.097442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:26.333635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:26.460777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:29.851917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571869890354578:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.852000Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.876875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.923141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.934859Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571848415516532:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:29.934903Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:29.987794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.022355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.058932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.136898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.185138Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571874185322377:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.185248Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.185305Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571874185322382:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.189236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:30.199263Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571874185322384:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryService::TableSink_BadTransactions [GOOD] >> KqpQueryService::TableSink_DisableSink >> TCacheTest::MigrationCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] Test command err: Trying to start YDB, gRPC: 14776, MsgBus: 16891 2025-04-07T14:04:08.248908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571776859506112:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.250517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001034/r3tmp/tmpRLrc7s/pdisk_1.dat 2025-04-07T14:04:08.762212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.762303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.802498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.818882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14776, node 1 2025-04-07T14:04:08.969155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.969173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.969183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.969255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16891 TClient is connected to server localhost:16891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.507714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:09.529709Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:09.558376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.777204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.078671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.155157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.108328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794039376973:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.108472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.646744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.731637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.809327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.882422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.918374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.987992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.053610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798334344775:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.053702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.054024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798334344780:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.057548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.078149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571798334344782:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.284983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571776859506112:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.286444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:14.877337Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzBmYjQ1MGUtMjNkMjc1YzMtYmI5Y2JjM2QtNGEwY2QzNWY=, ActorId: [1:7490571802629312399:2464], ActorState: ExecuteState, TraceId: 01jr891tks2fw6tthr818zdqsh, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6555, MsgBus: 21159 2025-04-07T14:04:16.149220Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571812706625003:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001034/r3tmp/tmpWhMeJN/pdisk_1.dat 2025-04-07T14:04:16.217855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:16.378115Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:16.397962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:16.398057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:16.406044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6555, node 2 2025-04-07T14:04:16.737225Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.737249Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.737257Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.737358Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21159 TClient is connected to server localhost:21159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:17.505904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:17.555692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.673316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.884797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:18.039393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:21.072829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571834181463049:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:21.072938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:21.160928Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571812706625003:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:21.161054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:21.597199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:21.735697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:21.958929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.045174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.243193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.338772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:22.457887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571838476430856:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:22.457983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:22.458447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571838476430861:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:22.462691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:22.479854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571838476430863:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 24443, MsgBus: 22593 2025-04-07T14:04:25.643296Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571849850734073:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001034/r3tmp/tmpt6aB3X/pdisk_1.dat 2025-04-07T14:04:25.744510Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:25.788078Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:25.788141Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:25.789762Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:25.790010Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24443, node 3 2025-04-07T14:04:26.073803Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:26.073831Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:26.073840Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:26.073956Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22593 TClient is connected to server localhost:22593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:28.040881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.059811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.215407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:28.445742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.563100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.646278Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571849850734073:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:30.646382Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:30.844320Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571871325572097:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.844428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.880252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.913958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.945263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.977318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.044698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.116064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.153582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571875620539894:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.153635Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.153715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571875620539899:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.156446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:31.164389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571875620539901:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScripting::StreamExecuteYqlScriptScanCancelation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-04-07T14:04:31.444097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:31.444170Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:32.103312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-07T14:04:32.171206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:171:2168], Recipient [1:68:2107]: NActors::TEvents::TEvPoison 2025-04-07T14:04:32.172035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2025-04-07T14:04:32.212118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:04:32.233879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:04:32.234138Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:04:32.246043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:04:32.246145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:32.246183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:04:32.246242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:04:32.246282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:04:32.246315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:04:32.246396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:32.246742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:04:32.262211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-07T14:04:32.263312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:04:32.263463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:04:32.263642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:177:2170]: TSystem::Undelivered 2025-04-07T14:04:32.263681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-04-07T14:04:32.263718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:32.263746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:32.263955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:04:32.264675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:32.264788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.264848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.271030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.275788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.275949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.276783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.277123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.277205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.288546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.288717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.300863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.301604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.308131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:04:32.309692Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:04:32.310240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:177:2170], Recipient [1:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:04:32.310285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:04:32.310933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:04:32.310981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:32.311072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:04:32.311116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:04:32.311151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:04:32.311189Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:04:32.311452Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:192:2170], Recipient [1:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:04:32.311494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:04:32.311529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2025-04-07T14:04:32.349045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:206:2187], Recipient [1:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-07T14:04:32.349126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T14:04:32.460034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:04:32.460333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.460455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as 
only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:04:32.460636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:04:32.461155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:04:32.461290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:04:32.461342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:04:32.461449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:04:32.461511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:04:32.461566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:04:32.462200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678 ... 5-04-07T14:04:32.995399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-07T14:04:32.995496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:04:32.995542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:04:32.995618Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:416:2333], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:04:32.995729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:04:32.995766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T14:04:33.303269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.303331Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:33.349859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:175:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:174:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:174:2169] 2025-04-07T14:04:33.395628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.395692Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2025-04-07T14:04:33.424062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:04:33.431698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:244:2067] recipient: [2:237:2213] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:244:2067] recipient: [2:237:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: 
[2:252:2067] recipient: [2:237:2213] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-07T14:04:33.450331Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-07T14:04:33.493382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:338:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:338:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:339:2286] sender: [2:340:2067] recipient: [2:331:2282] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [2:339:2286] sender: [2:355:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T14:04:33.624929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:414:2067] recipient: [2:409:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:414:2067] recipient: [2:409:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:417:2333] sender: [2:418:2067] recipient: [2:409:2329] 2025-04-07T14:04:33.665897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.665960Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:417:2333] sender: [2:444:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 
2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2025-04-07T14:04:33.706801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:04:33.706854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:04:33.707357Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-07T14:04:33.707496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:04:33.724562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-07T14:04:33.724885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:504:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:507:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:509:2067] recipient: [2:508:2404] Leader for TabletID 72057594046678944 is [2:510:2405] sender: [2:511:2067] recipient: [2:508:2404] 2025-04-07T14:04:33.770430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.770495Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:510:2405] sender: [2:537:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 18037, MsgBus: 17509 2025-04-07T14:04:19.529576Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571826344807030:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:19.529869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df0/r3tmp/tmpZiIDdp/pdisk_1.dat 2025-04-07T14:04:20.132960Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18037, node 1 2025-04-07T14:04:20.155655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:20.155763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:20.157563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:20.312020Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:20.312046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:20.312057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:20.312142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17509 TClient is connected to server localhost:17509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:21.895652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:21.918046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:21.946873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:22.405994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:22.807024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:23.040978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.520916Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571826344807030:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:24.520985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:25.929528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571852114612376:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.955363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:26.018992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.100428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.133121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.206582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.256419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.327284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:26.409677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571856409580171:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:26.409778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:26.410244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571856409580176:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:26.414915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:26.434440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571856409580178:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 29899, MsgBus: 29325 2025-04-07T14:04:29.584990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571869112189232:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:29.585042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df0/r3tmp/tmpvoYDqB/pdisk_1.dat 2025-04-07T14:04:29.694043Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:29.708916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:29.709002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:29.711575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29899, node 2 2025-04-07T14:04:29.795198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:29.795231Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:29.795239Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:29.795339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29325 TClient is connected to server localhost:29325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:04:30.247379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:30.265091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.336748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:30.510462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.588364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:32.736922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571881997092817:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:32.737068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:32.762129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.790224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.824633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.850212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.885904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.953754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:32.987730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571881997093318:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:32.987834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:32.987912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571881997093323:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:32.991221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:33.000893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571881997093325:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:33.836062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.323893Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034674368, txId: 281474976715675] shutting down >> KqpQueryService::TableSink_OltpUpdate [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 29767, MsgBus: 10891 2025-04-07T14:04:08.302626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571778760156818:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.302665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001030/r3tmp/tmp5GlQ8P/pdisk_1.dat 2025-04-07T14:04:08.826700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.829175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.829275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.832894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29767, node 1 2025-04-07T14:04:08.917652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.917683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.917703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.917810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10891 TClient is connected to server localhost:10891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:04:09.651472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.682097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:11.837637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571791645059335:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.837752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.090354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.238503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795940026733:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.238582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.239523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795940026738:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.245782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.285893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571795940026740:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 1784, MsgBus: 17480 2025-04-07T14:04:13.736310Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571800673101773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.744797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001030/r3tmp/tmpgONi7m/pdisk_1.dat 2025-04-07T14:04:13.867572Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:13.893188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:13.893248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:13.894653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1784, node 2 2025-04-07T14:04:13.985211Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:13.985235Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:13.985244Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:13.985364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17480 TClient is connected to server localhost:17480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:14.686152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:17.181683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571817852971599:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.181777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.213513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.368968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571817852971700:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.369044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.369391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571817852971705:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:17.373355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:17.395436Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:04:17.395713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571817852971707:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:04:18.061160Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571822147939153:2342], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-04-07T14:04:18.062817Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWIzZWM1NGUtN2FlODRkZS1lNTMxMDBhYS03ZDdmNmJhMQ==, ActorId: [2:7490571822147939151:2341], ActorState: ExecuteState, TraceId: 01jr891xqhchbgs4438kva94wk, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 2025-04-07T14:04:18.734553Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571800673101773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:18.734620Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 32498, MsgBus: 1468 2025-04-07T14:04:23.868763Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571842406763041:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001030/r3tmp/tmpf7rARo/pdisk_1.dat 2025-04-07T14:04:23.985287Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:24.100525Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:24.101664Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:24.101734Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:24.103364Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32498, node 3 2025-04-07T14:04:24.304584Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:24.304625Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:24.304635Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:24.304773Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1468 TClient is connected to server localhost:1468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:04:24.816914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:24.829148Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:28.840796Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571842406763041:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:28.840908Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:29.306071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571868176567318:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.306152Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.328225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.385184Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571868176567416:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.385255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.385408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571868176567421:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:29.389828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:29.402313Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571868176567423:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] Test command err: Trying to start YDB, gRPC: 13711, MsgBus: 61985 2025-04-07T14:04:08.177671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571779374234886:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.177713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102d/r3tmp/tmpKde8Xe/pdisk_1.dat 2025-04-07T14:04:08.755074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.773585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.773703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.788043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13711, node 1 2025-04-07T14:04:09.025291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.025311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.025316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.025403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61985 TClient is connected to server localhost:61985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.764752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.854621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:10.071552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.337786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.496627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.378081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571796554105760:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.378175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.699600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.736275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.784696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.819172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.845515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.887422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.950001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571796554106257:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.950082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.950337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571796554106263:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.954868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.964468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571796554106265:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.190973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571779374234886:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.191024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:14.230101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.231881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.233139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:17.324583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034657330, txId: 281474976710699] shutting down 2025-04-07T14:04:17.368374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571818028944156:2727], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:04:17.368434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571818028944158:2729], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:04:17.368474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:04:17.368538Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490571818028944157:2728], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=MTg5OWUyNy02YjIxN2M1Ni0yNDk4MzQ5Yi1iZmNmNWFhNw==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-07T14:04:17.368604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490571818028944157:2728], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=MTg5OWUyNy02YjIxN2M1Ni0yNDk4MzQ5Yi1iZmNmNWFhNw==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-07T14:04:17.368720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7490571818028944153:2726]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-07T14:04:17.368794Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTg5OWUyNy02YjIxN2M1Ni0yNDk4MzQ5Yi1iZmNmNWFhNw==, ActorId: [1:7490571818028944153:2726], ActorState: ExecuteState, TraceId: 01jr891x1q9q17a2bq62na2g97, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-07T14:04:17.369007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7490571818028944153:2726]: Pool another_pool_id not found Trying to start YDB, gRPC: 14009, MsgBus: 27939 2025-04-07T14:04:18.964754Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571822319513478:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:19.090654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102d/r3tmp/tmpaI1gL2/pdisk_1.dat 2025-04-07T14:04:19.256136Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:19.282066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:19.282146Z node 2 :HIVE WARN: HIVE# ... k, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.735438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:23.940864Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571822319513478:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:23.940935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:24.888303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571848089318816:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.888552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.915480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.991278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.037605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.116690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.158514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.230105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.316918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571852384286619:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.316994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.317415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571852384286625:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:25.330826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:25.341605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571852384286627:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:27.715824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:27.719652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:27.727856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20257, MsgBus: 22728 2025-04-07T14:04:30.492240Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571873526034244:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:30.492294Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102d/r3tmp/tmpHuLuLq/pdisk_1.dat 2025-04-07T14:04:30.625692Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:30.636792Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:30.636884Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:30.638456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20257, node 3 2025-04-07T14:04:30.700247Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:30.700274Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:30.700282Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:30.700406Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22728 TClient is connected to server localhost:22728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:31.234068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:31.246312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.334243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.559626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.631102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.795253Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571886410937824:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.795325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.824831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.849915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.874571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.898735Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.925235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.951971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.990363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571886410938316:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.990437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.990564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571886410938321:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.993466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:34.000899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571886410938323:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpTx::RollbackManyTx >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] >> KqpYql::Closure [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> KqpService::SwitchCache+UseCache [GOOD] >> KqpService::SwitchCache-UseCache >> KqpOlapIndexes::IndexesInBS [GOOD] >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 26277, MsgBus: 13041 2025-04-07T14:04:26.099198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571855230244285:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:26.099248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001de4/r3tmp/tmp0vQ7p5/pdisk_1.dat 2025-04-07T14:04:26.983908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:26.983995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:26.993467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26277, node 1 2025-04-07T14:04:27.393531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:27.412912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:27.440867Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:27.440902Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:27.492223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:27.492245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:27.492252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:27.492341Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13041 TClient is connected to server localhost:13041 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:28.643575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.668263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:28.688534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:28.926023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:04:29.115922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:29.208437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.914602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571872410115050:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:30.914722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.099302Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571855230244285:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:31.099403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:31.191926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.216542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.241249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.285279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.309570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.340440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:31.406644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571876705082840:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.406723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.406870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571876705082845:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:31.410422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:31.421270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571876705082847:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 13633, MsgBus: 20384 2025-04-07T14:04:33.085190Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571883950340040:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:33.085258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001de4/r3tmp/tmpZdepPk/pdisk_1.dat 2025-04-07T14:04:33.166422Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13633, node 2 2025-04-07T14:04:33.198230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:33.198448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:33.204343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:33.229776Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:33.229802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:33.229808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:33.229895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20384 TClient is connected to server localhost:20384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:33.616763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.625555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.668658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:33.790517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.861693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:35.501451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571892540276327:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.501510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.531842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.552756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.573055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.595204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.621927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.687403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.721199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571892540276823:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.721276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.721483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571892540276828:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.725358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:35.735806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571892540276830:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::ColumnNameConflict >> KqpQueryService::TableSink_DisableSink [GOOD] >> KqpQueryService::TableSink_OlapDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 6766, MsgBus: 5574 2025-04-07T14:04:07.957120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571772543573774:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:07.957155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001057/r3tmp/tmpLH3kjR/pdisk_1.dat 2025-04-07T14:04:08.368608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.379988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.380084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.385728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6766, node 1 2025-04-07T14:04:08.568048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.568075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.568087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.568180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5574 TClient is connected to server localhost:5574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.491761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.553396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:09.574845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:09.782406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.010987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.078404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.869303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571789723444469:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.904334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.191478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.271598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.306643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.350706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.390752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.437528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.523152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794018412267:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.523241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.528830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794018412272:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.536185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.548868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:04:12.549142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571794018412274:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:12.961507Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571772543573774:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:12.961559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:13.611588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.612911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.614565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:16.894212Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034656910, txId: 281474976710704] shutting down Trying to start YDB, gRPC: 23144, MsgBus: 12602 2025-04-07T14:04:18.309311Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571820968358993:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:18.309390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001057/r3tmp/tmpDJXJub/pdisk_1.dat 2025-04-07T14:04:18.549512Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:18.565345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:18.565432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:18.566693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23144, node 2 2025-04-07T14:04:18.699431Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:18.699466Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:18.699476Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:18.699584Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12602 TClient is connected to server localhost:12602 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:04:19.271746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:19.278167Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:19.288079Z node 2 :FLAT_TX_SCHEMESHARD WARN: ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.067200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:24.149131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571846738164976:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.149236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.149527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571846738164981:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:24.153764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:24.182710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571846738164983:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:25.270898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.272378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.274972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:25.739366Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571851033132907:2488], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-04-07T14:04:25.748754Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjBhYTdhZTgtZTlkMTcyOC1kODNlMTcyMS01OWJlOTVhZA==, ActorId: [2:7490571851033132905:2487], ActorState: ExecuteState, TraceId: 01jr8924sge64yw4cvt9y3g46m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:04:27.955562Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571859623067996:2639], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-04-07T14:04:27.973715Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2NlYTVhMmItY2RlY2U3NGQtZTQyMDkxNS1hYjZmMDMwMQ==, ActorId: [2:7490571859623067994:2638], ActorState: ExecuteState, TraceId: 01jr8926zhfnkmcfe415tj889w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 23938, MsgBus: 6601 2025-04-07T14:04:29.704989Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571868089986872:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:29.705057Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001057/r3tmp/tmpTl71JE/pdisk_1.dat 2025-04-07T14:04:29.852425Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:29.866251Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:29.866345Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:29.869694Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23938, node 3 2025-04-07T14:04:29.940053Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:29.940084Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:29.940098Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:29.940220Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6601 TClient is connected to server localhost:6601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:30.467926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.499525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:30.579578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.768787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.853048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.235531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571885269857741:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.235841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.255407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.284844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.313098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.341386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.383139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.426675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.463464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571885269858233:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.463524Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.463631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571885269858238:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.467670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:33.477277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571885269858240:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:34.426613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.428385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.429751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.705465Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571868089986872:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:34.705553Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::CloseConnection [GOOD] >> KqpQueryService::CreateAndDropTopic >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveTableForBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesInBS [GOOD] Test command err: Trying to start YDB, gRPC: 4456, MsgBus: 23908 2025-04-07T14:02:12.725026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571279079290284:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:12.725070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c87/r3tmp/tmpbAoQk1/pdisk_1.dat 2025-04-07T14:02:13.720394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:13.723817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:13.723887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:13.734586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:02:13.757906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4456, node 1 2025-04-07T14:02:14.041151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:14.041174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:14.041180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:14.041276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23908 TClient is connected to server localhost:23908 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:15.370371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:15.411779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:15.431413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:15.715696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.715898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.716147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.716269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.716370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.716467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.716554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:15.716640Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:15.716760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:15.716853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:15.716946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:15.717069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571291964192869:2293];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:15.821462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.821641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.821857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.821949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.822046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.822130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.822207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:15.822282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:15.822379Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:15.822460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:15.822560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:15.822641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571291964192867:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:15.892101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:15.892172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:15.892368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:15.892466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:15.892624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:15.895333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:15.895511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571291964192899:2294];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07 ... 
9551615] shutting down 2025-04-07T14:03:20.815387Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034600000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:21.804720Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034601000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:22.933053Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034602000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:23.968610Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034603000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:25.123987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034604000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:26.658473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034605000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:28.070363Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034607000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:29.336079Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034608001, txId: 18446744073709551615] shutting down 2025-04-07T14:03:30.556252Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034609000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:31.926014Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034611000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:32.713299Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034612000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:33.570401Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034613000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:34.542758Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034614000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:35.683101Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034615000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:36.879284Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034616000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:38.097004Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034617000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:38.943568Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034618000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:39.653919Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034619000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:40.440818Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034620000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:41.542157Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034621000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:42.714589Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034622001, txId: 18446744073709551615] shutting down 2025-04-07T14:03:43.982880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034623002, txId: 18446744073709551615] shutting down 2025-04-07T14:03:45.087840Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:46.109002Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034625000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:47.393384Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034626000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:48.562972Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034628000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:49.795866Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034629000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:51.017194Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034630000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:52.177697Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034631003, txId: 18446744073709551615] shutting down 2025-04-07T14:03:53.342811Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034632000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:54.554772Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034634000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:54.558971Z node 1 :KQP_NODE ERROR: node service cancelled the task, because it executer lost: 1, NodeId: 1, TxId: 281474976710867 2025-04-07T14:03:55.599580Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034635000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:56.580928Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034636000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:57.623343Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034637000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:58.621219Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034638000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:59.611349Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034639000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:00.814319Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034640000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:01.721017Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034641000, txId: 18446744073709551615] shutting down 
2025-04-07T14:04:02.660282Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034642000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:03.476412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034643000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:04.339852Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034643176, txId: 18446744073709551615] shutting down 2025-04-07T14:04:05.125283Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034644000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:05.998682Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034645000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:07.382509Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034646000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:08.675987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034648000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:10.277300Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034649000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:11.630851Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034650001, txId: 18446744073709551615] shutting down 2025-04-07T14:04:12.866735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034652000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:14.112967Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034653000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:15.831198Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034655000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:17.690591Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034656007, txId: 18446744073709551615] shutting down 2025-04-07T14:04:19.155812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034658002, txId: 18446744073709551615] shutting down 2025-04-07T14:04:21.029366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034660000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:23.427375Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034662006, txId: 18446744073709551615] shutting down 2025-04-07T14:04:25.379007Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034664001, txId: 18446744073709551615] shutting down 2025-04-07T14:04:26.728009Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034666000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:28.767899Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034668000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:30.091692Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding 
snapshot; our snapshot: [step: 1744034669000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:31.229735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034670000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:32.358411Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034671000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:33.440611Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034672000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:34.437417Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034673000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:35.410437Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034674000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:36.414770Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034675000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:37.307459Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034676000, txId: 18446744073709551615] shutting down >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 30536, MsgBus: 26658 2025-04-07T14:04:08.107483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571780185207945:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.108845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105d/r3tmp/tmpsd6ebx/pdisk_1.dat 2025-04-07T14:04:08.722654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.783319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.783418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.797941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30536, node 1 2025-04-07T14:04:09.153143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.153166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.153173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.153264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26658 TClient is connected to server localhost:26658 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.095148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.117656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:12.373686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571797365077607:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.373855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.759797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.943989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:04:12.944225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:04:12.944479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:04:12.944591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:04:12.946269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:04:12.946319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:04:12.946500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:04:12.946599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:04:12.946700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:04:12.946806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:04:12.946899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:04:12.947028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:04:12.947132Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:04:12.947232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:04:12.947334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:04:12.947422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490571797365077779:2316];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:04:12.950739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:04:12.950905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:04:12.950992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:04:12.951083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:04:12.951171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:04:12.951262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:04:12.951347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:04:12.951435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571797365077775:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:04:12.991815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:04:12.991930Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:04:12.992125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:04:12.992223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:04:12.992314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:04:12.992403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490571797365077776:2315];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:04:12.992511Z node 1 :TX_COLU ... -04-07T14:04:31.360273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:31.369019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571878039975145:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:04:31.608790Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710661}],"finishes":[{"inc":{"count_include":1},"id":281474976710661}]},"p":{"include":0,"pk":"0;"}}]}; 2025-04-07T14:04:31.618572Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710661}],"finishes":[{"inc":{"count_include":1},"id":281474976710661}]},"p":{"include":0,"pk":"1;"}}]}; 2025-04-07T14:04:31.629071Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710661}],"finishes":[{"inc":{"count_include":1},"id":281474976710661}]},"p":{"include":0,"pk":"2;"}}]}; 2025-04-07T14:04:31.644981Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:31.645193Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:31.645972Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.239134Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7490571873745007499:2309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.249371Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7490571873745007499:2309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.249808Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7490571878039974817:2314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.250090Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7490571878039974817:2314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.254521Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7490571878039974818:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.254889Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7490571878039974818:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.259013Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[3:7490571878039974826:2316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.259374Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7490571878039974826:2316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.264675Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7490571878039974798:2312];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.265031Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7490571878039974798:2312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.270926Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7490571878039974797:2311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.271374Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7490571878039974797:2311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.273822Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7490571878039974801:2313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.274160Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7490571878039974801:2313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.280060Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7490571878039974845:2318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.280470Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7490571878039974845:2318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.293600Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7490571878039974831:2317];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.294114Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7490571878039974831:2317];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.294402Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7490571873745007500:2310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.294744Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[3:7490571873745007500:2310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.461324Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=13;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710666}],"starts":[{"inc":{"count_not_include":1},"id":281474976710666}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710666}]},"p":{"include":2147483647}}]}; 2025-04-07T14:04:32.475379Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.475451Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.475805Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.475848Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.475880Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.476200Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.476276Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.476294Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.476352Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.476583Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.925861Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.929740Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.929914Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930015Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930403Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930491Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930570Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930641Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930714Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:32.930947Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:04:33.016491Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> GroupWriteTest::WithRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 18646, MsgBus: 15715 2025-04-07T14:04:08.191053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777360453365:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.191259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001038/r3tmp/tmpgguhEe/pdisk_1.dat 2025-04-07T14:04:08.733935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.759169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.764847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.767659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18646, node 1 2025-04-07T14:04:08.953241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.953268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.953274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.953370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15715 TClient is connected to server localhost:15715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.940512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.957434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:12.035846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794540323018:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.035971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.278565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.463503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:04:12.463686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:04:12.463934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:04:12.464038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:04:12.464124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:04:12.464222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:04:12.464320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:04:12.464409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:04:12.464494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:04:12.464588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:04:12.470227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:04:12.470323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:04:12.470537Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:04:12.470631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:04:12.470719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:04:12.470805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:04:12.470886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:04:12.470972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:04:12.471056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:04:12.471137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:04:12.471231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:04:12.471319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490571794540323148:2310];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:04:12.472796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:04:12.472946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571794540323149:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:04:12.478376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:12.478432Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:12.478516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:12.478540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:12.478685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:12.478706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:12.478802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:12.478838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;eve ... =TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.368493Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:37.369062Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:37.369099Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:37.369168Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:37.369197Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:37.369383Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:37.369414Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:37.369504Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:37.369539Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:37.369592Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:37.369618Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:37.369651Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:37.369678Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:37.369961Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:37.370003Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:37.370147Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:37.370177Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:37.370284Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:37.370315Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:37.370470Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:37.370502Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:37.370591Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.370615Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:37.375595Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:37.375641Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:37.375747Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:37.375778Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:37.375944Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:37.375975Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:37.376073Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:37.376106Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:37.376182Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:37.376212Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:37.376246Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:37.376275Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:37.376601Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:37.376641Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:37.376854Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:37.376887Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:37.377016Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:37.377047Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:37.377220Z 
node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:37.377250Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:37.377399Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.377429Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:37.487265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571900978944601:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.487344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.487361Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571900978944606:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.490605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:37.498588Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571900978944608:2382], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:04:37.671330Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490571900978944684:2377] TxId: 281474976715661. Ctx: { TraceId: 01jr892gqc88va5hfp3yfmaraf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGMxNzdjMmYtYjA2YjNkLTJjN2YwMDNmLTU0ZDc1ZDhk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-04-07T14:04:37.677289Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGMxNzdjMmYtYjA2YjNkLTJjN2YwMDNmLTU0ZDc1ZDhk, ActorId: [3:7490571900978944599:2377], ActorState: ExecuteState, TraceId: 01jr892gqc88va5hfp3yfmaraf, Create QueryResponse for error on request, msg: >> KqpScripting::ExecuteYqlScriptPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 18389, MsgBus: 17064 2025-04-07T14:04:35.423392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571893593459704:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:35.423546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dcc/r3tmp/tmpcz7Cjx/pdisk_1.dat 2025-04-07T14:04:35.697768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18389, node 1 2025-04-07T14:04:35.703386Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:35.734508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:35.734535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:35.734542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:35.734643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:35.797076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:35.797172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:35.798583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17064 TClient is connected to server localhost:17064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:36.209256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:37.856808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571902183394925:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.856918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.035239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.128045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571906478362322:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.128111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.128174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571906478362327:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.131204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:38.140376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571906478362329:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/benchmark >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce |91.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/benchmark |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/benchmark ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15641, MsgBus: 31878 2025-04-07T14:04:08.274096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777467009890:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.274145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001040/r3tmp/tmpSb2CY9/pdisk_1.dat 2025-04-07T14:04:08.795775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.804709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.804804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.810080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15641, node 1 2025-04-07T14:04:08.999878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.999898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.999906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.999965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31878 TClient is connected to server localhost:31878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.579736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:09.600910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:09.611182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.782727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.943941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.027373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.726846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790351913484:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.726952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.113488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.148564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.201594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.275698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.301365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.339270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.378900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794646881279:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.378918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794646881284:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.378955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.382541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.396077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571794646881286:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.275932Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571777467009890:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.276000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:23.785118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:23.785146Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 22999, MsgBus: 15496 2025-04-07T14:04:25.678008Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571850309023386:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:25.678062Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001040/r3tmp/tmp5kHrT2/pdisk_1.dat 2025-04-07T14:04:26.100785Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:26.190206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:26.190314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:26.195514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22999, node 2 2025-04-07T14:04:26.573349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:26.573385Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:26.573398Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:26.573524Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15496 TClient is connected to server localhost:15496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:28.125817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:28.159841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.264962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.467306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:28.581288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:30.680779Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;even ... 25-04-07T14:04:37.116335Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:37.116582Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:37.116618Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:37.116788Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:37.116825Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:37.116994Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:37.117020Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:37.117085Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.117102Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:37.126038Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:37.126077Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:37.126142Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:37.126159Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:37.126368Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:37.126424Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:37.126525Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:37.126571Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:37.126632Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:37.126666Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:37.126727Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:37.126758Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:37.127117Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:37.127168Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:37.127344Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:37.127376Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:37.127502Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:37.127526Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:37.127649Z 
node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:37.127670Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:37.127764Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.127784Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:37.127931Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:04:37.127989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:04:37.128092Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:04:37.128126Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:04:37.128291Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:04:37.128325Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:04:37.128411Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:04:37.128457Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:04:37.128538Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:04:37.128580Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:04:37.128647Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:04:37.128701Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:04:37.129054Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:04:37.129107Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:04:37.129279Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:04:37.129319Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:04:37.129469Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:04:37.129507Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:04:37.129667Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:04:37.129705Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:04:37.129812Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:04:37.129842Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:04:38.082222Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571886521402117:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:38.082305Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::TableNameConflict |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> GroupWriteTest::Simple >> KqpYql::TableUseBeforeCreate |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 61492, MsgBus: 14980 2025-04-07T14:04:30.385673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571872361032799:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:30.385742Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dd5/r3tmp/tmpeFlubg/pdisk_1.dat 2025-04-07T14:04:30.767947Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:30.790064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:30.790159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:30.792061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61492, node 1 2025-04-07T14:04:30.854626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:30.854656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:30.854663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:30.854768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14980 TClient is connected to server localhost:14980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:31.412437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.427855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:31.444438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.598956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:31.733911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:31.814215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.339193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571885245936414:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.339280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.604991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.626624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.649328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.669046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.689491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.717846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:33.753519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571885245936908:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.753566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.753601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571885245936913:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:33.756705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:33.764146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571885245936915:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:34.544042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.045223Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034675075, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 25119, MsgBus: 28244 2025-04-07T14:04:35.644172Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571895901315469:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:35.644254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dd5/r3tmp/tmpr95ohF/pdisk_1.dat 2025-04-07T14:04:35.729848Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:35.752883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:35.752982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:35.754707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25119, node 2 2025-04-07T14:04:35.791764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:35.791797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:35.791804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:35.791922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28244 TClient is connected to server localhost:28244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:36.099281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:36.113848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:36.185944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:36.318293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:36.390450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.250903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571908786219079:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.250996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.270054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.314602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.352001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.379047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.408369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.435416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.475409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571908786219574:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.475510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.475528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571908786219579:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:38.478554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:38.486336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571908786219581:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpYql::TableRange [GOOD] >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 26169, MsgBus: 20542 2025-04-07T14:04:32.218815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571879800434933:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:32.218888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dce/r3tmp/tmpWBIY4a/pdisk_1.dat 2025-04-07T14:04:32.478428Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26169, node 1 2025-04-07T14:04:32.549374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:32.549417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:32.549428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:32.549533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:32.575330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:32.575452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:32.577203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20542 TClient is connected to server localhost:20542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:33.013939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:33.031610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.139602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.268159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:33.325397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:34.747450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571888390371158:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.747555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.985388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.007458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.026731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.052976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.077218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.102116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.177880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571892685338953:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.177928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.178025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571892685338958:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:35.180938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:35.188731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571892685338960:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:36.503086Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034676524, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 10626, MsgBus: 3056 2025-04-07T14:04:37.153624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571903427098671:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:37.154252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dce/r3tmp/tmpqeuO1M/pdisk_1.dat 2025-04-07T14:04:37.231252Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:37.257451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:37.257507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10626, node 2 2025-04-07T14:04:37.258978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:37.294251Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:37.294276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:37.294282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:37.294378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3056 TClient is connected to server localhost:3056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:37.687869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:37.695936Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:37.706056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:37.763132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:37.904341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:37.978475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.951617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571912017034951:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:39.951709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:39.988758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.016911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.039198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.062755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.131083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.164962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.239026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571916312002748:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.239107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.239252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571916312002753:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.242404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:40.251608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571916312002755:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:41.376801Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034681417, txId: 281474976710671] shutting down 2025-04-07T14:04:41.516895Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034681550, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 22310, MsgBus: 23564 2025-04-07T14:04:38.263693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571906192822819:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:38.263763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dc9/r3tmp/tmpy4rz3u/pdisk_1.dat 2025-04-07T14:04:38.619824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:38.655796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:38.655900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:38.657728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22310, node 1 2025-04-07T14:04:38.693457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:38.693485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:38.693491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:38.693579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23564 TClient is connected to server localhost:23564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:39.123648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:39.133484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:39.147576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.271984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.415252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.490002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:40.941533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571914782759116:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.941685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.174660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.196081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.220313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.241320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.265712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.294011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.396597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571919077726913:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.396687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.396820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571919077726918:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.400024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.409486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571919077726920:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:04:41.015106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:04:41.015224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:41.015281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:04:41.015324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:04:41.023620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:04:41.023692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:04:41.023789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:41.037139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:04:41.142895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:41.142963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:41.172611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:04:41.172766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:04:41.175937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:04:41.187579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:04:41.193390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:04:41.205216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.205591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:04:41.223813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:04:41.233312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:04:41.233362Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:04:41.233552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.240020Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:04:41.386696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:04:41.386942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.387173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:04:41.387498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:04:41.387553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.390265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.390516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:04:41.390688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.390834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:04:41.390884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:04:41.390933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:04:41.392840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.392889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:04:41.392922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:04:41.394625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.394665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.394704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.394767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.404414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:04:41.406769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:04:41.406928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:04:41.407776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.407879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:04:41.407922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.408129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:04:41.408169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.408316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:04:41.408387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:04:41.410648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:04:41.410692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:04:41.410846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.410899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:04:41.411094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.411131Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:04:41.411220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:04:41.411263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.411307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:04:41.411339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.411367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:04:41.411393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-07T14:04:41.411459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:04:41.411497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:04:41.411525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:04:41.413403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:04:41.413500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:04:41.413540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:04:41.413575Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:04:41.413635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:04:41.413728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... bRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409548 2025-04-07T14:04:42.978795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:42.978905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-07T14:04:42.979214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409547 disconnected 2025-04-07T14:04:42.979312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409548 disconnected 2025-04-07T14:04:42.982019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 105:2 2025-04-07T14:04:42.982074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:04:42.982277Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2025-04-07T14:04:42.982403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:04:42.982570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 Forgetting tablet 72075186233409546 2025-04-07T14:04:42.983844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:04:42.983898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: 
satisfy waiter [1:1398:3170] 2025-04-07T14:04:42.984212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2025-04-07T14:04:42.984511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:04:42.984562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-07T14:04:42.984628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-04-07T14:04:42.984658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-04-07T14:04:42.984703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-04-07T14:04:42.984731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-04-07T14:04:42.984759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-07T14:04:42.984780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-07T14:04:42.984803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-04-07T14:04:42.984826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-04-07T14:04:42.984848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:04:42.984975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:04:42.985007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:04:42.985098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:04:42.985114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-04-07T14:04:42.986376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:04:42.986418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-04-07T14:04:42.986535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-07T14:04:42.986865Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:04:42.986902Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-07T14:04:42.986930Z node 1 :HIVE 
INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-07T14:04:42.987283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:42.987400Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 141us result status StatusPathDoesNotExist 2025-04-07T14:04:42.987496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:04:42.987834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:42.988043Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 211us result status StatusSuccess 2025-04-07T14:04:42.988323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:04:42.988941Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:42.989045Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 119us result status StatusSuccess 2025-04-07T14:04:42.989262Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 19 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:04:41.015121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:04:41.015244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:41.015279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:04:41.015311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:04:41.023624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:04:41.023695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:04:41.023800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:04:41.037100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:04:41.142560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:41.142643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:41.172530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:04:41.172716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:04:41.176128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:04:41.186943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:04:41.193405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:04:41.205214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.205594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:04:41.222396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.233229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:04:41.233284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:04:41.233433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:04:41.233583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.240019Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:04:41.368329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:04:41.376478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.384594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:04:41.386585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:04:41.386694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.389782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.390508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:04:41.390755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.390828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:04:41.390926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:04:41.390982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:04:41.392819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.392868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:04:41.392896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:04:41.394437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.394484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.394515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.394591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.398532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:04:41.405622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:04:41.406707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000001 2025-04-07T14:04:41.407559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:04:41.407676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:04:41.407711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.407972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:04:41.408020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:04:41.408268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:04:41.408336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:04:41.410688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:04:41.410729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:04:41.410869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:04:41.410905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:04:41.411105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:04:41.411149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:04:41.411231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:04:41.411257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.411297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:04:41.411335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:04:41.411364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:04:41.411401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:04:41.411454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:04:41.411488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:04:41.411518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:04:41.413413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-04-07T14:04:41.413513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:04:41.413549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:04:41.413594Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:04:41.413631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:04:41.413714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 72075186233409546 TxId: 102 TxPartId: 1 2025-04-07T14:04:43.157265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 102 TxPartId: 1 2025-04-07T14:04:43.157317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2025-04-07T14:04:43.157426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:04:43.157474Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-04-07T14:04:43.159235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-07T14:04:43.159403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-07T14:04:43.159447Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:04:43.159497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T14:04:43.159599Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-07T14:04:43.159632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-07T14:04:43.159668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-07T14:04:43.159731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:377:2342] message: TxId: 102 2025-04-07T14:04:43.159778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-07T14:04:43.159824Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:04:43.159874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:04:43.160001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:04:43.160036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:04:43.160078Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-07T14:04:43.160097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-07T14:04:43.160137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T14:04:43.160158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:04:43.160465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:04:43.160511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:04:43.160591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:04:43.160628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:04:43.160656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:04:43.162449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:04:43.162497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:464:2422] 2025-04-07T14:04:43.162596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-07T14:04:43.165993Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:43.166189Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 229us result status StatusPathDoesNotExist 2025-04-07T14:04:43.166338Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:04:43.166727Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:43.166881Z node 2 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 181us result status StatusPathDoesNotExist 2025-04-07T14:04:43.167020Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:04:43.167387Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:04:43.167607Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 233us result status StatusSuccess 2025-04-07T14:04:43.167987Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:04:43.168448Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:04:43.168610Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 180us result status StatusSuccess 2025-04-07T14:04:43.168865Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup [GOOD] >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryService::DdlSecret [GOOD] >> KqpQueryService::DdlTx >> KqpYql::TableNameConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-04-07T14:04:31.444108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:31.444170Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:32.104418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader 
for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2025-04-07T14:04:32.260374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:32.260432Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2025-04-07T14:04:32.349744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:04:32.365694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] recipient: [1:236:2213] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] recipient: [1:236:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:250:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:250:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:251:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [1:249:2219] sender: [1:252:2067] recipient: [1:236:2213] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-07T14:04:32.424851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:285:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:249:2219] sender: [1:286:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-07T14:04:32.535239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for 
TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:338:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:338:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:339:2286] sender: [1:340:2067] recipient: [1:331:2282] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [1:339:2286] sender: [1:355:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T14:04:32.837591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:409:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:409:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:414:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:414:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:416:2333] sender: [1:417:2067] recipient: [1:409:2329] Leader for TabletID 72075186233409549 is [1:416:2333] sender: [1:418:2067] recipient: [1:24:2071] 2025-04-07T14:04:32.883288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:32.883363Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2025-04-07T14:04:32.913980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:04:32.914037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:04:32.914384Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-07T14:04:32.914558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:04:32.953265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-07T14:04:32.953608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-07T14:04:33.000798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-04-07T14:04:33.105301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:610:2496] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:610:2496] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:617:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:617:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:618:2500] sender: [1:619:2067] recipient: [1:610:2496] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 Leader for TabletID 72075186233409550 is [1:618:2500] sender: [1:636:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 110 2025-04-07T14:04:33.455182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.455238Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-07T14:04:33.504276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:175:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:174:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: 
[2:178:2067] recipient: [2:174:2169] 2025-04-07T14:04:33.546887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.546962Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2025-04-07T14:04:33.573310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:04:33.580482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:244:2067] recipient: [2:237:2213] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:244:2067] recipient: [2:237:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:252:2067] recipient: [2:237:2213] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-07T14:04:33.594025Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-07T14:04:33.635569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] 
recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:338:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:338:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:339:2286] sender: [2:340:2067] recipient: [2:331:2282] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [2:339:2286] sender: [2:355:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T14:04:33.753804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:414:2067] recipient: [2:409:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:414:2067] recipient: [2:409:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:417:2333] sender: [2:418:2067] recipient: [2:409:2329] 2025-04-07T14:04:33.788290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:04:33.788338Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:417:2333] sender: [2:444:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-07T14:04:33.826870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:04:33.826924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:04:33.827214Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-07T14:04:33.827333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:04:33.844161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-07T14:04:33.844295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-07T14:04:33.891264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:548:2067] recipient: [2:543:2435] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:548:2067] recipient: [2:543:2435] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:550:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:550:2067] recipient: 
[2:24:2071] Leader for TabletID 72075186233409550 is [2:551:2439] sender: [2:552:2067] recipient: [2:543:2435] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 Leader for TabletID 72075186233409550 is [2:551:2439] sender: [2:579:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 108 2025-04-07T14:04:34.886859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:34.886960Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:34.939553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:34.939667Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] Test command err: Trying to start YDB, gRPC: 24063, MsgBus: 27982 2025-04-07T14:04:08.774190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571778052011798:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.774226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001019/r3tmp/tmpdJPHNy/pdisk_1.dat 2025-04-07T14:04:09.363976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.405223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.405314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.406658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24063, node 1 2025-04-07T14:04:09.612771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.612793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.612798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.612885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27982 TClient is connected to server localhost:27982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:04:10.399775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.443357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.596518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.820386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.882233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:13.133390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795231882506:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.144582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.212304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.265333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.311332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.359116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.408526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.486630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.555703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799526850303:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.555782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.556179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571799526850308:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.559570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.579089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571799526850310:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.776351Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571778052011798:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.776440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:15.044239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.053777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:15.056832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7321, MsgBus: 17307 2025-04-07T14:04:19.106372Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571826883686338:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:19.106437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001019/r3tmp/tmp0Sn93g/pdisk_1.dat 2025-04-07T14:04:19.260112Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:19.296468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:19.296569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:19.299596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7321, node 2 2025-04-07T14:04:19.522891Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:19.522917Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:19.522923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:19.523019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17307 TClient is connected to server localhost:17307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:20.171009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.184867Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:20.195461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.284015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.502280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propo ... t schemeshard: 72057594046644480 2025-04-07T14:04:34.311483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.339028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.364166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.396275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:34.429327Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571888627061835:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.429424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.429461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571888627061840:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.432801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:34.441538Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571888627061842:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:35.293130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.294775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.295996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.571516Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTUwYWQ1MzUtOGE4MDI4NzYtNmYyZjRhOTctZDE4Y2U3OGM=, ActorId: [3:7490571892922029729:2481], ActorState: ExecuteState, TraceId: 01jr892ejr3m646t15y2rxqhsq, Create QueryResponse for error on request, msg: 2025-04-07T14:04:36.238902Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571875742157750:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:36.245318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6855, MsgBus: 22988 2025-04-07T14:04:37.586811Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490571900913935034:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:37.586857Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001019/r3tmp/tmpXpIqjq/pdisk_1.dat 2025-04-07T14:04:37.654525Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6855, node 4 2025-04-07T14:04:37.688154Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:37.688294Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:37.690705Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:37.721209Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:37.721233Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:37.721241Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:37.721342Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22988 TClient is connected to server localhost:22988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:04:38.167164Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:38.177992Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.234642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.377161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.449476Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:40.861858Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571913798838640:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.861994Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.981830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.010893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.037781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.066140Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.097337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.147096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.183938Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571918093806430:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.184022Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571918093806435:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.184057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.187516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.196869Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490571918093806437:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:42.333773Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.335781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.337341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.593623Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490571900913935034:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:42.593671Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 25383, MsgBus: 3985 2025-04-07T14:04:41.177157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571922146182652:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:41.177215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d76/r3tmp/tmpNaPsZh/pdisk_1.dat 2025-04-07T14:04:41.447832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25383, node 1 2025-04-07T14:04:41.525732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:41.525753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:41.525758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:41.525826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:41.550665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:41.550798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:41.552320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3985 TClient is connected to server localhost:3985 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:41.948272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.969345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.089785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.198387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.250595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.725493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571930736118962:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.725581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.943939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.965878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.988811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.011042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.033557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.057311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.089258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571935031086750:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.089321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571935031086755:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.089324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.091900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:44.098753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571935031086757:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 17296, MsgBus: 3341 2025-04-07T14:04:41.779886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571918124224931:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:41.779935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d2e/r3tmp/tmpr2kb1y/pdisk_1.dat 2025-04-07T14:04:42.081083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17296, node 1 2025-04-07T14:04:42.144788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:42.144815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:42.144831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:42.144961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:42.146938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:42.147079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:42.148808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3341 TClient is connected to server localhost:3341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:42.525919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.552394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.658413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:42.780933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:42.830906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:44.095375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571931009128539:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.095495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.296102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.320514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.341478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.365720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.387672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.420437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.449769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571931009129030:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.449831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.449843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571931009129035:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.452753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:44.460339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571931009129037:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 >> KqpYql::ColumnTypeMismatch [GOOD] >> KqpQueryService::CreateAndAlterTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 2239, MsgBus: 64522 2025-04-07T14:04:08.426463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571776918416169:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.428218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104b/r3tmp/tmpW4d4QC/pdisk_1.dat 2025-04-07T14:04:09.075076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:09.149972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:09.150054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:09.157718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2239, node 1 2025-04-07T14:04:09.347317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.347334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.347344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.347443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64522 TClient is connected to server localhost:64522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.223961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:10.245965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:10.267221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.516856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:10.709024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:04:10.840509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.734335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794098286901:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.734580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.105521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.154658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.202522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.233236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.268987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.323561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:13.425454Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571776918416169:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.425526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:13.449107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798393254701:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.449463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.449776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571798393254706:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:13.453777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:13.469673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571798393254708:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:14.832572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.834716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:14.837181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:18.161500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034658184, txId: 281474976710705] shutting down Trying to start YDB, gRPC: 25460, MsgBus: 17975 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104b/r3tmp/tmpPrynQ5/pdisk_1.dat 2025-04-07T14:04:19.416882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:19.436484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:19.462151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:19.462230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:19.463339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25460, node 2 2025-04-07T14:04:19.674276Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:19.674300Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:19.674307Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:19.674407Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17975 TClient is connected to server localhost:17975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:04:20.392240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:20.443389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.569509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.874952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:34.444862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:34.453345Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571889885826337:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:35.357813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.359548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.360619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:35.643329Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzA2MjUyYTktNzg4OGI1ZWYtYzVmMDRiNWEtYjI0OWRhZDk=, ActorId: [3:7490571894180794212:2481], ActorState: ExecuteState, TraceId: 01jr892ems0935ntn2kz6gy0g7, Create QueryResponse for error on request, msg: 2025-04-07T14:04:35.877883Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571872705954947:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:35.886359Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24412, MsgBus: 28008 2025-04-07T14:04:37.681563Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490571901748649448:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:37.681651Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00104b/r3tmp/tmp9VX50c/pdisk_1.dat 2025-04-07T14:04:37.792529Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:37.799384Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:37.799472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:37.800934Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24412, node 4 2025-04-07T14:04:37.840845Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:37.840868Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:37.840873Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:37.840969Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28008 TClient is connected to server localhost:28008 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:38.240096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.247159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.311881Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.499160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.587227Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:40.863045Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571914633553036:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.863127Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.902401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.929993Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.955727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.993221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.020113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.055409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.099779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571918928520828:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.099939Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.100287Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490571918928520833:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.103623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.115640Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490571918928520835:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:42.428650Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.430062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.431811Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.681740Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490571901748649448:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:42.681798Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:43.916604Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: ccaae19a-3c88858f-c894337-200eee1d, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=MjZlYTA0OGQtMmJkN2ZlMjgtN2I0NGNjNi1jZTRiZjYzNg==, TxId: 2025-04-07T14:04:44.856585Z node 4 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ccaae19a-3c88858f-c894337-200eee1d, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=NmExYWUwMDktY2RmYmM2OTMtYWNjMWYzODctNDc3ZmJiN2Y=, TxId: 2025-04-07T14:04:44.981154Z node 4 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: ccaae19a-3c88858f-c894337-200eee1d, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-07T14:04:44.994588Z node 4 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ccaae19a-3c88858f-c894337-200eee1d, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=ZTQ0Y2VlZTctN2NhY2QyMDQtYWFmN2I5NjItYzBiMzMyMmI=, TxId: 2025-04-07T14:04:44.994659Z node 4 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: ccaae19a-3c88858f-c894337-200eee1d, check lease failed 2025-04-07T14:04:45.288856Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: ccaae19a-3c88858f-c894337-200eee1d, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=4&id=ZGQxMTA3MDQtM2VlYTZiZjEtOTJiNGE0MWYtY2EzMzJhY2Y=, TxId: |91.3%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 4408, MsgBus: 30943 2025-04-07T14:04:38.877666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571905519389290:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:38.878108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dc0/r3tmp/tmpPcEYOf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4408, node 1 2025-04-07T14:04:39.182840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:39.182862Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:39.184797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:39.219882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:39.219904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:39.219929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:39.220032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:39.249626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:39.249743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:39.251427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30943 TClient is connected to server localhost:30943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:04:39.694870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.715607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.838357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:39.977386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:40.027009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.477561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571918404292760:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.477689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.697212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.720036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.739921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.764331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.789080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.813622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.847933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571918404293251:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.847997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.848057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571918404293256:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.851224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.859430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571918404293258:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 14050, MsgBus: 4248 2025-04-07T14:04:43.131143Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571928296861571:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:43.131208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dc0/r3tmp/tmppM3vhZ/pdisk_1.dat 2025-04-07T14:04:43.198994Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14050, node 2 2025-04-07T14:04:43.231242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:43.231321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:43.232943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:43.240840Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:43.240863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:43.240869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:43.240943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4248 TClient is connected to server localhost:4248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:43.617840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.635443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.682994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.817806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:43.887930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:45.506389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571936886797848:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.506448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.532596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.557496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.581353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.601191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.624601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.653794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.685476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571936886798339:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.685546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.685755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571936886798344:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.688968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:45.697240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571936886798346:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:46.444039Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571941181765951:2463], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:04:46.445892Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NThiODgxNGQtMTU0NmFkNDYtOTdkNmY4ZjctMjdlNzNhYzg=, ActorId: [2:7490571941181765943:2458], ActorState: ExecuteState, TraceId: 01jr892sew7ezkw7xdr2k8anzc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateAndAlterTopic [GOOD] Test command err: Trying to start YDB, gRPC: 25128, MsgBus: 24345 2025-04-07T14:04:18.401240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571820292122425:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:18.401494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100e/r3tmp/tmpxN9mVI/pdisk_1.dat 2025-04-07T14:04:19.111545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:19.115400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:19.115494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:19.118580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25128, node 1 2025-04-07T14:04:19.253107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:19.253133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:19.253139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:19.253212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24345 TClient is connected to server localhost:24345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:19.877717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:19.905196Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:19.919682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:20.065703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.228239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:20.334688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:23.435643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571841766960488:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.435791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571820292122425:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:23.435859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.435907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:23.500409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.540408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.583388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.627528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.658800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.730526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.824885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571841766960990:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.825038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.828182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571841766960995:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:23.832121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:23.869969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571841766960997:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:25.210367Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.238158Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.249736Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.269038Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.278713Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.495752Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.535485Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.549784Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.569665Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.605661Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.606283Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490571850356896004:2490] TxId: 281474976710671. Ctx: { TraceId: 01jr8925380ps7mf44qfr4xg9f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:04:25.625208Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571850356896017:2495], TxId: 281474976710671, task: 4. Ctx: { TraceId : 01jr8925380ps7mf44qfr4xg9f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571850356896004:2490], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:25.626154Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571850356896016:2494], TxId: 281474976710671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=. CustomerSuppliedId : . TraceId : 01jr8925380ps7mf44qfr4xg9f. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571850356896004:2490], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:25.626353Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571850356896014:2492], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jr8925380ps7mf44qfr4xg9f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571850356896004:2490], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:25.626495Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571850356896018:2496], TxId: 281474976710671, task: 5. Ctx: { TraceId : 01jr8925380ps7mf44qfr4xg9f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571850356896004:2490], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:25.636833Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:25.637341Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490571850356896037:2498] TxId: 281474976710672. Ctx: { TraceId: 01jr89254w1d9fqmrfyzzntvbw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY5MDA4MzktMmUwMTUyNjgtODUwYTlmZTgtZGYxYjllZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:04:25.637663Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTY5MDA4MzktMmUwMTUyNjgtODUwYTlmZTgtZGYxYjllZTg=, ActorId: [1:7490571850356896034:2498], ActorState: ExecuteState, TraceId: 01jr89254w1d9fqmrfyzzntvbw, Create QueryResponse for error on request, msg: 2025-04-07T14:04:25.660228Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571850356896015:2493], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NGY3ODFmY2UtZWQxMWFlZmEtNjFiY2ZlMzctNTcxMmJkMDk=. TraceId : 01jr8925380ps7mf44qfr4xg9f. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: ... nce;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100e/r3tmp/tmpzCdYUv/pdisk_1.dat 2025-04-07T14:04:42.712444Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:42.730061Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:42.730122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:42.731328Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3902, node 3 2025-04-07T14:04:42.769089Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:42.769109Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:42.769117Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:42.769185Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19784 TClient is connected to server localhost:19784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:43.111371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.118290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:43.162882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.299212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.371516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:45.053417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571936183598761:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.053486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.113252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.136292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.161669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.187275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.213662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.241596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:45.275565Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571936183599253:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.275657Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.275904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571936183599258:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:45.279532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:45.288023Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571936183599260:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:46.315809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:04:46.371676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:04:46.420300Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490571940478567686:2585], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 2025-04-07T14:04:46.420515Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzAyMTdhNmEtMmUwMzBhZDQtMzY4M2EzOWYtNDE1NjUxZjA=, ActorId: [3:7490571940478566876:2463], ActorState: ExecuteState, TraceId: 01jr892se834tcfgk90fx80bxh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Query failed, status: GENERIC_ERROR:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034685330, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683559, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683328, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685281, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685309, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685176, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685204, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685225, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683405, tx_id: 281474976715660 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1744034686324, tx_id: 281474976715671 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685253, tx_id: 281474976715665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683202, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744034685330, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683559, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683328, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685281, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685309, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685176, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685204, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034685225, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683405, tx_id: 281474976715660 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1744034686324, tx_id: 281474976715671 } } Scheme entry: { name: Test, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1744034685253, tx_id: 281474976715665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744034683202, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> GroupWriteTest::WithRead [GOOD] >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] Test command err: Trying to start YDB, gRPC: 16861, MsgBus: 18047 2025-04-07T14:00:30.469132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570843505629794:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:30.469343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc4/r3tmp/tmpYmrzOR/pdisk_1.dat 2025-04-07T14:00:31.153197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:31.170974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:31.171069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:31.174228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16861, node 1 2025-04-07T14:00:31.306655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:31.306684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:31.306692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:31.306817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18047 TClient is connected to server localhost:18047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:32.015772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:33.132978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:35.460815Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570843505629794:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:35.460886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:39.993334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:39.993614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:39.993908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:39.994045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:39.994167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:39.994306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:39.994417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:39.994538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:39.994722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:39.994843Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:39.994982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:39.995083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7490570882160340176:2319];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:39.998441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:39.998514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:39.998717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:39.998998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:39.999103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:39.999196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:39.999305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:39.999416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:39.999506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:39.999614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:39.999721Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:39.999816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7490570882160340166:2314];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:40.074074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:40.074134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:40.074335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:40.074452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:40.074634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:40.074737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:40.074845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7490570882160340217:2326];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNorm ... 
ion_start;last_saved_id=16; 2025-04-07T14:04:35.006081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038598;self_id=[1:7490571887183470146:12564];tablet_id=72075186224038598;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.031117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038431;self_id=[1:7490571887183469967:12521];tablet_id=72075186224038431;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.075173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038642;self_id=[1:7490571887183470027:12537];tablet_id=72075186224038642;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.106353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038584;self_id=[1:7490571887183470226:12581];tablet_id=72075186224038584;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.112301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038839;self_id=[1:7490571887183470291:12594];tablet_id=72075186224038839;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.147663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038405;self_id=[1:7490571887183470019:12535];tablet_id=72075186224038405;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.157319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038239;self_id=[1:7490571887183470345:12604];tablet_id=72075186224038239;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.184468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038506;self_id=[1:7490571887183470047:12539];tablet_id=72075186224038506;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.197410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038292;self_id=[1:7490571887183470180:12573];tablet_id=72075186224038292;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.230736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038895;self_id=[1:7490571887183470626:12667];tablet_id=72075186224038895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.240518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7490571887183470455:12635];tablet_id=72075186224038001;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.273142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038813;self_id=[1:7490571887183470260:12590];tablet_id=72075186224038813;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.284491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038135;self_id=[1:7490571887183470279:12592];tablet_id=72075186224038135;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.306948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038667;self_id=[1:7490571887183470102:12555];tablet_id=72075186224038667;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.329819Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038341;self_id=[1:7490571887183470437:12633];tablet_id=72075186224038341;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.346685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038265;self_id=[1:7490571887183470758:12698];tablet_id=72075186224038265;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.381903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038285;self_id=[1:7490571887183470502:12643];tablet_id=72075186224038285;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.387262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038863;self_id=[1:7490571887183470669:12673];tablet_id=72075186224038863;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.415156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038226;self_id=[1:7490571887183470309:12595];tablet_id=72075186224038226;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.426599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038197;self_id=[1:7490571887183470692:12675];tablet_id=72075186224038197;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.461669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038653;self_id=[1:7490571887183470634:12668];tablet_id=72075186224038653;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.469834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038410;self_id=[1:7490571887183470553:12651];tablet_id=72075186224038410;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.520911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038163;self_id=[1:7490571887183470351:12605];tablet_id=72075186224038163;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.547414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038188;self_id=[1:7490571887183470481:12641];tablet_id=72075186224038188;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.570018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038473;self_id=[1:7490571887183470659:12671];tablet_id=72075186224038473;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.600799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038444;self_id=[1:7490571887183470799:12707];tablet_id=72075186224038444;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.627188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038520;self_id=[1:7490571891478438112:12711];tablet_id=72075186224038520;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.637327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038780;self_id=[1:7490571887183470395:12619];tablet_id=72075186224038780;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.689735Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038328;self_id=[1:7490571887183470514:12645];tablet_id=72075186224038328;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.697140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038515;self_id=[1:7490571887183470602:12662];tablet_id=72075186224038515;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.734821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038453;self_id=[1:7490571887183470575:12656];tablet_id=72075186224038453;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.760755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038591;self_id=[1:7490571887183470531:12646];tablet_id=72075186224038591;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.775250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038887;self_id=[1:7490571887183470698:12676];tablet_id=72075186224038887;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.809694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038745;self_id=[1:7490571887183470772:12699];tablet_id=72075186224038745;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.825915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038477;self_id=[1:7490571891478438145:12722];tablet_id=72075186224038477;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.845489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038524;self_id=[1:7490571891478438192:12733];tablet_id=72075186224038524;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.865127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038823;self_id=[1:7490571891478438204:12735];tablet_id=72075186224038823;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:35.907267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7490571891478438165:12727];tablet_id=72075186224038023;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:04:36.731753Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr891axtecmwswmm1h5mt3e0", SessionId: ydb://session/3?node_id=1&id=NGU0NmE3MjAtYzYwNDEwODItNzRjZDJkMjItZTM0Mzk1NzE=, Slow query, duration: 35.783309s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n SELECT\n COUNT(*)\n FROM `/Root/olapStore/olapTable`\n ", parameters: 0b 2025-04-07T14:04:36.732293Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034675000, txId: 18446744073709551615] shutting down [[53u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[53u]]|[[100u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABEEBC 4. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:564: Execute_ @ 0x17ABEEBC 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FD1D8EFFD8F 18. ??:0: ?? @ 0x7FD1D8EFFE3F 19. ??:0: ?? @ 0x15851028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 6074, MsgBus: 3532 2025-04-07T14:04:34.650044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571891443784343:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:34.650119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dcd/r3tmp/tmpDvpbB3/pdisk_1.dat 2025-04-07T14:04:34.916867Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6074, node 1 2025-04-07T14:04:34.980434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:34.980458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:34.980465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:34.980600Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:34.991141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:34.991258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:34.992949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3532 TClient is connected to server localhost:3532 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:35.394428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:35.413436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:35.514516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:35.645030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:35.720264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:37.066733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571904328687942:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.066842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.325864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.348127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.371759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.390625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.412432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.438092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:37.469223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571904328688434:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.469286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.469287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571904328688439:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:37.472330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:37.479767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571904328688441:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:39.356887Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490571908623656046:2458] 2025-04-07T14:04:39.368420Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490571908623656115:2464] TxId: 281474976710672. Ctx: { TraceId: 01jr892hjf41ctzesrt561xm9k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:04:39.376734Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==, ActorId: [1:7490571908623656062:2464], ActorState: ExecuteState, TraceId: 01jr892hjf41ctzesrt561xm9k, Create QueryResponse for error on request, msg: 2025-04-07T14:04:39.378845Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656125:2474], TxId: 281474976710672, task: 7. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. TraceId : 01jr892hjf41ctzesrt561xm9k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.379313Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656126:2475], TxId: 281474976710672, task: 8. Ctx: { TraceId : 01jr892hjf41ctzesrt561xm9k. SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.379475Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656127:2476], TxId: 281474976710672, task: 9. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. TraceId : 01jr892hjf41ctzesrt561xm9k. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.379819Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656119:2468], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. TraceId : 01jr892hjf41ctzesrt561xm9k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.379965Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656120:2469], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. CustomerSuppliedId : . TraceId : 01jr892hjf41ctzesrt561xm9k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.380098Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656121:2470], TxId: 281474976710672, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr892hjf41ctzesrt561xm9k. SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.380226Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656122:2471], TxId: 281474976710672, task: 4. Ctx: { TraceId : 01jr892hjf41ctzesrt561xm9k. SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:39.380352Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490571908623656123:2472], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTFhNzI3YzYtYTI0MjgyOWYtNzg0MGU1YmYtYzVmOWQzOQ==. TraceId : 01jr892hjf41ctzesrt561xm9k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490571908623656115:2464], status: ABORTED, reason: {
: Error: ... 2025-04-07T14:04:39.650513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21033, MsgBus: 14876 2025-04-07T14:04:40.901827Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571916086040374:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:40.901891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dcd/r3tmp/tmpbfzeqX/pdisk_1.dat 2025-04-07T14:04:40.995071Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:41.035171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:41.035259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:41.037290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21033, node 2 2025-04-07T14:04:41.087354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:41.087379Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:41.087384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:41.087480Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14876 TClient is connected to server localhost:14876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:41.450631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.466024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.535216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:41.681561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.733959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:43.674247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571928970943961:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.674327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.705130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.728088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.793769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.816244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.839923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.868174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.907970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571928970944456:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.908096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.908306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571928970944462:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.911208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:43.918329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571928970944464:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:44.752397Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912059:2458] 2025-04-07T14:04:44.835856Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912078:2463] 2025-04-07T14:04:44.847439Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912088:2467] 2025-04-07T14:04:44.862537Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912102:2471] 2025-04-07T14:04:44.910467Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912119:2475] 2025-04-07T14:04:44.924401Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912129:2480] 2025-04-07T14:04:44.974821Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912143:2485] 2025-04-07T14:04:44.975552Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912165:2492] 2025-04-07T14:04:45.001539Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571933265912169:2494] 2025-04-07T14:04:45.035570Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879487:2502] 2025-04-07T14:04:45.071489Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879513:2508] 2025-04-07T14:04:45.110747Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879527:2514] 2025-04-07T14:04:45.173137Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879551:2522] 2025-04-07T14:04:45.218754Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879567:2528] 2025-04-07T14:04:45.268910Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879588:2532] 2025-04-07T14:04:45.377856Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879628:2546] 2025-04-07T14:04:45.435869Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879650:2551] 2025-04-07T14:04:45.495568Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879673:2558] 2025-04-07T14:04:45.558639Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879696:2567] 2025-04-07T14:04:45.624996Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879722:2573] 2025-04-07T14:04:45.693396Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879743:2580] 2025-04-07T14:04:45.760478Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034685792, txId: 281474976715671] shutting down 2025-04-07T14:04:45.841791Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879882:2604] 2025-04-07T14:04:45.904755Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571916086040374:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:45.906189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:45.919726Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571937560879903:2613] 2025-04-07T14:04:45.995037Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034686030, txId: 281474976715673] shutting down 2025-04-07T14:04:46.083727Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847326:2638] 2025-04-07T14:04:46.171675Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847348:2647] 2025-04-07T14:04:46.261654Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847369:2656] 
2025-04-07T14:04:46.354738Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847390:2665] 2025-04-07T14:04:46.450661Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847411:2674] 2025-04-07T14:04:46.549823Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847436:2686] 2025-04-07T14:04:46.628017Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034686667, txId: 281474976715675] shutting down 2025-04-07T14:04:46.736585Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847565:2710] 2025-04-07T14:04:46.845055Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847595:2720] 2025-04-07T14:04:46.955447Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847619:2729] 2025-04-07T14:04:47.069539Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7490571941855847652:2740] 2025-04-07T14:04:47.131025Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034687171, txId: 281474976715677] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-04-07T14:04:41.431158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:04:41.431528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:41.431798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fb5/r3tmp/tmpmKT4gI/pdisk_1.dat 2025-04-07T14:04:42.545715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.661353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:42.755440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:42.755608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:42.767132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:42.989635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.728504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:824:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.728639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:835:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.728764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.744875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:43.930274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:838:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:04:47.227591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr892ptf39h1npj3y0e78qxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZmN2E5NzItYjYyNzVjNjUtNTdjODY4NjMtMjM4YTIzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-04-07T14:04:42.092621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:04:42.092844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:42.092994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f9e/r3tmp/tmp1LPWsO/pdisk_1.dat 2025-04-07T14:04:42.543989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.661316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:42.755396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:42.755600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:42.767633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:42.989606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.728452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:824:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.728568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:835:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.728645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.744792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:43.927960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:838:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:04:47.227616Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr892ptf59smzvpymqk8hwvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:04:47.588116Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:951:2756], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01jr892ptf59smzvpymqk8hwvv. SessionId : ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-04-07T14:04:47.590740Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:951:2756], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01jr892ptf59smzvpymqk8hwvv. SessionId : ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-04-07T14:04:47.641971Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:952:2757], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=. TraceId : 01jr892ptf59smzvpymqk8hwvv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-07T14:04:47.650587Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=, ActorId: [1:822:2674], ActorState: ExecuteState, TraceId: 01jr892ptf59smzvpymqk8hwvv, Create QueryResponse for error on request, msg: 2025-04-07T14:04:47.651747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr892ptf59smzvpymqk8hwvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIxMmI1NTQtNDc1ZGY0MzktMTI1MmNmYTctNTA0ZmE1ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 1459468173014646753 2025-04-07T14:04:41.789888Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T14:04:41.899041Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T14:04:41.899128Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T14:04:41.902190Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-07T14:04:41.923140Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:41.925917Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T14:04:48.477875Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:04:48.477987Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:48.478043Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:04:48.478085Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:48.570569Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-07T14:04:48.570642Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} |91.3%| [TA] 
$(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22896, MsgBus: 28033 2025-04-07T14:04:39.149032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571912918209970:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:39.149130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d81/r3tmp/tmpn4yoTQ/pdisk_1.dat 2025-04-07T14:04:39.444237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22896, node 1 2025-04-07T14:04:39.515403Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:39.515419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:39.515427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:39.515509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:39.518798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:39.518913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:39.520569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28033 TClient is connected to server localhost:28033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:39.942691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.520496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571921508145202:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.520637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.736792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.834561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571921508145302:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.834617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.834678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571921508145307:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.837670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.845843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571921508145309:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 11575, MsgBus: 9071 2025-04-07T14:04:44.222298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571933694711169:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:44.222403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d81/r3tmp/tmpROrlb7/pdisk_1.dat 2025-04-07T14:04:44.304226Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:44.326587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:44.326668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11575, node 2 2025-04-07T14:04:44.327941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:44.354351Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:44.354381Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:44.354397Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:44.354503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9071 TClient is connected to server localhost:9071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:44.670449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:46.708362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571942284646375:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:46.708449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:46.721786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:46.793481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571942284646476:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:46.793549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:46.793628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571942284646481:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:46.796905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:46.804744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571942284646483:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } >> KqpQueryService::DdlTx [GOOD] >> KqpErrors::ProposeError >> KqpErrors::ProposeResultLost_RwTx >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPartitionTests::CorrectRange_Commit >> TPQTest::TestSeveralOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlTx [GOOD] Test command err: Trying to start YDB, gRPC: 20861, MsgBus: 23483 2025-04-07T14:04:08.252460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571776904007770:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.252536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101f/r3tmp/tmpgtuZzx/pdisk_1.dat 2025-04-07T14:04:08.879280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.879363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.893308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.895515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20861, node 1 2025-04-07T14:04:09.220349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:09.220366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:09.220378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:09.220450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23483 TClient is connected to server localhost:23483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:10.290425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:10.305062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:10.316065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.521185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.699436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:10.780628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:12.287121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794083878646:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.287224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.601546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.698755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.731146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.777318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.808355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.857482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.924201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794083879144:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.924314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.929291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571794083879149:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.933539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.949399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:04:12.949639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571794083879151:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.256192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571776904007770:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.256267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:14.254042Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjIyNjUxMzItZDlkOTY1ZjktNjE0OTE1NGEtZTM4OTBiZTg=, ActorId: [1:7490571802673814089:2467], ActorState: ExecuteState, TraceId: 01jr891t0m8n7mqr82bfsmyhjx, Create QueryResponse for error on request, msg: 2025-04-07T14:04:14.378204Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc1MjUxY2ItODk1MzY1NWYtY2IxNjJjMGYtNGQzMDNiNmM=, ActorId: [1:7490571802673814149:2479], ActorState: ExecuteState, TraceId: 01jr891t4s1q78369kxdjtbmec, Create QueryResponse for error on request, msg: 2025-04-07T14:04:14.399450Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWE2M2UyMmYtZDQ0MDE5Yy00MDIxMmQyNi0zNzQ1N2U2OA==, ActorId: [1:7490571802673814174:2482], ActorState: ExecuteState, TraceId: 01jr891t5m39wjtmmdztaamd86, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29944, MsgBus: 19470 2025-04-07T14:04:15.716132Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571809831662837:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101f/r3tmp/tmp2oUxMG/pdisk_1.dat 2025-04-07T14:04:15.774074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:15.832142Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:15.858722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:15.858831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:15.862094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29944, node 2 2025-04-07T14:04:16.005376Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:16.005402Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:16.005410Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:16.005518Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19470 TClient is connected to server localhost:19470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:16.544426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:16.550397Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:04:16.570993Z ... ers { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_1" } items { text_value: "qwerty" } } } } } ; Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Execute SQL: CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2025-04-07T14:04:42.884040Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490571925795787828:4237], TxId: 281474976715896, task: 1. Ctx: { TraceId : 01jr892npf56krk69vx13zkfbc. SessionId : ydb://session/3?node_id=2&id=Y2Q4OTFjMWUtZDA2YTkzYWEtYjA1ZWY4YzgtNjBmNDhhNjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:04:42.884158Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490571925795787829:4238], TxId: 281474976715896, task: 2. Ctx: { TraceId : 01jr892npf56krk69vx13zkfbc. SessionId : ydb://session/3?node_id=2&id=Y2Q4OTFjMWUtZDA2YTkzYWEtYjA1ZWY4YzgtNjBmNDhhNjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490571925795787825:4132], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:04:42.884982Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q4OTFjMWUtZDA2YTkzYWEtYjA1ZWY4YzgtNjBmNDhhNjQ=, ActorId: [2:7490571921500820217:4132], ActorState: ExecuteState, TraceId: 01jr892npf56krk69vx13zkfbc, Create QueryResponse for error on request, msg: 2025-04-07T14:04:42.887824Z node 2 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr892neye7zvrae0m101bwgt" } } } } ;request=session_id: "ydb://session/3?node_id=2&id=Y2Q4OTFjMWUtZDA2YTkzYWEtYjA1ZWY4YzgtNjBmNDhhNjQ=" tx_control { tx_id: "01jr892neye7zvrae0m101bwgt" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-04-07T14:04:42.888079Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjI5YTNmZmItNjgwOTc3ZGYtOTZhOTViNTMtMTcwMGM4NzM=, ActorId: [2:7490571921500820208:4127], ActorState: ExecuteState, TraceId: 01jr892n1y6b687tte4cz5wxng, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 14976, MsgBus: 5358 2025-04-07T14:04:45.224398Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490571936492731399:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:45.224498Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101f/r3tmp/tmpbU7Hfu/pdisk_1.dat 2025-04-07T14:04:45.322601Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:45.359837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:45.359949Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:45.362134Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14976, node 3 2025-04-07T14:04:45.400089Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:45.400117Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:45.400127Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:45.400255Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5358 TClient is connected to server localhost:5358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:45.843439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:45.861143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:45.934383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:46.092798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:46.145973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:48.138045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571949377635002:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:48.138131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:48.170665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.197605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.225780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.253391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.279815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.305867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:48.337124Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571949377635495:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:48.337211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:48.337344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490571949377635500:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:48.340336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:48.346504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490571949377635502:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:49.335994Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzdlMmNiY2EtZDc1YTI3MDctMzJjMTE5MmEtYjgzYzgyODA=, ActorId: [3:7490571953672603109:2460], ActorState: ExecuteState, TraceId: 01jr892w98a42cjydqyfs7v9e5, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TPartitionTests::SetOffset >> TPQTest::TestDirectReadHappyWay >> TPQTabletTests::UpdateConfig_1 >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TPQTest::TestPartitionTotalQuota >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPQTest::TestPQPartialRead >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TPartitionTests::CommitOffsetRanges >> TPartitionTests::TooManyImmediateTxs >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTest::TestWriteSplit >> TFetchRequestTests::HappyWay >> TPartitionTests::DifferentWriteTxBatchingOptions >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> PQCountersLabeled::Partition >> TPQTest::TestUserInfoCompatibility >> TPQTest::TestWaitInOwners >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 128 Create chunk: 0.000038s Read by index: 0.000016s Iterate: 0.000016s Size: 252 Create chunk: 0.000044s Read by index: 0.000016s Iterate: 0.000018s Size: 1887 Create chunk: 0.000081s Read by index: 0.000120s Iterate: 0.000088s Size: 1658 Create chunk: 0.000102s Read by index: 0.000138s Iterate: 0.000072s Size: 1889 Create chunk: 0.000090s Read by index: 
0.000083s Iterate: 0.000036s Size: 1660 Create chunk: 0.000097s Read by index: 0.000088s Iterate: 0.000044s Size: 2407 Create chunk: 0.000062s Read by index: 0.000058s Iterate: 0.000033s Size: 2061 Create chunk: 0.000112s Read by index: 0.000092s Iterate: 0.000053s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000220s Read by index: 0.000024s Iterate: 0.000158s Size: 8256 Create chunk: 0.000134s Read by index: 0.000035s Iterate: 0.000070s Size: 8532 Create chunk: 0.000077s Read by index: 0.000033s Iterate: 0.000022s Size: 7769 Create chunk: 0.000058s Read by index: 0.000035s Iterate: 0.000030s Size: 2853 Create chunk: 0.000082s Read by index: 0.000057s Iterate: 0.000027s Size: 2419 Create chunk: 0.000058s Read by index: 0.000062s Iterate: 0.000034s Size: 2929 Create chunk: 0.000076s Read by index: 0.000073s Iterate: 0.000036s Size: 2472 Create chunk: 0.000103s Read by index: 0.000086s Iterate: 0.000046s >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> KqpTx::RollbackManyTx [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::SetOffset [GOOD] >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPartitionTests::ChangeConfig >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPartitionTests::OldPlanStep >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPartitionTests::OldPlanStep [GOOD] >> TPartitionTests::ChangeConfig [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPartitionTests::TestNonConflictingActsBatchOk >> TPartitionTests::TooManyImmediateTxs [GOOD] >> 
TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestTimeRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 17867, MsgBus: 27032 2025-04-07T14:04:38.282875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571906540504697:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:38.283409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00113d/r3tmp/tmpAY6mQu/pdisk_1.dat 2025-04-07T14:04:39.166007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:39.176840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:39.176945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:39.180383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17867, node 1 2025-04-07T14:04:40.186472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:40.186500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:40.186511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:40.186619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27032 TClient is connected to server localhost:27032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:41.219656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.302278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.455736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:04:41.533539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.569994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:41.880957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571919425408084:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.881063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:43.272204Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571906540504697:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:43.272280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:43.835366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.857984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.878023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.902791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.945401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:43.977425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:44.151300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571932310310566:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.151372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571932310310571:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.151385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:44.163221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:44.169474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571932310310573:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches >> TPartitionTests::WriteSubDomainOutOfSpace >> TPartitionTests::ConflictingTxIsAborted >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |91.4%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-04-07T14:04:51.786131Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.786229Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.833356Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:175:2190] 2025-04-07T14:04:51.834256Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.835160Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-04-07T14:04:51.835253Z node 1 :PERSQUEUE INFO: new Cookie owner1|c02cd7d4-1bcf8bd4-ce7569c5-d0aa56a_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST processed_blobs=41800 quoted_time=9.980000s Iteration 0 Iteration 1 Iteration 2 Iteration 3 Iteration 4 Iteration 5 Iteration 6 Iteration 7 Iteration 8 Iteration 9 Iteration 10 Iteration 11 Iteration 12 Iteration 13 Iteration 14 Iteration 15 Iteration 16 Iteration 17 Iteration 18 Iteration 19 Iteration 20 Iteration 21 Iteration 22 Iteration 23 Iteration 24 Iteration 25 Iteration 26 Iteration 27 Iteration 28 Iteration 29 Iteration 30 Iteration 31 Iteration 32 Iteration 33 Iteration 34 Iteration 35 Iteration 36 Iteration 37 Iteration 38 Iteration 39 Iteration 40 Iteration 41 Iteration 42 Iteration 43 Iteration 44 Iteration 45 Iteration 46 Iteration 47 Iteration 48 Iteration 49 Iteration 50 Iteration 51 Iteration 52 Iteration 53 Iteration 54 Iteration 55 Iteration 56 Iteration 57 Iteration 58 Iteration 59 Iteration 60 Iteration 61 Iteration 62 Iteration 63 Iteration 64 Iteration 65 Iteration 66 Iteration 67 Iteration 68 Iteration 69 Iteration 70 Iteration 71 Iteration 72 Iteration 73 Iteration 74 Iteration 75 Iteration 76 Iteration 77 Iteration 78 Iteration 79 Iteration 80 Iteration 81 Iteration 82 Iteration 83 Iteration 84 Iteration 85 Iteration 86 Iteration 87 Iteration 88 Iteration 89 Iteration 90 Iteration 91 Iteration 92 Iteration 93 Iteration 94 Iteration 95 Iteration 96 Iteration 97 Iteration 98 Iteration 99 Iteration 100 Iteration 101 Iteration 102 Iteration 103 Iteration 104 Iteration 105 Iteration 106 Iteration 107 Iteration 108 Iteration 109 Iteration 110 Iteration 111 Iteration 112 Iteration 113 Iteration 114 Iteration 115 Iteration 116 Iteration 117 Iteration 118 Iteration 119 Iteration 120 Iteration 121 Iteration 122 Iteration 123 Iteration 124 Iteration 125 Iteration 126 Iteration 127 Iteration 128 Iteration 129 Iteration 130 Iteration 131 Iteration 132 Iteration 133 Iteration 134 Iteration 135 Iteration 136 Iteration 137 Iteration 138 Iteration 139 Iteration 140 Iteration 141 Iteration 142 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 
220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 
Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 
727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 
Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback >> TPartitionTests::ShadowPartitionCounters >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 11844, MsgBus: 10333 2025-04-07T14:02:33.556653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571368398159720:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:33.556767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000bbd/r3tmp/tmpqROYJZ/pdisk_1.dat 2025-04-07T14:02:34.052809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:34.052957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:34.054842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11844, node 1 2025-04-07T14:02:34.111048Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:34.116861Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:02:34.126915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:34.199976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:34.199998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:34.200037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:34.200147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10333 TClient is connected to server localhost:10333 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:34.833500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:34.861257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:34.878176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:35.041279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:35.041465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:35.041762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:35.041906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:35.042036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:35.042127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:35.042241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:35.042383Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:35.042484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:35.042631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:35.042731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:35.042864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571372693127691:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:35.101217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:35.101273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:35.101506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:35.101615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:35.101718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:35.101848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:35.101945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:35.102034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:35.102141Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:35.102267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:35.102387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:35.102511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571372693127692:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:35.138572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:35.138637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:35.138856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:35.138953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:35.139042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:35.139125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:35.139212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571372693127695:2292];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:35.139297Z node 1 :TX_COLUMNSHARD WARN: ... 
4:03:36.465452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034615000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:37.792577Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034617000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:38.776624Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034618000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:39.535440Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034619000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:40.487988Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034620000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:41.647845Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034621000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:42.767747Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034622000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:43.958246Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034623000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:45.201793Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034624000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:46.356412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034625000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:47.639932Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034627000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:48.819143Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034628000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:50.028268Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034629001, txId: 18446744073709551615] shutting down 2025-04-07T14:03:51.307772Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034630000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:52.453488Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034631000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:53.611654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034633005, txId: 18446744073709551615] shutting down 2025-04-07T14:03:54.710664Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034634000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:55.887474Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034635000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:56.856883Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034636000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:57.925999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744034637000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:58.896256Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034638000, txId: 18446744073709551615] shutting down 2025-04-07T14:03:59.957128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034639000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:00.872451Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034640000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:01.928026Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034641000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:02.961559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034642000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:03.928424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034643000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:04.829609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034644000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:05.669238Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034645000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:06.883791Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034646000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:08.182902Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034647000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:09.491127Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034648000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:10.973524Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034650001, txId: 18446744073709551615] shutting down 2025-04-07T14:04:12.302341Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034651002, txId: 18446744073709551615] shutting down 2025-04-07T14:04:13.807330Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034653000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:15.400200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034654000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:16.919899Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034656000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:18.738323Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034657001, txId: 18446744073709551615] shutting down 2025-04-07T14:04:20.191991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034659000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:23.227400Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034661000, txId: 18446744073709551615] shutting down 
2025-04-07T14:04:25.155937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034664008, txId: 18446744073709551615] shutting down 2025-04-07T14:04:27.578243Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034665002, txId: 18446744073709551615] shutting down 2025-04-07T14:04:29.025495Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034668000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:30.189239Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034669000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:31.283002Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034670000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:32.393602Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034671000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:33.448864Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034673000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:34.365700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034673024, txId: 18446744073709551615] shutting down 2025-04-07T14:04:35.333050Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034674000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:36.329882Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034675000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:37.237639Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034676000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:38.246220Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034677000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:39.351897Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034678000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:40.263875Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034679000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:41.372996Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034680000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:42.425419Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034681000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:43.370184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034682000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:44.229175Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034683000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:45.063306Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034684000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:45.927203Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding 
snapshot; our snapshot: [step: 1744034685000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:46.818746Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034686000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:47.618135Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034687000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:48.436100Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034688000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:49.189873Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034688081, txId: 18446744073709551615] shutting down 2025-04-07T14:04:49.983560Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034689000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:50.911587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034690000, txId: 18446744073709551615] shutting down 2025-04-07T14:04:51.910565Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034691000, txId: 18446744073709551615] shutting down >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] Test command err: 2025-04-07T14:04:51.799465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:04:51.804408Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:04:51.804693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T14:04:51.804755Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:04:51.804828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T14:04:51.804862Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:04:51.804899Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.804979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T14:04:51.805012Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:51.817517Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.817601Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:174:2189], now have 1 active actors on pipe 2025-04-07T14:04:51.817675Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:04:51.831180Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.834587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:04:51.834695Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.835900Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.836809Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.836903Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.837313Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.837710Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:183:2196] 2025-04-07T14:04:51.838467Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T14:04:51.838516Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:183:2196] 2025-04-07T14:04:51.838561Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.838983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T14:04:51.839027Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T14:04:51.839134Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.839236Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:04:51.840048Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.840228Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T14:04:51.840740Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-07T14:04:51.840777Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:185:2198] 2025-04-07T14:04:51.840821Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.841126Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-07T14:04:51.841149Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-07T14:04:51.841238Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.841331Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:51.841781Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:04:51.841939Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.845389Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:51.845553Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:51.845919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.845973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-07T14:04:51.848433Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.848482Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2025-04-07T14:04:51.849393Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-04-07T14:04:51.850237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-07T14:04:51.850280Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule 
TEvProposeTransactionResult(PREPARED) 2025-04-07T14:04:51.850313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T14:04:51.850539Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T14:04:51.850630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.855756Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:51.855798Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T14:04:51.855823Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T14:04:51.862564Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo ... NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.542358Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:53.542608Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:53.542780Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:239:2234] 2025-04-07T14:04:53.543375Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T14:04:53.543407Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:239:2234] 2025-04-07T14:04:53.543444Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:53.543656Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2025-04-07T14:04:53.543686Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2025-04-07T14:04:53.543781Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.543892Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:53.544067Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.545576Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:53.545880Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:53.545920Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:246:2239], now have 1 active actors on pipe 2025-04-07T14:04:53.547006Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:53.547043Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:251:2243], now have 1 active actors on pipe 2025-04-07T14:04:53.547145Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } ReceivingShards: 33333 ReceivingShards: 33334 Immediate: false } 2025-04-07T14:04:53.547171Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T14:04:53.547215Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-07T14:04:53.547241Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T14:04:53.547273Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T14:04:53.547352Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.547401Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.549705Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.549745Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T14:04:53.549769Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T14:04:53.549927Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 21474838668 } } Step: 100 2025-04-07T14:04:53.549959Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-07T14:04:53.549981Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-07T14:04:53.550005Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-07T14:04:53.550128Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 
2025-04-07T14:04:53.550179Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.552691Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.552750Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-04-07T14:04:53.552786Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-04-07T14:04:53.552816Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-07T14:04:53.552871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-07T14:04:53.552922Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-07T14:04:53.552981Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-07T14:04:53.553169Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-07T14:04:53.553200Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-07T14:04:53.553229Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-07T14:04:53.553260Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-07T14:04:53.553290Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T14:04:53.553324Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-07T14:04:53.553461Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.553532Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.559495Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.559549Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-07T14:04:53.559585Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-07T14:04:53.559627Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 2 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
2025-04-07T14:04:53.559664Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33334 2025-04-07T14:04:53.559761Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33333 2025-04-07T14:04:53.559822Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-07T14:04:53.559868Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-07T14:04:53.559900Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-07T14:04:53.559962Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-07T14:04:53.560014Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-07T14:04:53.560164Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:53.561937Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-07T14:04:53.561980Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33334 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.563011Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-07T14:04:53.563045Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33333 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.564515Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:53.564600Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-07T14:04:53.564639Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-07T14:04:53.564692Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T14:04:53.564731Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-07T14:04:53.564772Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-07T14:04:53.564805Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-07T14:04:53.564970Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.565040Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.567881Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.567930Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-07T14:04:53.567965Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-07T14:04:53.567996Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 
2025-04-07T14:04:53.568032Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 2025-04-07T14:04:53.568057Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T14:04:53.568083Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-04-07T14:04:51.787393Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:04:51.796021Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:04:51.796285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T14:04:51.796342Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:04:51.796384Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T14:04:51.796425Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:04:51.796505Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.796587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T14:04:51.796628Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:51.821475Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.821556Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:174:2189], now have 1 active actors on pipe 2025-04-07T14:04:51.821636Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:04:51.835522Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.849688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" 
YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:04:51.849823Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.851391Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.851504Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.851572Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.851951Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.852341Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:183:2196] 2025-04-07T14:04:51.853298Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-07T14:04:51.853364Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:183:2196] 2025-04-07T14:04:51.853435Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.853950Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-07T14:04:51.854003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-07T14:04:51.854164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.854301Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T14:04:51.854559Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.854773Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-07T14:04:51.855496Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-04-07T14:04:51.855539Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:185:2198] 2025-04-07T14:04:51.855578Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.855897Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-07T14:04:51.855934Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-07T14:04:51.856036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.856159Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:51.856716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:04:51.856895Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.867815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:51.867980Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:51.868309Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.868368Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-07T14:04:51.869921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.869984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2025-04-07T14:04:51.870877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 4294969484 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-04-07T14:04:51.870945Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-07T14:04:51.871030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 
2025-04-07T14:04:51.871090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-07T14:04:51.871138Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-07T14:04:51.871313Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T14:04:51.871400Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.875242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:51.875309Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-07T14:04:51.875352Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-07T14:04:51.882365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 4294969484 } } Step: 100 2025-04-07T14:04:51.882459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-07T14:04:51.882506Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-07T14:04:51.882552Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-07T14:04:51.882711Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 4294969484 } Partitions { } 2025-04-07T14:04:51.882823Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TE ... 222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.543570Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.562573Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.564408Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.566555Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-04-07T14:04:53.566596Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890, ExecStep 0, ExecTxId 0 2025-04-07T14:04:53.566682Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-04-07T14:04:53.566830Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Load tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.566888Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=1, PlannedTxs.size=1 2025-04-07T14:04:53.566922Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-04-07T14:04:53.567384Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.567416Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.567534Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:53.567797Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:53.567997Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:308:2294] 2025-04-07T14:04:53.568544Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.569370Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:04:53.569536Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:04:53.570016Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:04:53.570197Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-07T14:04:53.570227Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:04:53.570252Z node 5 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:53.570276Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-04-07T14:04:53.570310Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2025-04-07T14:04:53.570356Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:53.570430Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 2025-04-07T14:04:53.570521Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-04-07T14:04:53.570543Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-07T14:04:53.570566Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-07T14:04:53.570604Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:53.570767Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-07T14:04:53.570872Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:04:53.570988Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-07T14:04:53.571014Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-07T14:04:53.571038Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-07T14:04:53.571063Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-07T14:04:53.571085Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T14:04:53.571111Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-07T14:04:53.571242Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.571300Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.573977Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.574028Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-07T14:04:53.574060Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-07T14:04:53.574092Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2025-04-07T14:04:53.574119Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-07T14:04:53.574217Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:53.575479Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-07T14:04:53.575523Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:53.577129Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:53.577176Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:334:2313], now have 1 active actors on pipe 2025-04-07T14:04:53.577295Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-07T14:04:53.577330Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-07T14:04:53.577356Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-07T14:04:53.577382Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-04-07T14:04:53.577406Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-07T14:04:53.577461Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-07T14:04:53.577486Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-07T14:04:53.577524Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-07T14:04:53.577571Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-07T14:04:53.577609Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-07T14:04:53.577740Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.579304Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:53.579360Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-07T14:04:53.579389Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-07T14:04:53.579414Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-07T14:04:53.579444Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-07T14:04:53.579470Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-07T14:04:53.579495Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-07T14:04:53.579628Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2025-04-07T14:04:53.579694Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.582002Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:04:53.582046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-07T14:04:53.582083Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-07T14:04:53.582122Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-07T14:04:53.582161Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-07T14:04:53.582203Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-07T14:04:53.582250Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T14:04:53.582273Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> TPartitionTests::TestTxBatchInFederation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.789432Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2025-04-07T14:04:51.789506Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.823029Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.846875Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-07T14:04:51.847715Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.850929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.853826Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:183:2196] 2025-04-07T14:04:51.855512Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.868060Z node 1 :PERSQUEUE INFO: new Cookie default|7308c494-30343e44-4f23a5ec-409564fa_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:214:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:217:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is 
[1:105:2137] sender: [1:218:2057] recipient: [1:216:2220] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:219:2221] sender: [1:220:2057] recipient: [1:216:2220] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.935124Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.935190Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:51.935878Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:270:2264] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.938492Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:271:2265] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.948048Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:51.948118Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:271:2265] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:51.959925Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:51.960002Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:270:2264] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:219:2221] sender: [1:299:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T14:04:52.345482Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.345543Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.362761Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-04-07T14:04:52.363581Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "user1" Generation: 2 Important: true } 2025-04-07T14:04:52.364146Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:182:2195] 2025-04-07T14:04:52.366438Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:52.368940Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:183:2196] 2025-04-07T14:04:52.370620Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.392004Z node 2 :PERSQUEUE INFO: new Cookie default|fa4418de-b4cdb6ad-7e1c96b1-5ca96875_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:214:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:217:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:218:2057] recipient: [2:216:2220] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:219:2221] sender: [2:220:2057] recipient: [2:216:2220] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.444633Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.444717Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:52.445277Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2264] 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.447399Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:271:2265] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.454994Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:52.455067Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:271:2265] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:52.467066Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp ... 4-07T14:04:54.454774Z node 5 :PERSQUEUE DEBUG: Prefetched blob in L1. Partition 0 offset 12 count 6 size 6292734 actorID [5:530:2504] 2025-04-07T14:04:54.454949Z node 5 :PERSQUEUE DEBUG: FormAnswer 1 2025-04-07T14:04:54.456450Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.457384Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.457499Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.458283Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 13 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.459187Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 13 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.459350Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 13 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.460168Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 14 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.461521Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 14 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.461700Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 14 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.462884Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 15 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.464035Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 15 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.464209Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 15 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.465136Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 16 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.466059Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 16 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.466310Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 16 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.467208Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 17 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-07T14:04:54.468135Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 17 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 
2025-04-07T14:04:54.468271Z node 5 :PERSQUEUE DEBUG: FormAnswer processing batch offset 17 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-07T14:04:54.468458Z node 5 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user another1 readTimeStamp done, result 282 queuesize 0 startOffset 12 2025-04-07T14:04:54.468735Z node 5 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 12 size 6292734 2025-04-07T14:04:54.472571Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.472636Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:631:2581], now have 1 active actors on pipe 2025-04-07T14:04:54.474400Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.474459Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:636:2585], now have 1 active actors on pipe 2025-04-07T14:04:54.474560Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:04:54.474919Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 9(current 8) received from actor [5:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 6 ReadRuleGenerations: 6 ReadRuleGenerations: 8 ReadRuleGenerations: 7 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 6 Important: false } Consumers { Name: "aaa" Generation: 6 Important: false } Consumers { Name: "another1" Generation: 8 Important: true } Consumers { Name: "important" Generation: 7 Important: true } Consumers { Name: "another" Generation: 9 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:54.478880Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 6 ReadRuleGenerations: 6 ReadRuleGenerations: 8 ReadRuleGenerations: 7 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 6 Important: false } Consumers { Name: "aaa" Generation: 6 Important: false } Consumers { Name: "another1" 
Generation: 8 Important: true } Consumers { Name: "important" Generation: 7 Important: true } Consumers { Name: "another" Generation: 9 Important: false } 2025-04-07T14:04:54.478966Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:54.479208Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 9 done 2025-04-07T14:04:54.479552Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 9 done 2025-04-07T14:04:54.479885Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:04:54.480076Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:54.485040Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:54.485301Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-07T14:04:54.487374Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:54.487530Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-07T14:04:54.487817Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [5:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 6 ReadRuleGenerations: 6 ReadRuleGenerations: 8 ReadRuleGenerations: 7 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 6 Important: false } Consumers { Name: "aaa" Generation: 6 Important: false } Consumers { Name: "another1" Generation: 8 Important: true } Consumers { Name: "important" Generation: 7 Important: true } Consumers { Name: "another" Generation: 9 Important: false } 2025-04-07T14:04:54.488468Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.488546Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:654:2600], now have 1 active actors on pipe 2025-04-07T14:04:54.490335Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.490399Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:659:2604], now have 1 active actors on pipe 2025-04-07T14:04:54.490511Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:04:54.490562Z node 5 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T14:04:54.490683Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:04:54.491148Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.491204Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:661:2606], now have 1 active actors on pipe 2025-04-07T14:04:54.491277Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:04:54.491327Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T14:04:54.491413Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:04:54.491795Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.491845Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:663:2608], now have 1 active actors on pipe 2025-04-07T14:04:54.491943Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:04:54.491990Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T14:04:54.492088Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:04:54.492467Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:54.492531Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:665:2610], now have 1 active actors on pipe 2025-04-07T14:04:54.492641Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:04:54.492703Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-07T14:04:54.492810Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::UsedStorageV1 [GOOD] >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> GroupWriteTest::Simple [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |91.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay >> TPartitionTests::ConflictingCommitFails >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 14383749364444555699 2025-04-07T14:04:42.110379Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: 
TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T14:04:42.127771Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T14:04:42.127865Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T14:04:42.130316Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-07T14:04:42.141446Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:42.143478Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T14:04:55.943684Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:04:55.943780Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:55.943820Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:04:55.943847Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:04:55.986000Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-07T14:04:55.986117Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader 
for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.788565Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.788640Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] 2025-04-07T14:04:51.816527Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.816596Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:174:2189], now have 1 active actors on pipe 2025-04-07T14:04:51.816757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:04:51.840007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.846127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-07T14:04:51.846287Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not 
enabled in BillingMeteringConfig 2025-04-07T14:04:51.848472Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-07T14:04:51.848574Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.848622Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.848645Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.848769Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.849149Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.849401Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.850995Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-04-07T14:04:51.851052Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:182:2195] 2025-04-07T14:04:51.851098Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.853089Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-07T14:04:51.853157Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-07T14:04:51.853202Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-04-07T14:04:51.853232Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-04-07T14:04:51.853410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:04:51.853455Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.853608Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:51.854097Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.854293Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:183:2196] 2025-04-07T14:04:51.855507Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2025-04-07T14:04:51.855553Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:183:2196] 2025-04-07T14:04:51.855606Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.857199Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-04-07T14:04:51.857244Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-04-07T14:04:51.857288Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-04-07T14:04:51.857314Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-04-07T14:04:51.857454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:04:51.857488Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:04:51.857625Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.857809Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:184:2197] 2025-04-07T14:04:51.859380Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Initializing completed. 
2025-04-07T14:04:51.859428Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:184:2197] 2025-04-07T14:04:51.859468Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.861015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit request with generation 1 2025-04-07T14:04:51.861096Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit with generation 1 done 2025-04-07T14:04:51.861173Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit request with generation 1 2025-04-07T14:04:51.861199Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit with generation 1 done 2025-04-07T14:04:51.861307Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:04:51.861343Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPart ... 
red TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.220086Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:651:2645] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.223347Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:656:2650] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.225212Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:661:2655] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.227154Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:666:2660] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.228997Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:671:2665] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.230770Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:676:2670] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.232420Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:681:2675] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.234189Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:686:2680] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.235772Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:691:2685] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.237181Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:696:2690] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.238642Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:701:2695] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.240058Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:706:2700] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.241548Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:711:2705] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.242937Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:716:2710] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.244251Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:721:2715] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.245600Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:726:2720] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.246876Z node 10 :PERSQUEUE_READ_BALANCER INFO: 
[72057594037927938][rt3.dc1--topic] pipe [10:731:2725] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.248190Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:736:2730] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.249477Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:741:2735] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.250812Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:746:2740] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.252311Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:56.252364Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:751:2745], now have 1 active actors on pipe 2025-04-07T14:04:56.252463Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:04:56.260550Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:56.260625Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:754:2748], now have 1 active actors on pipe 2025-04-07T14:04:56.260776Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:04:56.261603Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:56.261652Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:757:2751], now have 1 active actors on pipe 2025-04-07T14:04:56.261771Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:04:56.262535Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [10:760:2754] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.674961Z node 11 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:56.675038Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured 
TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.701208Z node 11 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:56.701268Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.703986Z node 11 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:56.704846Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 15 actor [11:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 15 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 15 ReadRuleGenerations: 15 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 15 Important: false } Consumers { Name: "aaa" Generation: 15 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:56.705405Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [11:246:2246] 2025-04-07T14:04:56.706130Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [11:246:2246] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:56.707094Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [11:244:2244] 2025-04-07T14:04:56.707603Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [11:244:2244] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:56.732149Z node 11 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:56.732198Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937] 
doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:56.732886Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [11:325:2308] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:56.733923Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [11:327:2310] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:56.737093Z node 11 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:56.737142Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [11:325:2308] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:56.737578Z node 11 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:04:56.737607Z node 11 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [11:327:2310] >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TPartitionTests::TestBatchingWithChangeConfig |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |91.4%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> KqpService::SwitchCache-UseCache [GOOD] >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 18814, MsgBus: 15322 2025-04-07T14:04:08.040972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571777881945792:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:08.041280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001063/r3tmp/tmpNICL0w/pdisk_1.dat 2025-04-07T14:04:08.640189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.670523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.675913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.677019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18814, node 1 2025-04-07T14:04:08.847433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.847481Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.847491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.847580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15322 TClient is connected to server localhost:15322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.563460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.585311Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:04:09.590061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:09.742622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:04:09.888204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:09.984787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.666188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790766849257:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.666304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.931510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.007618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.066181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.109050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.191639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.287555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.356494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795061817059:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.356584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.357384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795061817064:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.361439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.384293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571795061817066:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:04:13.030306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571777881945792:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:13.030359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:13.756454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:23.599151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:23.599185Z node 1 :IMPORT WARN: Table profiles were not loaded took: 22.890252s took: 22.870263s took: 22.894879s took: 22.896674s took: 22.893690s took: 22.730898s took: 22.895675s took: 22.880362s took: 22.895677s took: 22.898718s Trying to start YDB, gRPC: 63928, MsgBus: 23555 2025-04-07T14:04:37.631896Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571901888832081:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:37.631988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001063/r3tmp/tmpvlzZ2F/pdisk_1.dat 2025-04-07T14:04:37.747345Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:37.763884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:37.763979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:37.765766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63928, node 2 2025-04-07T14:04:37.804653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:37.804697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:37.804705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:37.804826Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23555 TClient is connected to server localhost:23555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:38.223943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.242380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.308385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.471522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:38.559205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:40.778303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571914773735658:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.778408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:40.817838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.840752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.873305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.900069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.921690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:04:40.987831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:04:41.023108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571919068703448:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.023181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.023247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571919068703453:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:41.026536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:04:41.035665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571919068703455:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:04:42.096637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:04:42.632294Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571901888832081:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:42.632378Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:52.745903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:52.745929Z node 2 :IMPORT WARN: Table profiles were not loaded took: 16.831940s took: 16.833333s took: 16.833335s took: 16.834151s took: 16.834455s took: 16.835932s took: 16.837619s took: 16.838520s took: 16.839297s took: 16.862699s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] Test command err: 2025-04-07T14:04:51.804759Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.804847Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.830750Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.832499Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.832579Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T14:04:52.226827Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T14:04:52.718811Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.718902Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.733828Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.735452Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:04:52.000000Z 2025-04-07T14:04:52.735517Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.318378Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.318450Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.332935Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:175:2190] 2025-04-07T14:04:53.333912Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.334634Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-04-07T14:04:53.334739Z node 3 :PERSQUEUE INFO: new Cookie owner1|172b2123-100bd4bd-86ef5ab-2a6f38d2_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.634846Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-07T14:04:53.938737Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.938804Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.952345Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.955362Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:53.000000Z 2025-04-07T14:04:53.955407Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:54.254716Z node 4 :PERSQUEUE INFO: new Cookie owner1|c97bf554-fc02de60-3af03ef2-4b1017d9_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wake ... 
titionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Send write: 9 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got write info response. Body keys: 1, head: 10, src id info: 1 2025-04-07T14:04:57.252946Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:57.252993Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:57.262008Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:57.262924Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:57.000000Z 2025-04-07T14:04:57.262969Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [5:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:57.550555Z node 5 :PERSQUEUE INFO: new Cookie owner1|918f7384-2f9956a8-aab7b1-10c59c22_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send write: 9 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got write info response. Body keys: 1, head: 10, src id info: 1 >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2025-04-07T14:04:51.812241Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.812902Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:51.831465Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.832852Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.832915Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Create distr tx with id = 0 and act no: 1 Got batch complete: 1001 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000000" Value: "\010\001\020\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\350\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\350\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\352\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\352\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T14:04:53.465901Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.465966Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.476696Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:177:2192] 2025-04-07T14:04:53.477286Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.477766Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-04-07T14:04:53.477843Z node 2 :PERSQUEUE INFO: new Cookie owner1|e82b77b8-61b4378d-ab91dfc4-2bfc5ca9_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:54.075535Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:54.075593Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:54.087215Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:54.087493Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:54.087710Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:175:2190] 2025-04-07T14:04:54.088521Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-04-07T14:04:54.088576Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:175:2190] 2025-04-07T14:04:54.088627Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:54.089125Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:54.089280Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-07T14:04:54.089359Z node 3 :PERSQUEUE INFO: new Cookie owner1|3dab7b2c-5620cd4e-5c74bd75-445d81c2_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 2025-04-07T14:04:54.089687Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:54.089778Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-04-07T14:04:54.089971Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-04-07T14:04:54.090045Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-04-07T14:04:54.090147Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-04-07T14:04:54.090958Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-07T14:04:54.091438Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 127 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:54.122518Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:54.122596Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-07T14:04:54.122651Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-04-07T14:04:54.122768Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to ... 
d TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_DEFRAG_SCHEDULER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 17 Wait batch completion Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait kv request Wait tx committed for tx 0 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] Test command err: 2025-04-07T14:04:51.807235Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.807316Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.831068Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.832759Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.832863Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T14:04:52.660397Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.660474Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.672537Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.673918Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:52.000000Z 2025-04-07T14:04:52.673971Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-04-07T14:04:53.460704Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.460755Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:53.863226Z node 4 
:PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.863298Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:53.882407Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.884170Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:04:53.000000Z 2025-04-07T14:04:53.884232Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvent ... equest Wait tx committed for tx 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2025-04-07T14:04:57.017854Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:57.017902Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:57.027471Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:57.029312Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:57.000000Z 2025-04-07T14:04:57.029367Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\250\236\306\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait kv request Got batch complete: 1 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 3 Wait tx committed for tx 4 Create distr tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait batch 
completion Wait kv request Wait immediate tx complete 8 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 8 Wait immediate tx complete 9 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 9 >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> TPartitionTests::ConflictingCommitFails [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback >> KqpErrors::ProposeResultLost_RwTx [GOOD] >> KqpErrors::ProposeError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx [GOOD] Test command err: 2025-04-07T14:04:55.696187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:04:55.854712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:04:55.855236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:04:55.855431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:55.856196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:55.856275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f92/r3tmp/tmp0or7fX/pdisk_1.dat 2025-04-07T14:04:56.946451Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:57.355542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:04:57.516402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:57.516551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:57.519542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:57.519612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:57.533214Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:57.533715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:57.534068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:57.985135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:59.093697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1516:2922], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.093873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.093991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2927], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.107832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:59.708239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1530:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:05:02.093539Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-07T14:05:02.093608Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-07T14:05:02.101964Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.102046Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T14:05:02.102145Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:05:02.286054Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-07T14:05:02.351602Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.438767s, cancelAfter: (empty maybe) 2025-04-07T14:05:02.351700Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-04-07T14:05:02.351755Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.351814Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T14:05:02.351853Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:05:02.352298Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-04-07T14:05:02.360938Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1707:2920] TxId: 0. 
Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-07T14:05:02.361094Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-07T14:05:02.369825Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-07T14:05:02.370003Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.370193Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-07T14:05:02.370312Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:05:02.370471Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-07T14:05:02.413159Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-04-07T14:05:02.413289Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-07T14:05:02.439896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:02.439971Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb37sa7xfcz1szjej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4MmJhYzEtZDBiMzY2ZWEtZTRmNjhjNmUtNGVhNGYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2025-04-07T14:05:0 ... 5661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037889 not finished yet: Preparing 2025-04-07T14:05:02.609849Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037890 not finished yet: Preparing 2025-04-07T14:05:02.609863Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037891 not finished yet: Preparing 2025-04-07T14:05:02.609897Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037888 (Preparing), DS 72075186224037889 (Preparing), DS 72075186224037890 (Preparing), DS 72075186224037891 (Preparing), 2025-04-07T14:05:02.609935Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, not immediate tx, become PrepareState 2025-04-07T14:05:02.613671Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Got propose result, shard: 72075186224037888, status: PREPARED, error: 2025-04-07T14:05:02.613729Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Not all shards are prepared, waiting... 2025-04-07T14:05:02.615108Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shard 72075186224037889 propose error, notDelivered: 0, notPrepared: 0, wasRestart: 0 2025-04-07T14:05:02.615164Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037888 2025-04-07T14:05:02.615210Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037889 2025-04-07T14:05:02.615228Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037890 2025-04-07T14:05:02.615245Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037891 2025-04-07T14:05:02.621193Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-07T14:05:02.621255Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 2, does not have the CA id yet or is already complete 2025-04-07T14:05:02.621276Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
task: 3, does not have the CA id yet or is already complete 2025-04-07T14:05:02.621298Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 4, does not have the CA id yet or is already complete 2025-04-07T14:05:02.625121Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: UNDETERMINED Issues { message: "State of operation is unknown." issue_code: 2026 severity: 1 issues { message: "Tx state unknown for shard 72075186224037889, txid 281474976715661" issue_code: 200506 severity: 1 } } Result { Stats { CpuTimeUs: 2484 } } , to ActorId: [1:1716:3042] 2025-04-07T14:05:02.625288Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-07T14:05:02.625359Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:05:02.625394Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1731:3042] TxId: 281474976715661. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:05:02.625695Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, ActorId: [1:1716:3042], ActorState: ExecuteState, TraceId: 01jr893969bg8qhh1d110b70b8, Create QueryResponse for error on request, msg: 2025-04-07T14:05:02.626653Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1739:3042] TxId: 0. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-07T14:05:02.626793Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-07T14:05:02.627157Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Resolved key sets: 0 2025-04-07T14:05:02.627219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:02.627241Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2025-04-07T14:05:02.627270Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:05:02.627292Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-07T14:05:02.627360Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:05:02.627391Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:05:02.627424Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1739:3042] TxId: 281474976715662. Ctx: { TraceId: 01jr893969bg8qhh1d110b70b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYmYwMzktMmMxYjUxOC00ZTM5ZTIwNy1iOTUxZDU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeError [GOOD] Test command err: 2025-04-07T14:04:55.702897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:04:55.854563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:04:55.854942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:04:55.855095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:55.855619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:04:55.855670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f95/r3tmp/tmpSCeTWE/pdisk_1.dat 2025-04-07T14:04:56.946475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:57.361051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:04:57.515957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:57.516083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:57.518825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:57.518878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:57.532264Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:57.532719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:57.532998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:57.985240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:04:59.093619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1516:2922], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.093795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.093909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2927], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:59.107851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:04:59.707533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1530:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:05:02.093536Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-07T14:05:02.093606Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-07T14:05:02.101965Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.102040Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T14:05:02.102103Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:05:02.286354Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-07T14:05:02.351633Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.430716s, cancelAfter: (empty maybe) 2025-04-07T14:05:02.351726Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-04-07T14:05:02.351800Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.351857Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-07T14:05:02.351911Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:05:02.352437Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-04-07T14:05:02.360967Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1707:2920] TxId: 0. 
Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-07T14:05:02.361170Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-07T14:05:02.369850Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-07T14:05:02.370068Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-07T14:05:02.370371Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-07T14:05:02.370517Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:05:02.370660Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-07T14:05:02.413197Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-04-07T14:05:02.413323Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1707:2920] TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-07T14:05:02.439881Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:02.439964Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jr8935tb699j38pnsxedveps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmOGVmMTMtZTRmZWJjMWYtOTUzMzUxM2YtN2FhNjgzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2025-04-07T14:05:0 ... OGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:03.301685Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2025-04-07T14:05:03.302453Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1952 RawX2: 4294970487 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\t\000\002\n\n" Rows: 1 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\005\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "default" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=" } RequestContext { key: "TraceId" value: "01jr8939wva05w49qdbhwy8es4" } EnableSpilling: false 2025-04-07T14:05:03.302582Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-04-07T14:05:03.302640Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-07T14:05:03.302717Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:05:03.302753Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1952:3191] TxId: 281474976715683. 
Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-07T14:05:03.302792Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-07T14:05:03.302834Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-07T14:05:03.302870Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T14:05:03.316619Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: RESPONSE_DATA, error: 2025-04-07T14:05:03.316750Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-07T14:05:03.316896Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: GENERIC_ERROR Issues { message: "Error executing transaction: transaction failed." severity: 1 } Result { Stats { CpuTimeUs: 193 } } , to ActorId: [1:1942:3191] 2025-04-07T14:05:03.317002Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-07T14:05:03.317097Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
terminate execution. 2025-04-07T14:05:03.317130Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1952:3191] TxId: 281474976715683. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:05:03.317278Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, ActorId: [1:1942:3191], ActorState: ExecuteState, TraceId: 01jr8939wva05w49qdbhwy8es4, Create QueryResponse for error on request, msg: 2025-04-07T14:05:03.317600Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1957:3191] TxId: 0. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-07T14:05:03.317654Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1957:3191] TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-07T14:05:03.317791Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Resolved key sets: 0 2025-04-07T14:05:03.317870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:03.317895Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2025-04-07T14:05:03.317936Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1957:3191] TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:05:03.317956Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1957:3191] TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-07T14:05:03.318011Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1957:3191] TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:05:03.318041Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1957:3191] TxId: 281474976715684. 
Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:05:03.318087Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1957:3191] TxId: 281474976715684. Ctx: { TraceId: 01jr8939wva05w49qdbhwy8es4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NTE0YWQtNzVhZmE5ZTYtOGU4MmI0NjgtZmQzM2UxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-04-07T14:04:51.821658Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.821733Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.846793Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.847974Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.848024Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } 
CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-07T14:04:52.691320Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.691376Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.702980Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.705330Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:04:52.000000Z 2025-04-07T14:04:52.705396Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\367\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" 
StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:04:53.467907Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.467966Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:53.477568Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.478565Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:04:53.000000Z 2025-04-07T14:04:53.478603Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.771000Z node 3 :PERSQUEUE INFO: new Cookie src1|1eba5852-eec4f1ad-c4157d29-743bcf4a_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Got batch complete: 1 2025-04-07T14:04:53.771288Z node 3 :PERSQUEUE INFO: new Cookie src4|ec9469a8-c7dab391-34dcca4a-785bc1ad_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id ... 
ase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 4 and act no: 5 Created Tx with id 7 as act# 7 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR 
Wait batch completion Wait kv request Got batch complete: 1 Wait batch completion Wait kv request Create distr tx with id = 8 and act no: 9 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait kv request Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-04-07T14:05:03.008872Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:03.008917Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:05:03.017297Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:03.018783Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:05:03.000000Z 2025-04-07T14:05:03.018823Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Wait batch completion Wait kv request Wait tx committed for tx 1 Wait for no tx committed Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR |91.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> IncrementalBackup::SimpleBackup >> IncrementalBackup::SimpleRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-04-07T14:04:51.793150Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.793246Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.830790Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.833157Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.833253Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\357\305\204\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-04-07T14:04:52.517563Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.517611Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.529371Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.532797Z node 2 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:52.000000Z 2025-04-07T14:04:52.532865Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.331476Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.331543Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:53.345162Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.346816Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:53.000000Z 2025-04-07T14:04:53.346881Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.646850Z node 3 :PERSQUEUE INFO: new Cookie src3|a8e7eb4d-3b5934b-1b8ed167-f4bbfa3_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src3 Got batch complete: 1 2025-04-07T14:04:53.647114Z node 3 :PERSQUEUE INFO: new Cookie src4|521577a3-a0e76509-686eb4da-ee491990_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TS ... 
ents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 2025-04-07T14:05:02.883723Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:02.883786Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:05:02.897388Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:02.899558Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-07T14:05:02.000000Z 2025-04-07T14:05:02.899619Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to 
PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured 
TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::ConditionalErase >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTestsWithReboots::CreateTable >> TSchemeShardTTLTests::AlterTableShouldSuccess |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 
2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-04-07T14:04:51.548295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571965092892651:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.548336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.643948Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571963825792508:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.644101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b27/r3tmp/tmpAicWPj/pdisk_1.dat 2025-04-07T14:04:51.773923Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:51.777195Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:52.006561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.006651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.008618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.008757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.011465Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:52.012595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.013871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.087015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6364, node 1 2025-04-07T14:04:52.101655Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.101714Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.323327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b27/r3tmp/yandexgP3Zfz.tmp 2025-04-07T14:04:52.323357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b27/r3tmp/yandexgP3Zfz.tmp 2025-04-07T14:04:52.323486Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b27/r3tmp/yandexgP3Zfz.tmp 2025-04-07T14:04:52.323568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:53.754698Z INFO: TTestServer started on Port 17187 GrpcPort 6364 TClient is connected to server localhost:17187 PQClient connected to localhost:6364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:54.073359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:54.168280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.433242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976710694703:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.433252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976710694711:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.433342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.437990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:54.477076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571976710694717:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:04:55.797502Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571977977795595:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.844560Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571976710694760:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.844917Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmI1ODU2YzAtMWQ2OTkxNi1mNTUzYzNlMS04MGMyMDUyMg==, ActorId: [2:7490571976710694701:2280], ActorState: ExecuteState, TraceId: 01jr89318z7698qt2dhvz6rek3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.847807Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:55.878040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmE2OGY3ZDctN2U4ZWViOTMtZTE2NDIxNTEtOTIwNDE1NTE=, ActorId: [1:7490571977977795547:2307], ActorState: ExecuteState, TraceId: 01jr8931gg9pgvmrcevrhtb82p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.878533Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:56.207929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.283664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.408453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:04:56.548278Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571965092892651:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.548330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:56.644007Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571963825792508:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.644054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:57.905891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89338tbc8k0ze0v0fwecjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM3NzExNmMtNGIzM2YwYTEtYThiYmE0ODUtMzlkODJlZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490571995157665217:3091] 2025-04-07T14:04:58.075305Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item ... 0 rrg 0 2025-04-07T14:05:06.432762Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-3 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:06.432897Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:05:06.433156Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:06.435093Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:05:06.435705Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:05:06.435755Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [3:336:2313], now have 1 active actors on pipe 2025-04-07T14:05:06.435825Z node 3 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-07T14:05:06.435918Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-07T14:05:06.435951Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-07T14:05:06.435980Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-07T14:05:06.569464Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2025-04-07T14:05:06.569668Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 1 2025-04-07T14:05:06.569714Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2025-04-07T14:05:06.569782Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 2/2 2025-04-07T14:05:06.569834Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-07T14:05:06.569877Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2025-04-07T14:05:06.569941Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-07T14:05:06.570303Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 12884904076 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-07T14:05:06.570456Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:06.574929Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:05:06.575007Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-07T14:05:06.575073Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-07T14:05:06.575156Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-07T14:05:06.575215Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-07T14:05:06.575337Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-07T14:05:06.575378Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2025-04-07T14:05:06.575545Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-07T14:05:06.576025Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:05:06.576147Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-07T14:05:06.576252Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-04-07T14:05:06.576297Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-04-07T14:05:06.576336Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-04-07T14:05:06.576842Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:06.581816Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:05:06.581989Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-04-07T14:05:06.582045Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-07T14:05:06.582087Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-04-07T14:05:06.583627Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:05:06.583765Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-07T14:05:06.583805Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-07T14:05:06.583831Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2025-04-07T14:05:06.583885Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-07T14:05:06.584182Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-04-07T14:05:06.584275Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:06.584374Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-07T14:05:06.584430Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-07T14:05:06.584796Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false 
} Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 12884904076 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-07T14:05:06.585072Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:06.588196Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:05:06.588232Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-07T14:05:06.588259Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-07T14:05:06.588314Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-07T14:05:06.588352Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-07T14:05:06.588398Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2025-04-07T14:05:06.588430Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-07T14:05:06.588475Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2025-04-07T14:05:06.588508Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-04-07T14:05:06.588572Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-04-07T14:05:06.588609Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2025-04-07T14:05:06.588688Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:06.593667Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-07T14:05:06.593725Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-04-07T14:05:06.593756Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T14:05:06.593791Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::CheckCounters >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPartitionTests::FailedTxsDontBlock [GOOD] >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersSimple::Partition >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionFirstClass >> TPartitionTests::EndWriteTimestamp_HeadKeys >> PQCountersSimple::PartitionFirstClass [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::PartitionFirstClass [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-07T14:04:51.799039Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.799125Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.843848Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.863591Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:04:51.864498Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-07T14:04:51.866949Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2199] Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.868976Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2025-04-07T14:04:51.870656Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2200] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR 2025-04-07T14:04:51.879424Z node 1 :PERSQUEUE INFO: new Cookie default|5e04bb35-6389cb0b-25a9f7bd-149fdc23_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.886157Z node 1 :PERSQUEUE INFO: new Cookie default|eb1b3379-54acd095-6fea5bae-d4510a13_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.892159Z node 1 :PERSQUEUE INFO: new Cookie default|6938e1e1-893a176c-45e2356c-2fc1ed3d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to 
EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { 
"user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", ... TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-04-07T14:05:09.884630Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:09.884720Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:09.898480Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:09.899199Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 
20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } 2025-04-07T14:05:09.899738Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:184:2197] 2025-04-07T14:05:09.901533Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:05:09.902761Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:185:2198] 2025-04-07T14:05:09.903848Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:09.909687Z node 4 :PERSQUEUE INFO: new Cookie default|bde5af52-b348d47-3b46e555-b36a6724_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:09.914169Z node 4 :PERSQUEUE INFO: new Cookie default|a48ecc30-35442264-b46ae66f-85fe7b65_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:09.919099Z node 4 :PERSQUEUE INFO: new Cookie default|84d4bfae-124f01df-a9312acb-9bddbb3b_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:09.922978Z node 4 :PERSQUEUE INFO: new Cookie default|3cd40258-5bc49bc9-8bbc1745-358ce869_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-07T14:05:09.924056Z node 4 :PERSQUEUE INFO: new Cookie default|97c07399-2a1647c2-7e62d433-20f63440_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:05:10.253643Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:10.253698Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:10.265053Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:10.265800Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-04-07T14:05:10.266377Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:184:2197] 2025-04-07T14:05:10.266966Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:10.267589Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:185:2198] 2025-04-07T14:05:10.268019Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [5:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:10.272542Z node 5 :PERSQUEUE INFO: new Cookie default|2898ba45-ff084327-e05f7d46-b31adf87_0 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:10.276821Z node 5 :PERSQUEUE INFO: new Cookie default|87b33341-5c2720c6-b19c5415-ee350bd2_1 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:10.281800Z node 5 :PERSQUEUE INFO: new Cookie default|d1c56caa-845e8d79-5d74c517-c710209a_2 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:10.285698Z node 5 :PERSQUEUE INFO: new Cookie default|f33823e6-ff084d89-c102b4c6-48d2ca62_3 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-04-07T14:04:51.817601Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.817677Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:04:51.851083Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.852352Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:04:51.000000Z 2025-04-07T14:04:51.852418Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.160154Z node 1 :PERSQUEUE INFO: new Cookie src1|492956e-c5f64733-2f7c61ce-31e0ebab_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured 
TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured 
TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 5 Send disk status response with cookie: 0 Wait immediate tx complete 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured ... Events::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 6 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Send disk status response with cookie: 0 Got batch complete: 2 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 5 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 5 Got batch complete: 10 Send disk status response with cookie: 0 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DEFRAG_SCHEDULER Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-04-07T14:05:09.113442Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:09.113503Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:09.125436Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:09.126855Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:05:09.000000Z 2025-04-07T14:05:09.126908Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:09.696747Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:09.696810Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:09.710898Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:09.712310Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:05:09.712357Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:10.247876Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:10.247942Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:10.259875Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >>>> ADD BLOB 0 writeTimestamp=2025-04-07T14:05:10.251106Z >>>> ADD BLOB 1 writeTimestamp=2025-04-07T14:05:10.251124Z >>>> ADD BLOB 2 writeTimestamp=2025-04-07T14:05:10.251137Z >>>> ADD BLOB 3 writeTimestamp=2025-04-07T14:05:10.251147Z >>>> ADD BLOB 4 writeTimestamp=2025-04-07T14:05:10.251156Z >>>> ADD BLOB 5 writeTimestamp=2025-04-07T14:05:10.251165Z >>>> ADD BLOB 6 writeTimestamp=2025-04-07T14:05:10.251172Z >>>> ADD BLOB 7 writeTimestamp=2025-04-07T14:05:10.251180Z >>>> ADD BLOB 8 writeTimestamp=2025-04-07T14:05:10.251187Z >>>> ADD BLOB 9 writeTimestamp=2025-04-07T14:05:10.251196Z 2025-04-07T14:05:10.261930Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-07T14:05:10.000000Z 2025-04-07T14:05:10.261978Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092928Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.216534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.216692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.229111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.229254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.360002Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.493248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.493409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.493532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.493677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.493719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518992Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.538713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.548012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551999Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:05:10.554218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-07T14:05:10.566993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:05:10.646573Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:05:10.661868Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:05:10.664161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.664537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.664926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-04-07T14:05:10.665944Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:05:10.667282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.667412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-07T14:05:10.668302Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.216761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.216838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359257Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.465321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.465573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-07T14:05:10.547589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550039Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.556315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.556395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.556432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.556482Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.556519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.556579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... :4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:05:11.016391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-07T14:05:11.016770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.016849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:11.016896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:05:11.017113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:05:11.017203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:11.019154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:11.019185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:05:11.019377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:11.019413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:05:11.019608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.019635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:05:11.019973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:11.020029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:11.020050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:05:11.020072Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:05:11.020099Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:05:11.020146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:05:11.022171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:05:11.033904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 781 } } 2025-04-07T14:05:11.033960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:11.034060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 781 } } 2025-04-07T14:05:11.034109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 781 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:05:11.034662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.034701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:11.034773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.034849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:11.034906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.034953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.034992Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.035027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 
72057594046678944 2025-04-07T14:05:11.035057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:05:11.036641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.036857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.036944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.036968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:05:11.037025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:05:11.037045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.037075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:05:11.037110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 102 2025-04-07T14:05:11.037135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.037170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:05:11.037188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:05:11.037262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:11.038393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.038424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:447:2411] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:11.038711Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.038864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 151us result status StatusSuccess 2025-04-07T14:05:11.039154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 
PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.204174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.204212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.211877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.211991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.212114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.487937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.488117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.488288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519366Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.534841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.536782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549785Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.549859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.549886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.549934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.549967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551805Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.551876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.551949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
{ TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 5000004 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:11.047392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-07T14:05:11.047547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-04-07T14:05:11.048012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.048116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:11.048169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-07T14:05:11.048356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-04-07T14:05:11.048465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-07T14:05:11.051060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:11.051107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:05:11.051314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:11.051359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:05:11.051609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.051660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:05:11.052112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:05:11.052206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:05:11.052238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:05:11.052269Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:05:11.052300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:11.052377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:05:11.054379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:05:11.076474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 887 } } 2025-04-07T14:05:11.076539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:11.076644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 887 } } 2025-04-07T14:05:11.076738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 887 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:05:11.077509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:05:11.077550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:11.077645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:05:11.077690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:11.077754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:05:11.077813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.077842Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.077871Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:11.077914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-07T14:05:11.079757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.080159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.080275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.080311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:05:11.080403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:05:11.080433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:05:11.080487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:05:11.080560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 104 2025-04-07T14:05:11.080614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:05:11.080646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:05:11.080688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:05:11.080776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:05:11.082224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.082270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:430:2404] TestWaitNotification: OK eventTxId 104 2025-04-07T14:05:11.082733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.082936Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 227us result status StatusSuccess 2025-04-07T14:05:11.083273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { 
Disabled { } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.216225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.216371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228907Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.353973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.358242Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.453221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.462328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-07T14:05:10.521432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.550059Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552569Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Set { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.985883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:05:10.986040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T14:05:10.986522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.986635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.986694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:05:10.986925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:05:10.987038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:05:10.989344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.989393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:05:10.989539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.989597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:05:10.989863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.989905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:05:10.990370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:10.990456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:10.990488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:05:10.990517Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 
72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:05:10.990557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:10.990608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:05:10.992311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:05:11.014047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 877 } } 2025-04-07T14:05:11.014093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:11.014292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 877 } } 2025-04-07T14:05:11.014377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 877 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:05:11.015064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.015099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:11.015186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.015227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:11.015294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.015340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.015380Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.015412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack 
message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:11.015442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:05:11.016683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.017412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.017620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.017655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:05:11.017725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:05:11.017748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.017789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:05:11.017840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:05:11.017870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.017895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:05:11.017920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:05:11.018017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:05:11.019093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.019128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:382:2356] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:11.019468Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.019648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 192us result status StatusSuccess 2025-04-07T14:05:11.019960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" 
ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.204150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.204202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.211379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.211455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.211550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.229222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:05:10.229375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.360162Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.480488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.480691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.480841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519475Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.541497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.543149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:05:10.549800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550071Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.557745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.557859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.557891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.557958Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.558010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.558108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
EBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-07T14:05:10.941698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.941790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.941848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-07T14:05:10.941954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-07T14:05:10.942045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.942094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-07T14:05:10.946797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.946858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.946991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:05:10.947138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.947190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:05:10.947227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:05:10.947391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.947436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:05:10.948902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:05:10.948989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:05:10.949020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:05:10.949072Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:05:10.949107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.949777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:05:10.949862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:05:10.949892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:05:10.949931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:05:10.949958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:10.950009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:05:10.950849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 9802 } } 2025-04-07T14:05:10.950886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:10.950981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 9802 } } 2025-04-07T14:05:10.951120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 9802 } } 2025-04-07T14:05:10.951894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.951940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:10.952062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.952107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:10.952188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.952242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.952276Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.952328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:10.952360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:05:10.954401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.955670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.955775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.955864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.956085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.956127Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:05:10.956226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:05:10.956260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:05:10.956295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T14:05:10.956365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 101 2025-04-07T14:05:10.956403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:05:10.956434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:05:10.956497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:05:10.956629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:05:10.958060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:05:10.958113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2312] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T14:05:10.960954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { 
Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.961149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.970533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-04-07T14:05:10.972679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.972840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.215743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.215840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.215977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228520Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359366Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.478192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.478410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.478568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-07T14:05:10.521545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549753Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552051Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
6233409547, partId: 0 2025-04-07T14:05:10.971635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 837 } } 2025-04-07T14:05:10.971696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 837 } } 2025-04-07T14:05:10.991438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.991492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-07T14:05:10.991616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.991672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:10.991766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.991832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.991863Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:10.991903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:10.991938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-07T14:05:10.992578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.992627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:10.992798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.992831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
2025-04-07T14:05:10.992929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.992971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.992994Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.993016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:10.993056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:05:11.006876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:11.007066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:11.011451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:11.011657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:11.011838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:11.012187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.012333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:11.012602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:11.012642Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T14:05:11.012748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-07T14:05:11.012778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-07T14:05:11.012816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T14:05:11.013356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.013713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.013760Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:05:11.013818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-07T14:05:11.013838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:05:11.013872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T14:05:11.013933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 101 2025-04-07T14:05:11.013970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:05:11.014003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:05:11.014033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:05:11.014143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:11.014193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T14:05:11.014213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T14:05:11.014236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:05:11.014253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T14:05:11.014269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T14:05:11.014299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:05:11.016782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.016826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:379:2344] TestWaitNotification: OK eventTxId 101 2025-04-07T14:05:11.017284Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.017614Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 302us result status StatusSuccess 2025-04-07T14:05:11.018071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.078477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.078573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.078610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.078656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.208183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.208231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.217567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.217638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.217721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-04-07T14:05:10.244142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359742Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.475142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.475387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.475575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-07T14:05:10.549846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.551704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.551777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
075186233409547, partId: 0 2025-04-07T14:05:10.974415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 797 } } 2025-04-07T14:05:10.974474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 797 } } 2025-04-07T14:05:10.974943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.974999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-07T14:05:10.975135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.975184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:10.975276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.975333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.975383Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:10.975415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:10.975447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-07T14:05:10.975844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.975875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:10.976000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.976030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
2025-04-07T14:05:10.976091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:05:10.976130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.976153Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.976198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:10.976231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:05:10.978842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.978913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.981220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.981291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:05:10.981365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:10.981462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.981556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:10.981780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.981958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:05:10.981996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T14:05:10.982089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-07T14:05:10.982121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-07T14:05:10.982156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T14:05:10.982290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.982315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:05:10.982361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-07T14:05:10.982380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:05:10.982411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T14:05:10.982487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 101 2025-04-07T14:05:10.982526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:05:10.982579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:05:10.982635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:05:10.982759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:10.982793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T14:05:10.982811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T14:05:10.982835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:05:10.982852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T14:05:10.982868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T14:05:10.982917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:05:10.984737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:05:10.984781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:379:2344] TestWaitNotification: OK eventTxId 101 2025-04-07T14:05:10.985275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:10.985547Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 273us result status StatusSuccess 2025-04-07T14:05:10.986035Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.078268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.078368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.078409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.078437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.204160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.204206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.211384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.211463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.211561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359985Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.483651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.483870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.484027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:05:10.521553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.550031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550071Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:05:11.082401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:05:11.082425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:05:11.082481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:05:11.088374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:05:11.088487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:05:11.090240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } 2025-04-07T14:05:11.090296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:11.090407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } 2025-04-07T14:05:11.090509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } 2025-04-07T14:05:11.091213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 397 RawX2: 4294969664 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.091256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:05:11.091369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 397 RawX2: 4294969664 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.091414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:11.091497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 397 RawX2: 4294969664 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.091553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 
102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.091584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-07T14:05:11.093357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.093686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.116014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.116080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:11.116166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.116196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:11.116259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:11.116309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.116337Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.116370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:11.116403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:11.116427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:05:11.117883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.118136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.118189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-07T14:05:11.118226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:05:11.118257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-07T14:05:11.118324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 
HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-07T14:05:11.118363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-07T14:05:11.119691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.119726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:05:11.119797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:05:11.119822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.119855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:05:11.119907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:05:11.119940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:11.120000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:05:11.120027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:05:11.120141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:05:11.120173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:05:11.121315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.121349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:427:2391] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:11.121748Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.121921Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 182us result status StatusSuccess 2025-04-07T14:05:11.122242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207084Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.216334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.216461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.229043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244129Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.483346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.483586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.483760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T14:05:10.521530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.540726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.542281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.548025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.550046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550103Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550341Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Progress: Resume: id# 102 2025-04-07T14:05:11.271311Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.271496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-07T14:05:11.271574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:05:11.271655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-07T14:05:11.271682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-07T14:05:11.271716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-07T14:05:11.271958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.272044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:11.272101Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-07T14:05:11.272139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T14:05:11.273436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.273469Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-07T14:05:11.273527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T14:05:11.273553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:05:11.273582Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-07T14:05:11.273616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2152] message: TxId: 281474976710760 2025-04-07T14:05:11.273641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:05:11.273669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T14:05:11.273699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-07T14:05:11.273735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-07T14:05:11.274796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T14:05:11.274841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-07T14:05:11.274877Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-07T14:05:11.274923Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.275821Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:05:11.275888Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { 
upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.275924Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T14:05:11.276813Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:05:11.276882Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.276921Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T14:05:11.276999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.277027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2424] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:11.277394Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.277586Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 197us result status StatusSuccess 2025-04-07T14:05:11.277881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 
DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.078260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.078333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.078361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.078384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.204220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.204266Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.212890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.212986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.213108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-07T14:05:10.228995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.229152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.358720Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.455010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.462328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549795Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.549865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.549894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.549932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.549958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.549997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.551557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.551634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:05:11.271171Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.271322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-07T14:05:11.271429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-07T14:05:11.271654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-07T14:05:11.271692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-07T14:05:11.271724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-07T14:05:11.271828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:11.271913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:11.271962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-07T14:05:11.271998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T14:05:11.273438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.273471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-07T14:05:11.273523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T14:05:11.273541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 
2025-04-07T14:05:11.273567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-07T14:05:11.273598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2152] message: TxId: 281474976710760 2025-04-07T14:05:11.273622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:05:11.273640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T14:05:11.273656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-07T14:05:11.273694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-07T14:05:11.274718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T14:05:11.274760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-07T14:05:11.274793Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-07T14:05:11.274850Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.275815Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:05:11.275867Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read 
bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.275898Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T14:05:11.277020Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:05:11.277107Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:373:2347], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:05:11.277141Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T14:05:11.277241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.277281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2424] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:11.277665Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:11.277816Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 162us result status StatusSuccess 2025-04-07T14:05:11.278113Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 
72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [FAIL] >> KqpQueryService::TableSink_OltpInsert >> TSchemeShardTTLTests::ConditionalErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.218798Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.218903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.219014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.358704Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.454430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.462331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:05:10.519346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T14:05:10.549778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552322Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Size 619 rowCount 2 cpuUsage 0 2025-04-07T14:05:14.446453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.446659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.446803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640230000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640230000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640230000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640230000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040230000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.446999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640230000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:14.447740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-07T14:05:14.448871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 
2025-04-07T14:05:14.448907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:05:14.449331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.449355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:05:14.453215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.453260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-07T14:05:14.453632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.453654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:14.454110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.454133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:14.454372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.454461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.454492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.230000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.454553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.454583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.454600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.230000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.455673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.232000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.455811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.232000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.455905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455965Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.455987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.456018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.456034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.232000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.456055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.521624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-04-07T14:05:14.521790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-07T14:05:14.521874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2025-04-07T14:05:14.521928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:05:14.522012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:05:14.522035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2025-04-07T14:05:14.522048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-04-07T14:05:14.522075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2025-04-07T14:05:14.522094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2025-04-07T14:05:14.522110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2025-04-07T14:05:14.522155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2025-04-07T14:05:14.522174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2025-04-07T14:05:14.522187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2025-04-07T14:05:14.522239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 619 row count 2 2025-04-07T14:05:14.522275Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2025-04-07T14:05:14.522307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 619, with borrowed parts 2025-04-07T14:05:14.534482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.534540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-07T14:05:14.536012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:14.536097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:14.536132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.234000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:14.536167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.078818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.078890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.078921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.078947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.204150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.204204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.213246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.213344Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.213455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.229083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.229207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359911Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.453194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.462328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.522987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549554Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551983Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
944 2025-04-07T14:05:15.303721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.304933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.305006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.307703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T14:05:15.307796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:05:15.307825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:05:15.307866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:05:15.307939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2609:3875] message: TxId: 103 2025-04-07T14:05:15.308004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:05:15.308059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:05:15.308082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:05:15.308688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-07T14:05:15.311254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:05:15.311328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:3935:5136] TestWaitNotification: OK eventTxId 103 2025-04-07T14:05:15.311863Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:15.312144Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 291us result status StatusSuccess 2025-04-07T14:05:15.312613Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 
72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-04-07T14:05:15.315075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:15.315231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:05:15.340361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-04-07T14:05:15.342801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:15.342923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:05:15.343169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:05:15.343200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:05:15.343610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:05:15.343703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:05:15.343741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4366:5566] TestWaitNotification: OK eventTxId 104 >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestore [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestore [GOOD] Test command err: 2025-04-07T14:05:10.031586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:10.031804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:10.031952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00214a/r3tmp/tmpDDqsIs/pdisk_1.dat 2025-04-07T14:05:11.287399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T14:05:11.289344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.306940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:05:11.323146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:05:11.323270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.331227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.340793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:05:11.341049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.341114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:05:11.341144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:11.341175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:11.350789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:11.351286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.351379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.362714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:11.363232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:11.363372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:05:11.364162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.364198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T14:05:11.364226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.433828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T14:05:11.433895Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:11.516260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:11.516416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:11.527974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:11.647853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.648034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:05:11.648091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.648347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:11.648400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.648578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.648645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:05:11.655001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:05:11.655196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T14:05:11.662940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.663002Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T14:05:11.663106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:11.663139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:11.663206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:11.663283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:11.663345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:05:11.663386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T14:05:11.663417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:05:11.665445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.665527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.665552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T14:05:11.665581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:05:11.665622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.665697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T14:05:11.665737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T14:05:11.666120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T14:05:11.718006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:05:11.718304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 
281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.718377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.719219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T14:05:11.719279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T14:05:11.719330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:05:11.719549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... 4] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:05:16.244381Z node 1 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037889, generation# 1, at tablet# 72075186224037888 2025-04-07T14:05:16.255312Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:948:2754] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:05:16.255412Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:05:16.255499Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [3] } 2025-04-07T14:05:16.255656Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-04-07T14:05:16.255766Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 4 BodySize: 18 }] } 2025-04-07T14:05:16.255889Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 
0 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:05:16.256031Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:948:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:05:16.256150Z node 1 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037889, generation# 1, at tablet# 72075186224037888 2025-04-07T14:05:16.266955Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:948:2754] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 4 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 4 2025-04-07T14:05:16.267020Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:05:16.267089Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [4] } 2025-04-07T14:05:16.267139Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Exhausted 2025-04-07T14:05:16.267206Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData 2025-04-07T14:05:16.267264Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-04-07T14:05:16.267298Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][OwnerId: 72057594046644480, LocalPathId: 7][OwnerId: 72057594046644480, LocalPathId: 2][1:938:2753] Finish 0 2025-04-07T14:05:16.267535Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-07T14:05:16.267601Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037889 2025-04-07T14:05:16.267670Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:05:16.267696Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:05:16.267724Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715663] at 72075186224037889 for CreateIncrementalRestoreSrc 2025-04-07T14:05:16.267935Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:05:16.278595Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:05:16.278660Z node 1 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-07T14:05:16.278755Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037889 2025-04-07T14:05:16.279047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2025-04-07T14:05:16.279101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715663, tablet: 72075186224037889, partId: 2 2025-04-07T14:05:16.279225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715663:2, at schemeshard: 72057594046644480, message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2025-04-07T14:05:16.279262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715663:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:05:16.279323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715663:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2025-04-07T14:05:16.279368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715663:2, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.279396Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2025-04-07T14:05:16.279419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715663:2, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-07T14:05:16.279459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:2 129 -> 240 2025-04-07T14:05:16.279898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2025-04-07T14:05:16.280001Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037889 state Ready 2025-04-07T14:05:16.280037Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:05:16.280171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2025-04-07T14:05:16.280201Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715663:2 ProgressState 2025-04-07T14:05:16.280288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:2 progress is 5/5 2025-04-07T14:05:16.280320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 5/5 2025-04-07T14:05:16.280353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715663, ready parts: 5/5, is published: true 2025-04-07T14:05:16.280397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715663 2025-04-07T14:05:16.280443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 5/5 2025-04-07T14:05:16.280472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 281474976715663:0 2025-04-07T14:05:16.280506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:0 2025-04-07T14:05:16.280551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 5 2025-04-07T14:05:16.280573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:1 2025-04-07T14:05:16.280584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:1 2025-04-07T14:05:16.280604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-07T14:05:16.280616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:2 2025-04-07T14:05:16.280625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:2 2025-04-07T14:05:16.280682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 4 2025-04-07T14:05:16.280699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:3 2025-04-07T14:05:16.280720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:3 2025-04-07T14:05:16.280749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T14:05:16.280767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:4 2025-04-07T14:05:16.280778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:4 2025-04-07T14:05:16.280801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2025-04-07T14:05:16.844245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr893pxk5994q1yca6tnx2cy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTIxZjA0ZmYtMzNiMDk2MWYtODVlNGZmZWMtMWJhMzE5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-07T14:05:10.031876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:10.032148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:10.032368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002108/r3tmp/tmpqm2ewx/pdisk_1.dat 2025-04-07T14:05:11.280081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T14:05:11.289384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.306940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:05:11.323147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:05:11.323289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.331242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.340792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:05:11.341083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.341147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:05:11.341186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:11.341226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:11.350797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:11.351380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.351508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.370113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:11.370808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:11.370966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:05:11.371748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.371791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T14:05:11.371825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.433828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T14:05:11.433889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:11.516247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:11.516386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:11.527557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:11.647706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.647862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:05:11.647922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.648150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:11.648199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.648385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.648485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:05:11.654993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:05:11.655197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T14:05:11.662922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.663002Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T14:05:11.663112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:11.663148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:11.663253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:11.663351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:11.663418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:05:11.663460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T14:05:11.663492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:05:11.665921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.666026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.666061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T14:05:11.666101Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:05:11.666137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.666232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T14:05:11.666297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T14:05:11.666946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T14:05:11.718554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:05:11.718873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 
281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.719001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.719382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T14:05:11.719456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T14:05:11.719513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:05:11.719642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... hemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715662 Step: 2500 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1337 } } 2025-04-07T14:05:16.304620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037889, partId: 1 2025-04-07T14:05:16.304713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715662 Step: 2500 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1337 } } 2025-04-07T14:05:16.304772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715662 Step: 2500 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1337 } } 2025-04-07T14:05:16.304982Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715662 2025-04-07T14:05:16.305115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:05:16.305135Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:05:16.305167Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:05:16.305392Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2500 2025-04-07T14:05:16.305477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.305582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 898 RawX2: 
4294970005 } Origin: 72075186224037889 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.305615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037889, partId: 1 2025-04-07T14:05:16.305717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 898 RawX2: 4294970005 } Origin: 72075186224037889 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.305750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:05:16.305804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 898 RawX2: 4294970005 } Origin: 72075186224037889 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.305845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.305879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-07T14:05:16.306216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.316800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:05:16.316860Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-07T14:05:16.316926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:05:16.316954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:05:16.317227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.317267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-04-07T14:05:16.317348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.317377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:05:16.317428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 632 RawX2: 4294969833 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-07T14:05:16.317463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept 
TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.317508Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.317542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-07T14:05:16.317577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-07T14:05:16.317595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-04-07T14:05:16.317950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.318032Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-07T14:05:16.318072Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:05:16.318178Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-07T14:05:16.318204Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:05:16.318319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.318377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-04-07T14:05:16.318429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-04-07T14:05:16.318459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-04-07T14:05:16.318521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-07T14:05:16.318561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-04-07T14:05:16.318887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-07T14:05:16.318913Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-04-07T14:05:16.318985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-07T14:05:16.319008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-07T14:05:16.319054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-04-07T14:05:16.319114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:869:2690] message: TxId: 281474976715662 2025-04-07T14:05:16.319159Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-07T14:05:16.319193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-07T14:05:16.319214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-07T14:05:16.319253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-04-07T14:05:16.319276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 2025-04-07T14:05:16.319292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-04-07T14:05:16.319352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-07T14:05:16.319407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T14:05:16.844443Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:982:2775], serverId# [1:983:2776], sessionId# [0:0:0] 2025-04-07T14:05:16.844547Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr893pnv1dgagw98grvert89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ2NDZjYjUtYzZmZTU5NTMtNjM2MTE1NmQtYmRkMWM5OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-04-07T14:05:17.073912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr893qa40xqq1xbc5cbx6cmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWY2MTIwMjItZmMwYTM3NjAtOTg1OWVlMzMtNzIxZWM1MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-04-07T14:04:51.548913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571961318062724:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.549050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.675195Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571963514508596:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.796648Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:51.802622Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/tmp0nx2Su/pdisk_1.dat 2025-04-07T14:04:51.853796Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:52.021023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.021158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.074033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:52.076649Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:52.078182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.078975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.079023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19080, node 1 2025-04-07T14:04:52.086334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.101465Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.101573Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.322439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandex3WsYwE.tmp 2025-04-07T14:04:52.322475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandex3WsYwE.tmp 2025-04-07T14:04:52.322763Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandex3WsYwE.tmp 2025-04-07T14:04:52.322880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:53.755080Z INFO: TTestServer started on Port 18493 GrpcPort 19080 TClient is connected to server localhost:18493 PQClient connected to localhost:19080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:54.072995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:54.168416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.421552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976399410638:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.421550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976399410646:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.422066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } waiting... 2025-04-07T14:04:54.433107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.468923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571976399410652:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:04:55.796214Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571976399410695:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.796330Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571974202965646:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.796542Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAwOTY5YzYtMzhmMzYyOTEtYjZjNmY5ZGEtZmI0MTRjYTY=, ActorId: [2:7490571976399410636:2280], ActorState: ExecuteState, TraceId: 01jr893181cabt6w21376sz16d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.816983Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:55.872615Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzZjMDhmMTMtMWI2MDA0NS00MjEzZmI4OC00YzA3YTM2OA==, ActorId: [1:7490571974202965614:2302], ActorState: ExecuteState, TraceId: 01jr8931ar7zjx3en9khakhv7f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.894203Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:56.209402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.284040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.407911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:04:56.548751Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571961318062724:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.548802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:56.664973Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571963514508596:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.665044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:57.905854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89338w6jtsqtngjgtg1neb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNhYjUzNi1kMTc5MzllZi01NDliNTk4My0yNDgwMGI5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490571991382835298:3102] 2025-04-07T14:04:58.075343Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 833 } 2025- ... 
ode 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:08.081090Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490572035209675287:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:08.081139Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/tmpJCHqR0/pdisk_1.dat 2025-04-07T14:05:08.095782Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:05:08.098364Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:05:08.161910Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:08.184130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:08.184194Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:08.186568Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10166, node 3 2025-04-07T14:05:08.201354Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:08.201421Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:08.204787Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:05:08.205835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:08.225859Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandexRH5Vej.tmp 2025-04-07T14:05:08.225885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandexRH5Vej.tmp 2025-04-07T14:05:08.226032Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001afe/r3tmp/yandexRH5Vej.tmp 2025-04-07T14:05:08.226170Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:08.266917Z INFO: TTestServer started on Port 7663 GrpcPort 10166 TClient is connected to server localhost:7663 PQClient connected to localhost:10166 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:08.413386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:08.435756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:05:10.440185Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572043799610264:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:10.440251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572043799610251:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:10.440393Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:10.444857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-07T14:05:10.456392Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490572043799610274:2288], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-07T14:05:10.489014Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490572045162850958:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:05:10.490596Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA3NjI3NTctYjE0NzgyYzAtYzU4ODczZjgtYmU1ZjZmNTU=, ActorId: [3:7490572045162850932:2303], ActorState: ExecuteState, TraceId: 01jr893gy6ajsefrv2mmfpcay6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:05:10.490913Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:05:10.494674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:10.541960Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7490572043799610315:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:05:10.542180Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2M5OGUwYTQtNWU0ZDFmMDQtZDJjYmFiNzAtMmFkMWQ5YWE=, ActorId: [4:7490572043799610234:2282], ActorState: ExecuteState, TraceId: 01jr893gx6308ta7ht7a5gpp1w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:05:10.542602Z node 4 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:05:10.567461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:10.636470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:05:10.862050Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr893h6f41wjynnhv5y88q39, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmNjZTliYmYtNWI2ZGM4NmItMWU2MzAxNDYtODNkNTFkM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [3:7490572045162851383:3053] 2025-04-07T14:05:13.075040Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572036572915248:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:13.075096Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:13.081221Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490572035209675287:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:13.081292Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-04-07T14:05:16.801618Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490572070932655705:3366] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-04-07T14:05:16.801652Z node 3 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [3:7490572070932655705:3366] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId 2025-04-07T14:05:16.945989Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490572070932655736:2444] TxId: 281474976715678. Ctx: { TraceId: 01jr893q8g4v9rnfncvp04t0q0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzFlMTAwZDYtNzdjZjBlYjItOGYyNDc1MzgtZmZiZDRmNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-07T14:05:16.953311Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490572070932655740:2444], TxId: 281474976715678, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzFlMTAwZDYtNzdjZjBlYjItOGYyNDc1MzgtZmZiZDRmNTg=. TraceId : 01jr893q8g4v9rnfncvp04t0q0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490572070932655736:2444], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::BackupRestore >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TPQTest::TestChangeConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.780833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:04:51.791311Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:04:51.791666Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-07T14:04:51.791713Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:04:51.791760Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-07T14:04:51.791801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:04:51.791849Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.791893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2025-04-07T14:04:51.791927Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:176:2057] recipient: [1:14:2061] 2025-04-07T14:04:51.817588Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.817673Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:175:2190], now have 1 active actors on pipe 2025-04-07T14:04:51.817753Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:04:51.833490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: 
"/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.845228Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-07T14:04:51.845402Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.846196Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-07T14:04:51.846296Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:04:51.846721Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:04:51.847040Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:183:2196] 2025-04-07T14:04:51.849490Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-04-07T14:04:51.849562Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:183:2196] 2025-04-07T14:04:51.849614Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:04:51.851706Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-07T14:04:51.851767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-07T14:04:51.851820Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-04-07T14:04:51.851853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-04-07T14:04:51.852037Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:04:51.852074Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.852232Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:51.852628Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.861496Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:04:51.861847Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.861893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:190:2201], now have 1 active actors on pipe 2025-04-07T14:04:51.866078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.866141Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:195:2205], now have 1 active actors on pipe 2025-04-07T14:04:51.866195Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T14:04:51.866232Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-07T14:04:51.866907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-07T14:04:51.867399Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-07T14:04:51.867875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-07T14:04:51.868343Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-04-07T14:04:51.868454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-04-07T14:04:51.868506Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-04-07T14:04:51.868634Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-07T14:04:51.868800Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-07T14:04:51.868847Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-07T14:04:51.869136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.869177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2207], now have 1 active actors on pipe 2025-04-07T14:04:51.869264Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T14:04:51.869300Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 
'rt3.dc1--asdfgs--topic' partition 0 2025-04-07T14:04:51.869393Z node 1 :PERSQUEUE INFO: new Cookie default|3be1c41-e211524c-1dff03c-2eb987d9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-07T14:04:51.869502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:04:51.869578Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:04:51.869933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:04:51.869967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2209], now have 1 active actors on pipe 2025-04-07T14:04:51.870036Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-07T14:04:51.870076Z ... } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 20 Important: false } Consumers { Name: "bbb" Generation: 21 Important: true } Consumers { Name: "ccc" Generation: 21 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.264250Z node 25 :PERSQUEUE INFO: new Cookie default|d0ee9d47-4d64e5d4-15493fd1-68306c63_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.272194Z node 25 :PERSQUEUE INFO: new Cookie default|c44aa38-6b89c7aa-cf69d1f5-801334cb_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.277154Z node 25 :PERSQUEUE INFO: new Cookie default|6aab8173-bfbde9dd-d6a77f26-117e45ba_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] 2025-04-07T14:05:20.726682Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:20.726749Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] Leader for TabletID 72057594037927938 is [26:151:2172] sender: [26:152:2057] recipient: [26:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:175:2057] recipient: [26:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.739081Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:20.740439Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 22 actor [26:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 22 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 22 ReadRuleGenerations: 22 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "aaa" Generation: 22 Important: true } 2025-04-07T14:05:20.741032Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [26:182:2195] 2025-04-07T14:05:20.742743Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [26:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:20.744728Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [26:183:2196] 2025-04-07T14:05:20.746023Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [26:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:05:20.747731Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [26:184:2197] 2025-04-07T14:05:20.749230Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [26:184:2197] 2025-04-07T14:05:20.750924Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [26:185:2198] 2025-04-07T14:05:20.752380Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [26:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:20.754101Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [26:186:2199] 2025-04-07T14:05:20.755315Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [26:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.764233Z node 26 :PERSQUEUE INFO: new Cookie default|127f0b33-13e10a8e-9c2bc351-e945723e_0 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.769773Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:20.776198Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] bootstrapping 5 [26:233:2233] 2025-04-07T14:05:20.778148Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [26:233:2233] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:05:20.781683Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] bootstrapping 6 [26:234:2234] 2025-04-07T14:05:20.783515Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [26:234:2234] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:20.786512Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] bootstrapping 7 [26:235:2235] 2025-04-07T14:05:20.788227Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [26:235:2235] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:05:20.791611Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [26:236:2236] 2025-04-07T14:05:20.793411Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [26:236:2236] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:20.796270Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [26:237:2237] 2025-04-07T14:05:20.798052Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [26:237:2237] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.820250Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 23 actor [26:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 23 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 22 ReadRuleGenerations: 23 ReadRuleGenerations: 23 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "bbb" Generation: 23 Important: true } Consumers { Name: "ccc" Generation: 23 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.824920Z node 26 :PERSQUEUE INFO: new Cookie default|3e5d875b-f8ee1583-c5607fd5-636a151d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.831100Z node 26 :PERSQUEUE INFO: new Cookie default|8fadefd2-8c74732a-b3f28001-ca6d052d_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:20.834623Z node 26 :PERSQUEUE INFO: new Cookie default|8c46d306-d116b419-f7b9c90d-1540aa1e_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |91.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5807, MsgBus: 61570 2025-04-07T13:57:01.577375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490569945376617962:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:01.577480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010cc/r3tmp/tmpjK4Thu/pdisk_1.dat 2025-04-07T13:57:02.050509Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T13:57:02.050594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T13:57:02.057447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5807, node 1 2025-04-07T13:57:02.094169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T13:57:02.094514Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:57:02.110755Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T13:57:02.144463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T13:57:02.144484Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T13:57:02.144490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T13:57:02.144609Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61570 TClient is connected to server localhost:61570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T13:57:02.818409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.843098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:02.958194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T13:57:03.128228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T13:57:03.207252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.082229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569962556488841:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.082349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.337936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.385444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.414837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.442213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.469493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.543993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T13:57:05.598738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569962556489341:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.598838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.598852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490569962556489346:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T13:57:05.602058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T13:57:05.609938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490569962556489348:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T13:57:06.579514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490569945376617962:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T13:57:06.579565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T13:57:06.702045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.771383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.815910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.875868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T13:57:06.941369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.028204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.071661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.136923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.206946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.270835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.341771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.385924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.442012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.498874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.563407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.631595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.702551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.772333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T13:57:07.835112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subop ... CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:05:05.731290Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:05:05.731348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:05:05.731374Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:05:05.731421Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:05:05.731445Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:05:05.731495Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:05:05.731517Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:05:05.731519Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:05:05.731539Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:05:05.731595Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:05:05.731618Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:05:05.750907Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:05:05.750961Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:05:05.751034Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:05:05.751055Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:05:05.751178Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:05:05.751204Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:05:05.751271Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:05:05.751300Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:05:05.751350Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:05:05.751378Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:05:05.751405Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:05:05.751432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:05:05.751757Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:05:05.751821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:05:05.752030Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:05:05.752075Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:05:05.752235Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:05:05.752281Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:05:05.752487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:05:05.752519Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:05:05.752623Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:05:05.752678Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:05:05.753154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:05:05.753202Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:05:05.753271Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:05:05.753299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:05:05.753414Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:05:05.753440Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:05:05.753505Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:05:05.753532Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:05:05.753583Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:05:05.753621Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:05:05.753671Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:05:05.753708Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:05:05.753957Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:05:05.754000Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:05:05.754159Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:05:05.754200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:05:05.754316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:05:05.754344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:05:05.754463Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:05:05.754489Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:05:05.754566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:05:05.754589Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache >> KqpQueryService::TableSink_OltpInteractive [GOOD] >> TPQCachingProxyTest::TestPublishAndForget |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> IncrementalBackup::BackupRestore [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is 
[1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.092983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.207801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.207846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.216779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.216895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359395Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.486754Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.486950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.487122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.541122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.542572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550026Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, 
[OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552161Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... Result, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60029000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 145 } } 2025-04-07T14:05:26.028418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T14:05:26.028491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.028523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-07T14:05:26.029820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.029965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.030003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:26.030060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T14:05:26.030189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:26.031632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:05:26.031769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 
5000003 2025-04-07T14:05:26.032084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:26.032202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:26.032253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:05:26.032519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:05:26.032681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:05:26.036698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:26.036732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:05:26.036903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:26.036938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:05:26.037172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.037218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:05:26.037601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:26.037670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:05:26.037701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:05:26.037737Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:05:26.037766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:26.037819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:05:26.038701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } 2025-04-07T14:05:26.038726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:26.038825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } 2025-04-07T14:05:26.038880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } 2025-04-07T14:05:26.039638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:26.039682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:05:26.039764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:26.039791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:05:26.039844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:05:26.039885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:26.039917Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.039945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:05:26.039978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:05:26.041421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:05:26.041569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.042197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.042344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:05:26.042387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:05:26.042451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-07T14:05:26.042474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:26.042510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:05:26.042551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:05:26.042585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:05:26.042610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:05:26.042631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:05:26.042706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:05:26.043633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:05:26.043667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:603:2560] TestWaitNotification: OK eventTxId 102 2025-04-07T14:05:26.043927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:26.043973Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-04-07T14:05:26.045016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:26.045087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:26.045114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.038500Z, at schemeshard: 72057594046678944 2025-04-07T14:05:26.045151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.208175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.208230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.216992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.217091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.217207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.229018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.472646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.472902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.473108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.536612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-07T14:05:10.547636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550297Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.555084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ard::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.072465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:05:11.072499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T14:05:11.072585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:05:11.072618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:05:11.072688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:05:11.072768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 103 2025-04-07T14:05:11.072815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:05:11.072847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:05:11.072876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:05:11.072966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:05:11.074369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:05:11.074416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:490:2454] TestWaitNotification: OK eventTxId 103 2025-04-07T14:05:15.071305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:05:15.071361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:16.379880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.2293 2025-04-07T14:05:16.379970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0627 2025-04-07T14:05:16.410613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:05:16.410786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:05:16.410858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-07T14:05:16.410898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:16.410995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-07T14:05:16.411014Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-07T14:05:16.411027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:05:16.421326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:18.881227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0582 2025-04-07T14:05:18.881326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0165 2025-04-07T14:05:18.922543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:05:18.922715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:05:18.922786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-07T14:05:18.922837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:18.922956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-07T14:05:18.922999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-07T14:05:18.923026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:05:18.933396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:21.367259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0582 2025-04-07T14:05:21.367315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0165 2025-04-07T14:05:21.397920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:05:21.398049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:05:21.398101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats 
from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-07T14:05:21.398138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:21.398216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-07T14:05:21.398251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-07T14:05:21.398283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:05:21.408548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:23.849664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0153 2025-04-07T14:05:23.849723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0048 2025-04-07T14:05:23.880765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:05:23.880904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:05:23.880952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-07T14:05:23.880981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:23.881036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-07T14:05:23.881065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-07T14:05:23.881079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:05:23.891304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:26.304186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-07T14:05:26.304284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:26.304439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase 
DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:26.304585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:26.304946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:26.305734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:26.305790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:05:26.308051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:26.308163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:26.308193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2025-04-07T14:05:26.308226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::BackupRestore [GOOD] Test command err: 2025-04-07T14:05:10.031620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:10.031834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:10.032005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002126/r3tmp/tmpKRVkiP/pdisk_1.dat 2025-04-07T14:05:11.285835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-07T14:05:11.289422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.306953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:05:11.323147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:05:11.323262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.331214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.340783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:05:11.341046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.341101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:05:11.341172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:11.341197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:11.350792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:05:11.350899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:11.351255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.351326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.351373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.362657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:11.363229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:11.363389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:05:11.364280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.364317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-07T14:05:11.364362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.433827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-07T14:05:11.433892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:11.516257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:11.516353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:11.527522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:11.647545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.647695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:05:11.647742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.647953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:11.647989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-07T14:05:11.648138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.648197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:05:11.654992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:05:11.655194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:05:11.655259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-07T14:05:11.662905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.662980Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-07T14:05:11.663194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:11.663223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:11.663288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:11.663321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:11.663345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:11.663404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:05:11.663457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-07T14:05:11.663513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:05:11.665449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.665565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-07T14:05:11.665608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-07T14:05:11.665639Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:05:11.665669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:05:11.665742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-07T14:05:11.665783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2025-04-07T14:05:11.666121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-07T14:05:11.717807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:05:11.718097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 
281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:11.718192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2025-04-07T14:05:11.719220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T14:05:11.719270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T14:05:11.719314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:05:11.719545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... dResult from SS at 72075186224037892 2025-04-07T14:05:23.984570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715667:2, at schemeshard: 72057594046644480 2025-04-07T14:05:23.984610Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715667:2 ProgressState 2025-04-07T14:05:23.984736Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:2 progress is 5/5 2025-04-07T14:05:23.984791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2025-04-07T14:05:23.984837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715667, ready parts: 5/5, is published: true 2025-04-07T14:05:23.984898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:558:2485] message: TxId: 281474976715667 2025-04-07T14:05:23.984942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2025-04-07T14:05:23.984983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-04-07T14:05:23.985014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2025-04-07T14:05:23.985073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 5 2025-04-07T14:05:23.985107Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:1 2025-04-07T14:05:23.985127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:1 2025-04-07T14:05:23.985154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-07T14:05:23.985173Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:2 2025-04-07T14:05:23.985205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:2 2025-04-07T14:05:23.985296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-07T14:05:23.985322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976715667:3 2025-04-07T14:05:23.985339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:3 2025-04-07T14:05:23.985371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-07T14:05:23.985391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:4 2025-04-07T14:05:23.985409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:4 2025-04-07T14:05:23.985432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-07T14:05:24.445571Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:05:24.445718Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 1 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2025-04-07T14:05:24.445825Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:05:24.446101Z node 2 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][2:1003:2791][0][2:1152:2887] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2025-04-07T14:05:24.446254Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T14:05:24.446309Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T14:05:24.446433Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2025-04-07T14:05:24.446478Z node 2 :PERSQUEUE DEBUG: waiting read cookie 2 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2025-04-07T14:05:24.446560Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:05:24.820213Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:05:24.820298Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 2 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2025-04-07T14:05:24.820368Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:05:24.820555Z node 2 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][2:1003:2791][0][2:1152:2887] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2025-04-07T14:05:24.820639Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T14:05:24.820700Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T14:05:24.820778Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 3 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 
2025-04-07T14:05:24.820801Z node 2 :PERSQUEUE DEBUG: waiting read cookie 3 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2025-04-07T14:05:24.820837Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:05:25.009300Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:05:25.178981Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:05:25.179075Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 3 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2025-04-07T14:05:25.179162Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:05:25.179378Z node 2 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][2:1003:2791][0][2:1152:2887] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2025-04-07T14:05:25.179501Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T14:05:25.179537Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T14:05:25.179627Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 4 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2025-04-07T14:05:25.179656Z node 2 :PERSQUEUE DEBUG: waiting read cookie 4 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2025-04-07T14:05:25.179706Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:05:25.510115Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:05:25.510193Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 4 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2025-04-07T14:05:25.510299Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:05:25.510520Z node 2 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][2:1003:2791][0][2:1152:2887] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2025-04-07T14:05:25.510614Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T14:05:25.510644Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T14:05:25.510710Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:05:25.510743Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 5 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2025-04-07T14:05:25.510765Z node 2 :PERSQUEUE DEBUG: waiting read cookie 5 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2025-04-07T14:05:25.850989Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:05:25.851078Z node 
2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 5 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2025-04-07T14:05:25.851167Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:05:25.851361Z node 2 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][2:1003:2791][0][2:1152:2887] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2025-04-07T14:05:25.851441Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-07T14:05:25.851474Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2025-04-07T14:05:25.851535Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:05:25.851569Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 6 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2025-04-07T14:05:25.851591Z node 2 :PERSQUEUE DEBUG: waiting read cookie 6 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2025-04-07T14:05:25.934325Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr893zz82n52f30xy4a7v76g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTMzNzc0YjMtZjQ4Y2YyMzgtMTZiZGI1YTMtYWYyYmUyOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 5 } items { uint32_value: 200 } } 2025-04-07T14:05:26.009580Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89401m9ey338xq7fsf9bw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjliMDEzYTYtZjVlMTcxNGItNzk5YTU2ZjEtMjgzMWIzNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 5 } items { uint32_value: 200 } } |91.6%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript >> TPQCachingProxyTest::TestPublishAndForget [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-04-07T14:05:28.167800Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:28.167900Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:05:28.195787Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:05:28.204019Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-07T14:05:28.204216Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-07T14:05:28.204260Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2025-04-07T14:05:28.204350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.206505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.206556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.217906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.218011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.218132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.491553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.491762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.491921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.492144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.492201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519391Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.539762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.543450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.548073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.548288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.548387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.548779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.548881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.549274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.549408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.551314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.551442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.551740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.551812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.552169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.552227Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.552428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.552496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.552554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.552615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.552693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.552748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.552811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.552849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.552912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.554854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.554971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.555015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.555099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.555158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.555285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
vNotifyTxCompletionResult> complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.697809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.698742Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699066Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699621Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699712Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.699922Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.700028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.700099Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.700317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.700439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.702774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.702850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.702948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.703036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.703098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.703176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.703305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.703388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.705344Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.705422Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 
72057594046678944 2025-04-07T14:05:28.705520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.705559Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:05:28.705656Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:05:28.705687Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:05:28.705724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T14:05:28.705786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2584:3850] message: TxId: 101 2025-04-07T14:05:28.705825Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:05:28.705881Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:05:28.705908Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:05:28.706497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-07T14:05:28.707094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:28.708946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:05:28.708987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2585:3851] TestWaitNotification: OK eventTxId 101 2025-04-07T14:05:28.709406Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:05:28.709637Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 252us result status StatusSuccess 2025-04-07T14:05:28.710072Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| 
[TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSplit::StreamLookupSplitAfterFirstResult >> KqpScan::SelfJoin3xSameLabels >> KqpSplit::AfterResolve+Ascending >> KqpSplit::ChoosePartition+Descending >> KqpScan::CrossJoin >> KqpSplit::AfterResult+Descending >> KqpScan::SecondaryIndexCustomColumnOrder >> KqpScan::LeftSemiJoinSimple >> KqpSplit::IntersectionLosesRange+Unspecified >> KqpSplit::AfterResolve+Unspecified >> KqpScan::Offset >> KqpScan::UnionBasic >> KqpScan::TwoAggregatesOneFullFrameWindow >> KqpScan::StreamExecuteScanQueryCancelation >> KqpSplit::AfterResultMultiRange+Ascending >> KqpScan::DecimalColumn >> KqpSplit::UndeliveryOnFinishedRead >> KqpFlowControl::FlowControl_Unlimited >> KqpScan::Join3 >> KqpScan::IsNull >> KqpScan::AggregateNoColumn >> KqpSplit::BorderKeys+Ascending >> KqpScan::IsNullPartial >> KqpScan::PureExpr >> KqpScan::NullInKey >> KqpScan::RightSemiJoinSimple >> KqpScan::Limit >> KqpScan::TopSort >> KqpRequestContext::TraceIdInErrorMessage >> KqpScan::TaggedScalar >> KqpScan::Grep >> KqpScan::StreamLookup >> KqpScan::UnionWithPureExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 7400, MsgBus: 14552 2025-04-07T14:04:07.944971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571773751011019:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:07.945630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001053/r3tmp/tmpBFeXmR/pdisk_1.dat 2025-04-07T14:04:08.427494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:04:08.449505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:08.449609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:08.453806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7400, node 1 2025-04-07T14:04:08.705149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:04:08.705167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:04:08.705173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:04:08.705257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14552 TClient is connected to server localhost:14552 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:09.507582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:11.701207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571790930880835:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.701312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:11.957485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:04:12.171781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795225848288:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.175877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:04:12.176864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571795225848283:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.176929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:12.190499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571795225848290:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:04:12.950664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571773751011019:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:12.963772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:13.163648Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-07T14:04:23.422665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:04:23.422690Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:3798, virtual void NKikimr::NKqp::NTestSuiteKqpQueryService::TTestCaseTableSink_OltpReplace::Execute_(NUnitTest::TTestContext &) [HasSecondaryIndex = true]: (it.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1AB9234F 1. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:3798: Execute_ @ 0x1A20081D 2. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: operator() @ 0x1A191EF7 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x1A191EF7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x1A191EF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x1A191EF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x1A191EF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1ABD1248 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1ABD1248 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1ABD1248 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1AB98EB8 11. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: Execute @ 0x1A1910C3 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1AB9A785 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1ABCAE8C 14. ??:0: ?? @ 0x7F2730F2FD8F 15. ??:0: ?? @ 0x7F2730F2FE3F 16. ??:0: ?? @ 0x17DAD028 Trying to start YDB, gRPC: 21567, MsgBus: 14516 2025-04-07T14:05:14.118604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572061240887249:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:14.118647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001053/r3tmp/tmpjXNv1A/pdisk_1.dat 2025-04-07T14:05:14.214285Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:14.239184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:14.239271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:14.241172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21567, node 2 2025-04-07T14:05:14.279799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:14.279817Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:14.279822Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:14.279904Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14516 TClient is connected to server localhost:14516 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:14.594244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:16.302026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572069830822461:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:16.302096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:16.316286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:16.346857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572069830822560:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:16.346925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:16.347069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572069830822565:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:16.350194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:05:16.356477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572069830822567:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:05:16.675858Z node 2 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Operation is aborting because an duplicate key;tx_id=4; 2025-04-07T14:05:16.676108Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2025-04-07T14:05:16.676279Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2025-04-07T14:05:16.676492Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572069830822704:2334], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7490572069830822688:2334]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 2]`. ShardID=72075186224037888, Sink=[2:7490572069830822704:2334].{
: Fatal: Operation is aborting because an duplicate key } 2025-04-07T14:05:16.677011Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572069830822697:2334], SessionActorId: [2:7490572069830822688:2334], Bad request. Table `/Root/DataShard`. {
: Fatal: Operation is aborting because an duplicate key }. statusCode=BAD_REQUEST. subIssues=
: Fatal: Operation is aborting because an duplicate key . sessionActorId=[2:7490572069830822688:2334]. isRollback=0 2025-04-07T14:05:16.677122Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YThmYzQ4NWEtZjZmOWY3N2ItODEyN2YxOGMtMjExNTExNmE=, ActorId: [2:7490572069830822688:2334], ActorState: ExecuteState, TraceId: 01jr893pxj6vs7d1nsb5qh159t, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [2:7490572069830822698:2334] from: [2:7490572069830822697:2334] 2025-04-07T14:05:16.677363Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490572069830822698:2334] TxId: 281474976715663. Ctx: { TraceId: 01jr893pxj6vs7d1nsb5qh159t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YThmYzQ4NWEtZjZmOWY3N2ItODEyN2YxOGMtMjExNTExNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table `/Root/DataShard`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key } 2025-04-07T14:05:16.682512Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YThmYzQ4NWEtZjZmOWY3N2ItODEyN2YxOGMtMjExNTExNmE=, ActorId: [2:7490572069830822688:2334], ActorState: ExecuteState, TraceId: 01jr893pxj6vs7d1nsb5qh159t, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:05:19.119068Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572061240887249:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:19.119171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 23138, MsgBus: 28130 2025-04-07T14:05:22.287442Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572098354552773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:22.287496Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001053/r3tmp/tmpc6a6HT/pdisk_1.dat 2025-04-07T14:05:22.373072Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:22.392185Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:22.392317Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:22.393750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23138, node 3 2025-04-07T14:05:22.431463Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:22.431485Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:22.431490Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:22.431572Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28130 TClient is connected to server localhost:28130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:22.778270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:24.299843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572106944487981:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.299922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.320473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:24.450585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:24.598603Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572106944489308:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.598666Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.602464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572106944489317:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.602534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.602570Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572106944489322:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:24.605296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:05:24.614102Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572106944489324:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } >> KqpQueryService::DdlExecuteScript [GOOD] >> KqpSplit::AfterResultMultiRange+Unspecified ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 2525, MsgBus: 31484 2025-04-07T14:03:34.977323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571632891431359:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.977405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001104/r3tmp/tmpDFDDzB/pdisk_1.dat 2025-04-07T14:03:35.834205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.871837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.871914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.873730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2525, node 1 2025-04-07T14:03:36.058719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.058736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.058741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.058828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31484 TClient is connected to server localhost:31484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.809568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.846324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:03:36.873711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.123290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.341472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.440366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.601275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650071302103:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.601377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.981801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571632891431359:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.981854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.481539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.520207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.560034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.599687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.638026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.685440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658661237201:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.685522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.685680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658661237206:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.689724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.701254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658661237208:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.627636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:03:41.778611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:41.778867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:41.779013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:03:41.779089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:03:41.779159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:41.779302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:41.779322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:03:41.779435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:41.779460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:03:41.779569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:03:41.779569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:41.779668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:03:41.779679Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:03:41.779772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7490571662956204943:2476];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:03:41.779772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490571662956204941:2474];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 20 ... oot, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:26.786278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.808061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.828925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.852102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.879489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.906301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:26.941894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572114073585345:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:26.941935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572114073585350:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:26.941964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:26.944900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:26.953316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572114073585352:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:27.624421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:27.707139Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2025-04-07T14:05:27.722372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19099, MsgBus: 16628 2025-04-07T14:05:28.271896Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572123334746241:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:28.271961Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001104/r3tmp/tmpfT66Xl/pdisk_1.dat 2025-04-07T14:05:28.332070Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19099, node 3 2025-04-07T14:05:28.373278Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:28.373355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:28.375067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:28.380957Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:28.380983Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:28.380990Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:28.381085Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16628 TClient is connected to server localhost:16628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:28.654245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:28.662806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:28.705798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:28.802048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:28.862986Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:30.329553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572131924682529:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:30.329629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:30.361127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.382434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.402564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.423867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.447650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.468843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:30.498364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572131924683021:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:30.498423Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572131924683026:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:30.498434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:30.500599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:30.506904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572131924683028:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:31.370555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:31.376829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:05:31.378727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:05:31.698055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:05:33.272634Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572123334746241:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:33.272744Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] >> KqpOlap::PredicatePushdownCastErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:05:10.079391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.079458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.079484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.079508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.208340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.208392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.217349Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.217433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.217539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.228884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.354023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.360173Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.496399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.496646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.496862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.497086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.497150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:05:10.519355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.548028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T14:05:10.549859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.550041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550353Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.552324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.552460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.552510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.552560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.552685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d: 72057594046678944 2025-04-07T14:05:35.017902Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:35.018005Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:35.018083Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:35.018350Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.028079Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:123:2149] sender: [37:235:2058] recipient: [37:15:2062] 2025-04-07T14:05:35.040867Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:35.041189Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.041476Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:35.041744Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:35.041841Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.044583Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:35.044793Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:35.045078Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.045185Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:35.045270Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:35.045333Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:35.047151Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.047229Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:35.047293Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:35.048823Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.048878Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.048954Z node 37 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:35.049043Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:35.049266Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:35.050695Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:35.050968Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:35.052261Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:35.052480Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 158913792106 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:35.052575Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:35.052956Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:35.053040Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:35.053374Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:35.053508Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:35.055412Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:35.055499Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:35.055789Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:35.055871Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:202:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:35.056375Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.056465Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:35.056699Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:35.056777Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-04-07T14:05:35.056875Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:35.056952Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:35.057019Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:35.057075Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:35.057169Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:35.057240Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:35.057310Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:35.058001Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:35.058155Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:35.058227Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:35.058300Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:35.058369Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:35.058517Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:05:35.061093Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:05:35.061794Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:05:35.063129Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Bootstrap 2025-04-07T14:05:35.098670Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Become StateWork (SchemeCache [37:270:2262]) 2025-04-07T14:05:35.102049Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:35.102666Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:05:35.102876Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" 
ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-07T14:05:35.103670Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-07T14:05:35.105502Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:05:35.110150Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:35.110444Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-07T14:05:35.111141Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:129:2058] recipient: [1:107:2139] 2025-04-07T14:05:10.102068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.102137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.102168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.102196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.102242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.102277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.102334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.102586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.205836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.205888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:10.209782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.209905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.210070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T14:05:10.228015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.228164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.283357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.353769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.353810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.353979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.359694Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.472792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.472984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.473128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T14:05:10.519466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 120 RawX2: 4294969442 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.549722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.549820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.549844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.549870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.549908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.549933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.549949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.549984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:05:10.551382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:05:10.551483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:05:10.551506Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:05:10.551528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.551585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
hemeshard: 72057594046678944 2025-04-07T14:05:34.946303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:05:34.946422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:05:34.946725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:05:34.946769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:34.946825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 107:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:05:34.946913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-07T14:05:34.946941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:05:34.946978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-07T14:05:34.947033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:352:2327] message: TxId: 107 2025-04-07T14:05:34.947070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:05:34.947124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T14:05:34.947153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T14:05:34.947268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:05:34.947302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:05:34.947581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:05:34.947618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:05:34.947670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:34.949719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:05:34.949763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1396:3303] 2025-04-07T14:05:34.949930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 2025-04-07T14:05:35.056977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:05:35.057586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 
2025-04-07T14:05:35.057656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.057694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:35.057955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:05:35.058376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-07T14:05:35.058420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.058441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:35.152540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-07T14:05:35.152684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:35.152795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:35.152934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744048000746233 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:35.153037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744048000746233 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-07T14:05:35.153534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:05:35.154104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:05:35.154308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:35.154350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:05:35.154800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-07T14:05:35.154832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:05:35.158090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:35.158197Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:35.158255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-07T18:46:40.746233Z, at schemeshard: 72057594046678944 2025-04-07T14:05:35.158338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-07T14:05:35.158371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:35.158459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:05:35.158485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-07T18:46:40.746233Z, at schemeshard: 72057594046678944 2025-04-07T14:05:35.158520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:05:35.180831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:35.214364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:05:35.214605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-07T14:05:35.214682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.214728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:35.214957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:05:35.215019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-07T14:05:35.215055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.215077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:35.267563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:05:35.320184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-04-07T14:05:35.320355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-07T14:05:35.320443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats 
from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.320483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-07T14:05:35.320715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:05:35.320768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-07T14:05:35.320802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-04-07T14:05:35.320828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownCastErrors [GOOD] Test command err: Trying to start YDB, gRPC: 27044, MsgBus: 62737 2025-04-07T14:00:41.525260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570889469718727:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:41.525319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fac/r3tmp/tmpqsGwU1/pdisk_1.dat 2025-04-07T14:00:42.125041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:42.144024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:42.144110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:42.145569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27044, node 1 2025-04-07T14:00:42.239332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:42.239362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:42.239370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:42.239448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62737 TClient is connected to server localhost:62737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:42.989741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:43.014524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:43.160499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:43.160840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:43.161086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:43.161222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:43.161338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:43.161452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:43.161604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:43.161723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:43.161821Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:43.161906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:43.162034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:43.162126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570898059653961:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:43.169372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:00:43.169428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:00:43.169552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:00:43.169582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:00:43.169783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:00:43.169807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:00:43.169901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:00:43.169953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:00:43.170019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:00:43.170051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:00:43.170091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:00:43.170118Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:00:43.170660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:00:43.170697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:00:43.170904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:00:43.170940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:00:43.171094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:00:43.171138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:00:43.171354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:00:43.171390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:00:43.171517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:00:43.171566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:00:43.220380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744034443046 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ... : Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int16 '==' Optional 2025-04-07T14:05:32.616357Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IyZTYzZjMtMjkwNGE0NGItZTVhZTk5NDQtNzQyNDBlY2M=, ActorId: [1:7490572139305292592:9451], ActorState: ExecuteState, TraceId: 01jr8946hnbgrhv5mdgg34g5we, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.644948Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292608:3038], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int32 '==' Optional 2025-04-07T14:05:32.647313Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE2NGJjMmUtMTFjMWRiZGMtNTEwMWMwMjUtMjM5ZmQxNGY=, ActorId: [1:7490572139305292606:9845], ActorState: ExecuteState, TraceId: 01jr8946jh6b6asjkqbj8eqz58, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.686895Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292620:3651], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int64 '==' Optional 2025-04-07T14:05:32.688984Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM5ZWQ4MWQtYTRmNmYyYzEtODRmZWFlZjktY2I1ZGJhODE=, ActorId: [1:7490572139305292618:3085], ActorState: ExecuteState, TraceId: 01jr8946kd2cbpj3znsc7ty74s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.724418Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292633:9272], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint8 '==' Optional 2025-04-07T14:05:32.726656Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzgzYjQ0YzktMzMxMWU3NDYtMzc5MjBlOTktZDBiODg2Y2M=, ActorId: [1:7490572139305292631:9242], ActorState: ExecuteState, TraceId: 01jr8946msa1sg21kcb991xv3n, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt16; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.751121Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292645:3356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint16 '==' Optional 2025-04-07T14:05:32.753549Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzBlMjdlNDktOGIyMWIzZTMtMjY4N2UwZGItOTc2OWM2MmU=, ActorId: [1:7490572139305292643:3385], ActorState: ExecuteState, TraceId: 01jr8946nyefy71k4p2fckeqan, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.784966Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292657:3224], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint32 '==' Optional 2025-04-07T14:05:32.786788Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgzZWYzZGItNzVjNjIyYmItNDNmODM3ZGUtODliNjgwZmE=, ActorId: [1:7490572139305292655:3399], ActorState: ExecuteState, TraceId: 01jr8946psefx4cknyarqv2myw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.806213Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292669:3560], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint64 '==' Optional 2025-04-07T14:05:32.808222Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZiMzBmZmQtNmRkNWE0ZC01OWE5YzhhZS0zMDYyNjk1Mg==, ActorId: [1:7490572139305292667:3489], ActorState: ExecuteState, TraceId: 01jr8946qq9qc8czdextgx288x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Double; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.830617Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292681:3576], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Double '==' Optional 2025-04-07T14:05:32.830917Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDI5YTQ0ZTAtM2FkMzlhMzItZDNmZjNkNGMtNTAwODE0NzI=, ActorId: [1:7490572139305292679:3646], ActorState: ExecuteState, TraceId: 01jr8946rc9r91tr30w5f1w1nk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Float; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.865114Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292693:3631], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Float '==' Optional 2025-04-07T14:05:32.867746Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjgyYzQ3NWQtYWYzNGI4ZWQtMWNmNWNiYTYtYWIyYWNmZWU=, ActorId: [1:7490572139305292691:3658], ActorState: ExecuteState, TraceId: 01jr8946s8et5tv05gmyzwxhys, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS String; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.891214Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292706:3662], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: String '==' Optional 2025-04-07T14:05:32.892884Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njg3NTc1NC0xNjNmMmZiYy03MjliZDU0Mi04MjU1MjMzNw==, ActorId: [1:7490572139305292704:3643], ActorState: ExecuteState, TraceId: 01jr8946t93yx4q9rf7dpv46gm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Utf8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column Test query: --!syntax_v1 DECLARE $in_value AS Timestamp; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:32.918916Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572139305292722:3656], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Utf8 '==' Optional 2025-04-07T14:05:32.921373Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDgxYjVlMDQtMjg1ZTIzYjItYTU4NmU1ZDAtZDk1NzdlYWE=, ActorId: [1:7490572139305292720:3745], ActorState: ExecuteState, TraceId: 01jr8946v47zxvzdt0bwn78a1p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:05:33.469217Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034733000, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Date; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:34.407400Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034734000, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Datetime; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-04-07T14:05:35.019330Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034734344, txId: 18446744073709551615] shutting down >> KqpScan::PrunePartitionsByLiteral >> KqpScan::TaggedScalar [GOOD] >> KqpScan::TooManyComputeActors >> KqpScan::TopSort [GOOD] >> KqpScan::TopSortOverSecondaryIndexRead >> KqpScan::UnionWithPureExpr [GOOD] >> KqpScan::YqlTableSample >> KqpScan::PureExpr [GOOD] >> KqpScan::RestrictSqlV0 >> KqpScan::IsNull [GOOD] >> KqpScan::GrepRange >> KqpScan::Grep [GOOD] >> KqpScan::GrepByString >> KqpScan::Offset [GOOD] >> KqpScan::Order >> KqpScan::SingleKey >> KqpSplit::AfterResolve+Unspecified [GOOD] >> KqpSplit::AfterResult+Ascending >> KqpSplit::UndeliveryOnFinishedRead [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading >> KqpSplit::AfterResolve+Ascending [GOOD] >> KqpSplit::AfterResolve+Descending >> KqpSplit::ChoosePartition+Descending [GOOD] >> KqpSplit::ChoosePartition+Unspecified >> KqpScan::IsNullPartial [GOOD] >> KqpScan::Join >> KqpScan::NullInKey [GOOD] >> KqpScan::NullInKeySuffix >> KqpScan::Limit [GOOD] >> KqpScan::LimitOverSecondaryIndexRead >> KqpScan::LeftSemiJoinSimple [GOOD] >> KqpScan::JoinWithParams >> KqpScan::UnionBasic [GOOD] >> KqpScan::UnionMixed >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] >> KqpSplit::StreamLookupDeliveryProblem >> KqpScan::StreamLookupByPkPrefix >> KqpScan::StreamLookup [GOOD] >> KqpScan::StreamLookupByFullPk >> KqpScan::RightSemiJoinSimple [GOOD] >> KqpScan::SecondaryIndex >> KqpSplit::AfterResult+Descending [GOOD] >> KqpSplit::AfterResult+Unspecified >> KqpSplit::BorderKeys+Ascending [GOOD] >> KqpSplit::BorderKeys+Descending >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] >> KqpSplit::AfterResultMultiRange+Descending >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> KqpScan::SelfJoin3xSameLabels [GOOD] >> KqpScan::SelfJoin3x >> KqpFlowControl::FlowControl_Unlimited [GOOD] >> KqpFlowControl::FlowControl_BigLimit >> KqpScan::StreamExecuteScanQueryCancelation [GOOD] >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending >> KqpScan::DecimalColumn [GOOD] >> KqpScan::DqSourceFullScan >> KqpSplit::StreamLookupSplitAfterFirstResult [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead >> 
KqpSplit::AfterResultMultiRange+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending >> KqpScan::AggregateNoColumn [GOOD] >> KqpScan::AggregateEmptyCountStar >> KqpScan::Join3 [GOOD] >> KqpScan::Join3TablesNoRemap >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> KqpScan::SecondaryIndexCustomColumnOrder [GOOD] >> KqpScan::SelectExistsUnexpected >> KqpScan::CrossJoin [GOOD] >> KqpScan::CountDistinct >> KqpScan::YqlTableSample [GOOD] >> KqpScan::RestrictSqlV0 [GOOD] >> KqpScan::Order [GOOD] >> KqpScan::GrepByString [GOOD] >> KqpScan::GrepLimit >> KqpScan::PrunePartitionsByLiteral [GOOD] >> KqpScan::PrunePartitionsByExpr >> KqpScan::GrepRange [GOOD] >> KqpScan::GrepNonKeyColumns >> KqpSplit::AfterResolve+Descending [GOOD] >> KqpScan::UnionMixed [GOOD] >> KqpScan::SingleKey [GOOD] >> KqpScan::SimpleWindow >> KqpScan::StreamLookupByFullPk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::YqlTableSample [GOOD] Test command err: Trying to start YDB, gRPC: 20760, MsgBus: 4290 2025-04-07T14:05:31.954979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133205686344:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.955156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00161c/r3tmp/tmp5QBAh9/pdisk_1.dat 2025-04-07T14:05:32.526773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.526893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.530810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20760, node 1 2025-04-07T14:05:32.574644Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:05:32.574748Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:05:32.622509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.724851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.724874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.724886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.724989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4290 TClient is connected to server localhost:4290 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.415449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.433884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.456566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:33.598510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.752990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.840789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.354096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150385557098:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.354220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.705198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.743549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.769570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.803258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.838098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.891860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.975782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150385557594:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.975856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.976057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150385557599:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.980113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.001509Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:05:36.001689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572150385557601:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.956294Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133205686344:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.963262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.207216Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737185, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 26442, MsgBus: 12225 2025-04-07T14:05:37.925078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572159384804842:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.925122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00161c/r3tmp/tmpAJcPrU/pdisk_1.dat 2025-04-07T14:05:38.042148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.042222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.042707Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.055302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26442, node 2 2025-04-07T14:05:38.177255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.177276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.177284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.177380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12225 TClient is connected to server localhost:12225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.655254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:38.672701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:38.679323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.782875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.963832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.055888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.200062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176564675700:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.200134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.239369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.277157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.311362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.348754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.380263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.451310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.515530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176564676199:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.515630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.516014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176564676204:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.520024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.533514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572176564676206:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:42.484273Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572180859643815:2464], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-04-07T14:05:42.485760Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4NzJiNzEtMTQxMTk2YTYtN2ZhOTY0ZC1hODBlNWQwYQ==, ActorId: [2:7490572180859643808:2460], ActorState: ExecuteState, TraceId: 01jr894g5y52hwagqs6cstej5a, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::RestrictSqlV0 [GOOD] Test command err: Trying to start YDB, gRPC: 18628, MsgBus: 26151 2025-04-07T14:05:31.937599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134248593873:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.938303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00161d/r3tmp/tmpqT7U7R/pdisk_1.dat 2025-04-07T14:05:32.511521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.511596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.517544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:32.560744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18628, node 1 2025-04-07T14:05:32.729248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.729276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.729298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.729390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26151 TClient is connected to server localhost:26151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.703831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.748167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.895493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.065823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.138383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.581782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151428464762:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.581941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.810695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.845659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.919116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.954893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.010137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.050341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.108599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155723432557:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.108690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.112127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155723432562:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.115489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.128853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572155723432565:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940871Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134248593873:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.994448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.206570Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737197, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 13416, MsgBus: 1704 2025-04-07T14:05:37.937620Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572162584537171:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.937693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00161d/r3tmp/tmpO64l4d/pdisk_1.dat 2025-04-07T14:05:38.089586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.089663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.091310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13416, node 2 2025-04-07T14:05:38.143379Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.165183Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.165207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.165214Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.165298Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1704 TClient is connected to server localhost:1704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.670127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:38.687895Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:38.702782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.802209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.080041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.163794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.259087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179764408036:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.259155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.305849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.336212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.370972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.410935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.452612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.539240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.599176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179764408533:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.599250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.599312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179764408538:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.602534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.619763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572179764408540:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.626700Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572184059376155:2464], status: GENERIC_ERROR, issues:
:1:0: Error: V0 syntax is disabled
:1:0: Error: V0 syntax is disabled 2025-04-07T14:05:42.628251Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWJhYzhmYmQtOWNkODUzYWMtZDA4MjhiNGUtZmY1MDlhN2M=, ActorId: [2:7490572184059376148:2460], ActorState: ExecuteState, TraceId: 01jr894gaj8s9wy91dtr4a52ya, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpSplit::ChoosePartition+Unspecified [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] >> KqpScan::NullInKeySuffix [GOOD] >> KqpScan::NoTruncate >> KqpSplit::AfterResult+Ascending [GOOD] >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> KqpScan::JoinWithParams [GOOD] >> KqpScan::LMapFunction >> KqpScan::TwoAggregatesOneFullFrameWindow [GOOD] >> KqpScan::TwoAggregatesTwoWindows >> KqpScan::TooManyComputeActors [GOOD] >> KqpSplit::StreamLookupDeliveryProblem [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Order [GOOD] Test command err: Trying to start YDB, gRPC: 22577, MsgBus: 10362 2025-04-07T14:05:31.983867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135702229247:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.984454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00165d/r3tmp/tmprZCBFA/pdisk_1.dat 2025-04-07T14:05:32.511488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.544918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.545009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22577, node 1 2025-04-07T14:05:32.549455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:32.717125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.717234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10362 TClient is connected to server localhost:10362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.577377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.596073Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.604677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:33.721441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.929440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.019893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.344450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152882099994:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.344550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.745542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.788511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.811539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.836436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.884877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.990167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152882100490:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.990237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.990459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152882100495:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.994468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.009767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572152882100497:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:37.018484Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135702229247:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.018713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.361625Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737375, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 26852, MsgBus: 18488 2025-04-07T14:05:38.080616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572165568024035:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.080685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00165d/r3tmp/tmpMPlKhN/pdisk_1.dat 2025-04-07T14:05:38.333384Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.343221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.348827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.350292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26852, node 2 2025-04-07T14:05:38.496651Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.496694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.496702Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.496772Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18488 TClient is connected to server localhost:18488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.229633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:39.242825Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.254716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.337062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:39.491646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.590710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.519916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178452927618:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.520029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.523672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.597149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.670276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.704401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.741696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.821608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.879210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178452928114:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.879293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.879349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178452928119:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.882866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.892625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572178452928121:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.954640Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034742951, txId: 281474976710671] shutting down 2025-04-07T14:05:43.081664Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572165568024035:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.081751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSplit::AfterResult+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRange+Descending [GOOD] >> KqpSplit::BorderKeys+Descending [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResolve+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 22654, MsgBus: 6329 2025-04-07T14:05:31.969435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572132891463522:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.969508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00162e/r3tmp/tmpjqyVGp/pdisk_1.dat 2025-04-07T14:05:32.556486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.556577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.558947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:32.577978Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22654, node 1 2025-04-07T14:05:32.717119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.717232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6329 TClient is connected to server localhost:6329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:33.534621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.553612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.567238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.711868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.893721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:34.006851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.465634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150071334291:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.465763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.738217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.781205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.805685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.833017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.875919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.973295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150071334790:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.973367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.973679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150071334795:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.979354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:35.993739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572150071334797:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.960815Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572132891463522:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.968772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.236553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894axb1v38fh3p97rymhx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTkxNThjODItOGEzYjU3YTgtOGU3YzU3YzYtODAwMDc5NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.325339Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737277, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 62448, MsgBus: 27841 2025-04-07T14:05:38.169507Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572166651246998:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.172267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00162e/r3tmp/tmpWqLqFZ/pdisk_1.dat 2025-04-07T14:05:38.374965Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.398466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.398564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.416274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62448, node 2 2025-04-07T14:05:38.591591Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.591610Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.591617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.591694Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27841 TClient is connected to server localhost:27841 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.260999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.297974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.390408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.555032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.640114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.649069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179536150507:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.649144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.690925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.772475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.807820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.846427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.878567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.919096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.984951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179536151003:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.985063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.985896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179536151008:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.989257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.006491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572179536151010:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- 2025-04-07T14:05:43.145586Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894gpn4d1mg1bzjg9e440w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjkwMTIwY2UtMjk4ZDMyMy0yZDJjMzRhNy1mNTQzNGUwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-07T14:05:43.172604Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572166651246998:2121];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.172734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; resume evread ----------------------------------------------------------- 2025-04-07T14:05:43.217521Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743192, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionMixed [GOOD] Test command err: Trying to start YDB, gRPC: 4417, MsgBus: 27736 2025-04-07T14:05:31.952830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135573542850:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.953033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001616/r3tmp/tmpliPxcz/pdisk_1.dat 2025-04-07T14:05:32.530681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.530783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.540981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.562619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4417, node 1 2025-04-07T14:05:32.717192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.717325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27736 TClient is connected to server localhost:27736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.742762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.754393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.778068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.964231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.189142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.273159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.738680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152753413611:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.738788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.979347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.007290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.083940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.149437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.183243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.220993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.270303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157048381409:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.270444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.270830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157048381414:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.274715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.285243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:05:36.288905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157048381416:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.948923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135573542850:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.964204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.544855Z node 1 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [1:7490572161343349141:2055], tablet: [1:7490572144163478105:2304], scanId: 5, table: /Root/EightShard 2025-04-07T14:05:37.554759Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737571, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 31560, MsgBus: 31095 2025-04-07T14:05:38.365656Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572164995964964:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.366184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001616/r3tmp/tmpn7KvpI/pdisk_1.dat 2025-04-07T14:05:38.537835Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.564623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.564698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.565728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31560, node 2 2025-04-07T14:05:38.701104Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.701123Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.701131Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.701208Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31095 TClient is connected to server localhost:31095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.256210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.262849Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.271342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.373036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.523538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.618812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.801768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572177880868535:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.802050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.819863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.892297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.918825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.946294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.980753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.017546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.069541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182175836328:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.069645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.069836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182175836333:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.072621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.081212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572182175836335:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.342248Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743381, txId: 281474976715671] shutting down 2025-04-07T14:05:43.364476Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572164995964964:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.364569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScan::StreamLookupByPkPrefix [GOOD] >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization >> KqpScan::DqSourceFullScan [GOOD] >> KqpScan::DqSource >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] >> KqpFlowControl::FlowControl_BigLimit [GOOD] >> KqpFlowControl::FlowControl_SmallLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupByFullPk [GOOD] Test command err: Trying to start YDB, gRPC: 27754, MsgBus: 61266 2025-04-07T14:05:31.953685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135962201725:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.953857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164c/r3tmp/tmpIPg25o/pdisk_1.dat 2025-04-07T14:05:32.628535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.640834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.640941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.645568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27754, node 1 2025-04-07T14:05:32.729605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.729624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.729629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.729711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61266 TClient is connected to server localhost:61266 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.806211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.844975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.864763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:34.162747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.364233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.450626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.790665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153142072467:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.790782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.060734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.088574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.115099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.142570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.173708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.207450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.268800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157437040260:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.268890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.271296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157437040265:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.278829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.293780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157437040267:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.948758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135962201725:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.948830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.178590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:37.822171Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737844, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 7209, MsgBus: 6902 2025-04-07T14:05:38.614029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572166736083626:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164c/r3tmp/tmpSbLG0T/pdisk_1.dat 2025-04-07T14:05:38.672058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.725662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.735539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.740931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.745914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7209, node 2 2025-04-07T14:05:38.853299Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.853318Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.853326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.853403Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6902 TClient is connected to server localhost:6902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.441910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.461701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.479157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.546894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.741191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.818845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.701642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179620987048:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.701751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.733598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.773517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.801035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.841999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.869121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.911051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.006232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572183915954845:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.006302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.006535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572183915954850:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.009771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.023994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572183915954852:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.067441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.366561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.634989Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572166736083626:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.635032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.678873Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743682, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] Test command err: 2025-04-07T14:05:35.173678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:35.174015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:35.174305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015e8/r3tmp/tmp1HKa3w/pdisk_1.dat 2025-04-07T14:05:35.499002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.540040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:35.594173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:35.594301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:35.605823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:35.942393Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.936 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [KQP] kqp_host.cpp:1343: Compiled query: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/Test"))) (Void) $3)) ) 2025-04-07T14:05:35.943390Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.943 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 229us 2025-04-07T14:05:35.947325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:609:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.947494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.953967Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.953 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 4.35ms 2025-04-07T14:05:35.954568Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.954 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_constraint.cpp:3134: Execution of [ConstraintTransformer::DoTransform] took 286us 2025-04-07T14:05:35.954747Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.954 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 59us 2025-04-07T14:05:35.955211Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.955 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_csee.cpp:612: Execution of [EliminateCommonSubExpressions] took 352us 2025-04-07T14:05:35.960310Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.959 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('columnConstrains '())) (let $3 '('"Key" (OptionalType (DataType 'Uint64)) $2 '())) (let $4 '('"Value" (OptionalType (DataType 'String)) $2 '())) (let $5 (KiCreateTable! world $1 '"/Root/Test" '($3 $4) '('"Key") '() '() '() '() '() '() '"table" '"false" '0 '0)) (return (Commit! 
$5 $1 '('('"mode" '"flush")))) ) 2025-04-07T14:05:35.960454Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.960 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:59: Begin, root #75 2025-04-07T14:05:35.960536Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.960 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #75, status: Ok 2025-04-07T14:05:35.961588Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.961 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:466: Register async execution for node #74 2025-04-07T14:05:35.961730Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:35.961 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:87: Finish, output #75, status: Async 2025-04-07T14:05:35.966634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.333843Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.333 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:133: Completed async execution for node #74 2025-04-07T14:05:36.333974Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.333 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #74 2025-04-07T14:05:36.334061Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:59: Begin, root #75 2025-04-07T14:05:36.334159Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #75, status: Ok 2025-04-07T14:05:36.334244Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:577: Node #75 finished execution 2025-04-07T14:05:36.334361Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:594: Node #75 created 0 trackable nodes: 2025-04-07T14:05:36.334463Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:87: Finish, output #75, status: Ok 2025-04-07T14:05:36.334537Z node 1 :KQP_YQL 
INFO: SessionId: ydb://session/3?node_id=1&id=NzFiOWM2YzItNzMwNzAxODEtNWEwOWMyMWQtODY0MGQ0Njk= 2025-04-07 14:05:36.334 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #75 2025-04-07T14:05:36.340014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:713:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.340182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.340539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:718:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.345624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.546900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:720:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:05:36.639857Z node 1 :KQP_YQL INFO: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.639 INFO ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [KQP] kqp_host.cpp:1343: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/Test"))) (PersistableRepr '((AsStruct '('"Key" (Uint32 '"201")) '('"Value" (String '"Value1"))) (AsStruct '('"Key" (Uint32 '"202")) '('"Value" (String '"Value2"))) (AsStruct '('"Key" (Uint32 '"203")) '('"Value" (String '"Value3"))) (AsStruct '('"Key" (Uint32 '"803")) '('"Value" (String '"Value3"))))) '('('mode 'replace)))) ) 2025-04-07T14:05:36.640531Z node 1 :KQP_YQL DEBUG: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.640 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 269us 2025-04-07T14:05:36.646597Z node 1 :KQP_YQL DEBUG: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.646 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 1.56ms 2025-04-07T14:05:36.647418Z node 1 :KQP_YQL DEBUG: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.647 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_constraint.cpp:3134: Execution of [ConstraintTransformer::DoTransform] took 497us 2025-04-07T14:05:36.647610Z node 1 :KQP_YQL DEBUG: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.647 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 83us 2025-04-07T14:05:36.648236Z node 1 :KQP_YQL DEBUG: TraceId: 01jr894a6k7kdsj9gwttaf89vx, SessionId: CompileActor 2025-04-07 14:05:36.648 DEBUG ydb-core-kqp-ut-scan(pid=341132, tid=0x00007FE4C12F6C80) [perf] yql_ex ... , task: 1. Tasks execution finished 2025-04-07T14:05:37.584732Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:861:2691], TxId: 281474976715662, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OWE4OWU1NzMtZDZiMTVlMjYtZTdmZmNlOTItZTFhNTUzOQ==. TraceId : 01jr894awv9eshd0tchvx05rn6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:05:37.584843Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-04-07T14:05:37.584952Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:05:37.585151Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:862:2692], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OWE4OWU1NzMtZDZiMTVlMjYtZTdmZmNlOTItZTFhNTUzOQ==. TraceId : 01jr894awv9eshd0tchvx05rn6. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:05:37.585190Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:05:37.585229Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. Tasks execution finished 2025-04-07T14:05:37.585255Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:862:2692], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OWE4OWU1NzMtZDZiMTVlMjYtZTdmZmNlOTItZTFhNTUzOQ==. TraceId : 01jr894awv9eshd0tchvx05rn6. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:05:37.585302Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-07T14:05:37.585344Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:05:37.586148Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down Trying to start YDB, gRPC: 9650, MsgBus: 31819 2025-04-07T14:05:38.178504Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572162975211985:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.178607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015e8/r3tmp/tmpIiFJd4/pdisk_1.dat 2025-04-07T14:05:38.412754Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9650, node 2 2025-04-07T14:05:38.537363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.537470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.538485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:38.588755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.588774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.588782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.588866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31819 TClient is connected to server localhost:31819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.187981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.207342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:05:39.290566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.445438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.507011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.220698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572175860115441:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.220792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.270585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.325074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.355503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.390872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.426473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.506947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.555161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572175860115939:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.555209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.555323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572175860115944:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.557443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.565045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572175860115946:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:42.556127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.185616Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572162975211985:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.185668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.196899Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894gaa272t8q4g2t3s66e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZhNmY5ZmItZDU2ZTU4MmQtNmNiYmUxNzctNjZjMzBhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.208914Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr894gaa272t8q4g2t3s66e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZhNmY5ZmItZDU2ZTU4MmQtNmNiYmUxNzctNjZjMzBhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.211258Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr894gaa272t8q4g2t3s66e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZhNmY5ZmItZDU2ZTU4MmQtNmNiYmUxNzctNjZjMzBhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.593685Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr894gxn67cm18f2s9hk3s4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjlmMDZjZTktMzdhNTQ2NWMtNzI0ZWI1MDItM2M0MWRkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715677 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:43.709785Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743633, txId: 281474976715675] shutting down >> KqpScan::Join [GOOD] >> KqpScan::Join2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::ChoosePartition+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 2418, MsgBus: 11855 2025-04-07T14:05:31.929802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135185320250:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.929882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001653/r3tmp/tmp8Gk1mo/pdisk_1.dat 2025-04-07T14:05:32.641991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.650348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.650436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.652115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2418, node 1 2025-04-07T14:05:32.769217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.769256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.769265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.769364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11855 TClient is connected to server localhost:11855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.435437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.456765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.477658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.591578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.740421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.842955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.418413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152365191123:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.418521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.737415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.771975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.848698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.880606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.928050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.005923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156660158916:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.005977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.006115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156660158922:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.009839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.028310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572156660158924:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.938452Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135185320250:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.938501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.235769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894axh7tc5qxcxmjh4ga2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNiMDhmNWEtMjc5ZWYwZTgtMTJlZmQwYjYtNzkzNzgwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.638135Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737277, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12944, MsgBus: 18524 2025-04-07T14:05:38.279462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572165120290340:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.280647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001653/r3tmp/tmpS8QLmB/pdisk_1.dat 2025-04-07T14:05:38.496059Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.504886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.504955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.509638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12944, node 2 2025-04-07T14:05:38.577092Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.577110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.577118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.577192Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18524 TClient is connected to server localhost:18524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.052632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.062114Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.075332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.155918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.386328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.446865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.692798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178005193899:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.692885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.735246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.804364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.840817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.911377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.948872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.007005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.049259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182300161696:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.049339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.049543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182300161701:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.053153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.061094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572182300161703:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.249638Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894gw27tfsc8t3j381v3bz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTM1NGNhNi01NjRjMzYxMi1iNjA2OTU5NC0zOGVhOGQzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.277769Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572165120290340:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.277829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:43.729312Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743290, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 3964, MsgBus: 62508 2025-04-07T14:05:31.951646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572136377375058:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.951791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001648/r3tmp/tmpKpDrHK/pdisk_1.dat 2025-04-07T14:05:32.575538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.597363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.597459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.637192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3964, node 1 2025-04-07T14:05:32.725180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.725206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.725214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.725279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62508 TClient is connected to server localhost:62508 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.675170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.729630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.979458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:34.233467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:05:34.327284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.710664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153557245834:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.710747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.007387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.035359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.071194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.134419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.157695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.234882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.303179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157852213636:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.303232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.303329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157852213641:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.305952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.320956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157852213643:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572136377375058:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.940906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.412135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b63bqwmk7zkejvdv83z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY5YWE2YjctM2M5YzU3NDAtZjE5M2M2NDQtZjk4ZWIzYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.464851Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737459, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 5914, MsgBus: 14768 2025-04-07T14:05:38.147053Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572167060291547:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001648/r3tmp/tmpdvvx2m/pdisk_1.dat 2025-04-07T14:05:38.201401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.290526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.290595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.292863Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.305938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5914, node 2 2025-04-07T14:05:38.473325Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.473344Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.473351Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.473445Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14768 TClient is connected to server localhost:14768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.927955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.966712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.052995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.227402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.281082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.590761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179945194967:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.591109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.609380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.659982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.691455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.749661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.800462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.854525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.952187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179945195467:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.952279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.952523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572179945195472:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.956631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.979000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572179945195474:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.124366Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572167060291547:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.124605Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.259524Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894gtm37kp2gk7kye93p9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTY5MjUwNTYtNjk4ZmNiODgtOTRhMTMxNGQtNmZiNzc0MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:43.677169Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743297, txId: 281474976715671] shutting down >> KqpScan::LimitOverSecondaryIndexRead [GOOD] >> KqpScan::Like >> KqpScan::SelfJoin3x [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::TooManyComputeActors [GOOD] Test command err: Trying to start YDB, gRPC: 64248, MsgBus: 4035 2025-04-07T14:05:31.942206Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135757075958:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.953287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001640/r3tmp/tmpklWHe7/pdisk_1.dat 2025-04-07T14:05:32.533501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.584551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.596842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.616107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64248, node 1 2025-04-07T14:05:32.717209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.717316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4035 TClient is connected to server 
localhost:4035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.423670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.461069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.476629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.703101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.995918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.135988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.696874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152936946709:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.697035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.916459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.946688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.991833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.020086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.046890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.123002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.187527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157231914501:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.187613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.187817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157231914507:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.192522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.203919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157231914509:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940865Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135757075958:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.949846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.263278Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737256, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 25926, MsgBus: 26133 2025-04-07T14:05:37.867308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572159549322806:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.867373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001640/r3tmp/tmpzj47Jl/pdisk_1.dat 2025-04-07T14:05:38.001087Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.022498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.022587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.026047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25926, node 2 2025-04-07T14:05:38.069117Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.069149Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.069161Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.069246Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26133 TClient is connected to server localhost:26133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.554785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:38.567134Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:38.581711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.650059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.879150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.974202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.368776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176729193679:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.368861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.402878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.438026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.477414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.515345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.543455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.577738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.646948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176729194174:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.647049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.647319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176729194179:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.651085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.663206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572176729194181:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.713731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.867431Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572159549322806:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:42.867496Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.053207Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTQ2YWVjMWUtNjFkNjk5OTEtYTZkM2U3ODItZWE3ZTY2N2E=, ActorId: [2:7490572181024162039:2486], ActorState: ExecuteState, TraceId: 01jr894gnpd6cmbgnw5fenkbh9, Create QueryResponse for error on request, msg: 2025-04-07T14:05:44.057170Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744067, txId: 281474976710673] shutting down
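The repeated KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found or you don't have access permissions") are emitted while the default workload-manager pool does not exist yet; the log shows the pool being created right afterwards (ESchemeOpCreateResourcePool) and the creator actor retrying once the transaction completes ("doublechecking"). For reference only, a pool can also be created explicitly in YQL. This is a hedged sketch, not part of the test run: the statement form follows the CREATE RESOURCE POOL syntax of recent YDB releases, but the pool name and the setting names/values below are illustrative assumptions.

    -- Hedged sketch: explicitly creating a resource pool in YQL.
    -- `test_pool` and the settings are illustrative; check the YDB version
    -- in use for the exact set of supported parameters.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting name
        QUEUE_SIZE = 100              -- assumed setting name
    );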
: Warning: Type annotation, code: 1030
:7:13: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:8:18: Warning: At function: AssumeColumnOrderPartial, At function: Aggregate, At function: Filter, At function: Coalesce
:9:67: Warning: At function: And
:9:39: Warning: At function: <
:9:46: Warning: At function: -
:9:46: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
: Warning: Execution, code: 1060
:4:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Error: Requested too many execution units: 14, code: 2029 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 4590, MsgBus: 21808 2025-04-07T14:05:31.960943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133911957987:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.961326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015e1/r3tmp/tmp0cZjam/pdisk_1.dat 2025-04-07T14:05:32.520499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.522905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.538449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.539827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4590, node 1 2025-04-07T14:05:32.719419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.719440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.719470Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.719541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21808 TClient is connected to server localhost:21808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.781207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.807026Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.815658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
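The YQL issue block above (codes 1030/1107/1060/8001/2029) belongs to the query of the preceding test: warning 1107 flags an implicit integral bitcast between an Optional value and Int32 in the subtraction at 9:46, warning 8001 only notes that the cost-based optimizer fell back because table statistics could not be loaded, and the 2029 error reports that the query requested too many execution units (14). As a hedged illustration only (the table and column names below are invented, not taken from the test), the 1107 warning can be avoided by collapsing the optional operand explicitly before the arithmetic:

    -- Hedged sketch: avoid the Optional/Int32 implicit bitcast (warning 1107).
    -- `demo_table`, `key`, `opt_value` are illustrative identifiers.
    SELECT key, opt_value
    FROM demo_table
    WHERE key < COALESCE(opt_value, 0) - 1  -- COALESCE yields a plain Int32
    ORDER BY key;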
2025-04-07T14:05:34.044473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.191568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.272241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.721521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151091828730:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.721630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.943734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.000356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.025675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.094021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.124272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.201664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.259130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155386796533:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.259223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.259528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155386796539:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.263195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.274793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572155386796541:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133911957987:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.940556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.430249Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b54bmyvwmc115aqtfav, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUxNTQ2ZmUtNjA1MzYyODMtMTY4MzczNDgtMmEwNDI2YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.948198Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737473, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 14960, MsgBus: 24054 2025-04-07T14:05:38.704952Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572166878372650:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.704987Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015e1/r3tmp/tmpajAumF/pdisk_1.dat 2025-04-07T14:05:38.833054Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.852644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.852831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.857955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14960, node 2 2025-04-07T14:05:38.984033Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.984058Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.984065Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.984169Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24054 TClient is connected to server localhost:24054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.583317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.597580Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.613104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.738483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.909198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.984012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.240804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184058243543:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.240898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.285314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.320741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.371164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.444484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.474802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.548331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.598260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184058244043:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.598322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.598488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184058244048:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.601752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.609497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572184058244050:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.695096Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894ha46eb5e0c26392f22z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2M5NWQyOTItODcxYWMxMmEtZGQwMmY4YmQtMzBmZDQ3Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-07T14:05:43.713905Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572166878372650:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.713957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:44.156256Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743738, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 7173, MsgBus: 29826 2025-04-07T14:05:31.931467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572132733453178:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.931582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001625/r3tmp/tmpniVJPF/pdisk_1.dat 2025-04-07T14:05:32.560006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.577291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.577386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.582187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7173, node 1 2025-04-07T14:05:32.722081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.722105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.722114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.722195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29826 TClient is connected to server localhost:29826 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.860629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.887017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.900080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.109946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.341441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.438292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.831367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572149913324061:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.831472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.049830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.076344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.109220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.141423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.183401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.261336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.351818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154208291863:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.351921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.352108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154208291869:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.355727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.367121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154208291871:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.936831Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572132733453178:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.022263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.657059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b5g484jj47xazvabxnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQyNzgzODgtMjA2ZDJlZDEtZjhiZjhkMy02OWE4MWNmNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.706500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737697, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 21427, MsgBus: 28781 2025-04-07T14:05:38.576311Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572163364276273:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001625/r3tmp/tmpDf0hw6/pdisk_1.dat 2025-04-07T14:05:38.613088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.674759Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21427, node 2 2025-04-07T14:05:38.719377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.719485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.765011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:38.844973Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.844994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.845000Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.845078Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28781 TClient is connected to server localhost:28781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.389856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.401575Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.426634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.543734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.689737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.775712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.733657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176249179680:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.733734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.782255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.810286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.879207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.909832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.943635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.019140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.066872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572180544147472:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.066946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.067196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572180544147477:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.070255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.081875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572180544147479:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.120629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.547828Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572163364276273:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.547881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.667709Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894gvy1ne8hkjtkjp80vp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlMTY3OTgtYmRmZmQzOTctOTNjMTgxNDItNTZhYzNmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.675280Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr894gvy1ne8hkjtkjp80vp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlMTY3OTgtYmRmZmQzOTctOTNjMTgxNDItNTZhYzNmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:43.677788Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr894gvy1ne8hkjtkjp80vp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlMTY3OTgtYmRmZmQzOTctOTNjMTgxNDItNTZhYzNmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:44.107900Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr894hc507ejv8sw0xhq77nn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjVmZDA4YTYtNmJkNmQ3NTUtZTNkMjQ4YzMtYjA3ZTZkMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root captured evread ----------------------------------------------------------- 2025-04-07T14:05:44.122295Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744144, txId: 281474976715675] shutting down >> KqpScan::RightJoinSimple >> KqpScan::CustomWindow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 16775, MsgBus: 9590 2025-04-07T14:05:31.971844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135809210061:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.971891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001626/r3tmp/tmpSgSxRn/pdisk_1.dat 2025-04-07T14:05:32.598780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.611020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.611095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.614713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16775, node 1 2025-04-07T14:05:32.752634Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.752681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.752688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.752767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9590 TClient is connected to server localhost:9590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.524298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.555376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.669903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.851033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.959894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.683528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152989080724:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.683631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.919009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.969399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.007406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.043292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.068143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.114666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.174649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157284048514:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.174697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.174839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157284048519:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.177337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.185952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157284048521:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.972758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135809210061:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.972815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.654703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b0ta6v2b1z6ecz3sb38, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQyNWUyMzEtYzkzZGE1ZTUtNjI1YThmZGYtYjg4MzYyOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.980433Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737697, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 29374, MsgBus: 23629 2025-04-07T14:05:38.805596Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572165537683222:2079];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001626/r3tmp/tmpKxBBRe/pdisk_1.dat 2025-04-07T14:05:38.891817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:39.034880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.034955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.036691Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.058038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29374, node 2 2025-04-07T14:05:39.253143Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.253165Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.253174Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.253257Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23629 TClient is connected to server localhost:23629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.772022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.777854Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.783425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.852926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.034817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:40.132444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.204296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182717554070:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.204454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.254815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.337127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.397200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.429718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.503663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.571798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.643364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182717554577:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.643431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.643776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182717554582:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.647463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.667102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572182717554584:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.808951Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572165537683222:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.809024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.131862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894hbd2fqtacwzsaqhqa5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTJjODFkNjYtYjVhMGNmNzctZGQzMjhkOTQtMmUyZTU1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:44.511901Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744172, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 3494, MsgBus: 1367 2025-04-07T14:05:31.954330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134935536060:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.954475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ea/r3tmp/tmpYsn3Lp/pdisk_1.dat 2025-04-07T14:05:32.581459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.597586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.597683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.609616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3494, node 1 2025-04-07T14:05:32.719648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.719673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.719683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.720486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1367 TClient is connected to server localhost:1367 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.623599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.634385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.650307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.802861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.003666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.085032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.541358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152115406796:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.541455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.807096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.847673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.930756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.969536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.035332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.106848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.218704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156410374606:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.218773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.219104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156410374611:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.223206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.235923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572156410374613:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.949055Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134935536060:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.949195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.446765Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b619tkbwy66nfkt7bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThmMjc1YjUtZDE0YmVjMi1kZWRkNGRjNy1kMTA1OThjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:37.957036Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737487, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 20369, MsgBus: 63736 2025-04-07T14:05:38.834451Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572167021730787:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.834485Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ea/r3tmp/tmp7WKCKN/pdisk_1.dat 2025-04-07T14:05:39.131440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.174887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.174959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20369, node 2 2025-04-07T14:05:39.185157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:39.401195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.401219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.401226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.401320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63736 TClient is connected to server localhost:63736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.882421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.891642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.980611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.148575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.240825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.408801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184201601669:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.417290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.458475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.528483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.558479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.592601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.633584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.713217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.809625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184201602173:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.809704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.810135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184201602178:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.814268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.832249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572184201602180:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.837807Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572167021730787:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.843479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.981819Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894hhydm8my79t2dy4xmg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY4NzI2OTctNTIxNmI5MDItYjI4MWJmMDEtYTgxN2VmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:44.419003Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744018, txId: 281474976710671] shutting down >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 10179, MsgBus: 27038 2025-04-07T14:05:31.999478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572136516469358:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.999759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00166e/r3tmp/tmpq2TV10/pdisk_1.dat 2025-04-07T14:05:32.625052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.627941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.628020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.654538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10179, node 1 2025-04-07T14:05:32.813668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.813697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.813704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.813795Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27038 TClient is connected to server localhost:27038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.475874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.547947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.685194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.838809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:33.951524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.482852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153696340093:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.482995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.735151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.786762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.820214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.855340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.930686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.021676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157991307889:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.021775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.022026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157991307894:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.025225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.033293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157991307896:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:37.024378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572136516469358:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.024427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.272944Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737298, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 13162, MsgBus: 26931 2025-04-07T14:05:37.923156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572161679597686:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.923191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00166e/r3tmp/tmpO2aZKC/pdisk_1.dat 2025-04-07T14:05:38.050720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.071004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.071087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13162, node 2 2025-04-07T14:05:38.074210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:38.133756Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.133774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.133782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.133863Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26931 TClient is connected to server localhost:26931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.734328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:38.767109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.861462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.027821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.095490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.460784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178859468566:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.460868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.495695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.537460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.577727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.604480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.635030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.684142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.758537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178859469060:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.758606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.758812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178859469065:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.761468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.771157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572178859469067:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:42.924157Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572161679597686:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:42.924212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:42.927299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.991100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.047152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":4,"Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"No estimate","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Fk1","Key"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"TopBy":"row.Fk1","Name":"Top","Limit":"2"}],"Node Type":"Top"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Node Type":"Merge","SortColumns":["Fk1 (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Fk1","Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] >> KqpSplit::ChoosePartition+Ascending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelfJoin3x [GOOD] Test command err: Trying to start YDB, gRPC: 4926, MsgBus: 32272 2025-04-07T14:05:31.929630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133361068026:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.929689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015df/r3tmp/tmpGkrVJP/pdisk_1.dat 2025-04-07T14:05:32.454032Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.479243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.479364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.505811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4926, node 1 2025-04-07T14:05:32.715026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.715065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.715073Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.715148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32272 TClient is connected to server localhost:32272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.424957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.454602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.610858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.765322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.838725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.337815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150540938906:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.337928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.705515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.777228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.819939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.863000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.894519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.988052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.030009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154835906707:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.030106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.030351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154835906712:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.033904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.042901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154835906714:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.933279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133361068026:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.933360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.200224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.247480Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738166, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 19192, MsgBus: 7464 2025-04-07T14:05:39.030634Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572163358339779:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.064104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015df/r3tmp/tmpL40e0w/pdisk_1.dat 2025-04-07T14:05:39.215119Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.218796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.218871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.221768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19192, node 2 2025-04-07T14:05:39.449271Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.449292Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.449301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.449388Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7464 TClient is connected to server localhost:7464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:05:39.998799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:40.025989Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.037553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.122297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.283890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.351057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.481025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572180538210531:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.481113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.531196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.565106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.644811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.681844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.750579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.791285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.879319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572180538211031:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.879417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.879488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572180538211036:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.882983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.892077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572180538211038:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.967297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.998341Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572163358339779:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.998412Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.981417Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744921, txId: 281474976710673] shutting down >> KqpSplit::IntersectionLosesRange+Ascending >> KqpScan::UnionAggregate >> KqpScan::EarlyFinish >> KqpScan::Effects >> KqpScan::SecondaryIndex [GOOD] >> KqpScan::Join3TablesNoRemap [GOOD] >> KqpScan::Join3Tables >> KqpScan::MultipleResults >> KqpScan::SqlInParameter >> KqpScan::AggregateEmptyCountStar [GOOD] >> KqpScan::AggregateEmptySum ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] Test command err: Trying to start YDB, gRPC: 7706, MsgBus: 4778 2025-04-07T14:05:31.951903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134926589348:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.954137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001601/r3tmp/tmpZ7xeBp/pdisk_1.dat 2025-04-07T14:05:32.586705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.586776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.592804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.594321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7706, node 1 2025-04-07T14:05:32.729064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.729082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.729086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.729148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4778 TClient is connected to server localhost:4778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.894346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.965301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.981415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.123204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.291959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.366019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.882796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152106460087:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.882959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.127812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.169941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.199852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.229324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.310001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.340507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.445247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156401427882:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.445318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.445621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156401427887:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.449144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.459278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572156401427889:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.948775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134926589348:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.959114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.421241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.964328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894b9p37wj02zbbbr8ygx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQxM2MzNjUtNGE0MmNkNWMtNGMyMDA2NmUtYWI5NWIyYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:37.980636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr894b9p37wj02zbbbr8ygx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQxM2MzNjUtNGE0MmNkNWMtNGMyMDA2NmUtYWI5NWIyYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:37.983565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr894b9p37wj02zbbbr8ygx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQxM2MzNjUtNGE0MmNkNWMtNGMyMDA2NmUtYWI5NWIyYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:38.383711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr894bta0dykk1mr2mxds2e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA4YmQ0MzYtM2MxOGMyLTMyNjNmZjk2LWU0ZGYwOTky, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710677 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:38.953763Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738418, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 24007, MsgBus: 30266 2025-04-07T14:05:39.908425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572169868921893:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.908471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001601/r3tmp/tmpCWWP1n/pdisk_1.dat 2025-04-07T14:05:40.105658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:40.105734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:40.107867Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:40.121778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24007, node 2 2025-04-07T14:05:40.325110Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:40.325131Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:40.325137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:40.325221Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30266 TClient is connected to server localhost:30266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
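The markers "captured evread", "starting split", "captured evreadresult" and "resume evread" earlier in this output come from a test harness that intercepts selected events, holds them back while a table split is pushed through the scheme shard, and only then re-injects them, apparently so the split is forced to interleave with an in-flight read. A minimal, self-contained sketch of that capture-and-resume pattern is shown below; `TestMailbox`, `Event` and the capture predicate are hypothetical names used purely for illustration and are not YDB's actual actor-system or test API.

```cpp
// Sketch of a capture/resume test mailbox (illustrative only, not YDB code).
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Event { std::string name; };

class TestMailbox {
public:
    // The predicate decides which events are captured instead of delivered.
    void SetCapture(std::function<bool(const Event&)> pred) { capture_ = std::move(pred); }

    void Send(const Event& ev) {
        if (capture_ && capture_(ev)) {
            std::cout << "captured " << ev.name << "\n";
            held_.push_back(ev);               // hold the event back for now
        } else {
            Deliver(ev);
        }
    }

    // Re-inject everything that was held back, in the original order.
    void Resume() {
        capture_ = nullptr;
        for (const auto& ev : held_) {
            std::cout << "resume " << ev.name << "\n";
            Deliver(ev);
        }
        held_.clear();
    }

private:
    void Deliver(const Event& ev) { std::cout << "delivered " << ev.name << "\n"; }

    std::function<bool(const Event&)> capture_;
    std::vector<Event> held_;
};

int main() {
    TestMailbox mailbox;
    mailbox.SetCapture([](const Event& ev) { return ev.name == "evread"; });

    mailbox.Send({"evread"});              // captured: the read is paused here
    std::cout << "starting split\n";       // mutate the schema while the read is held
    mailbox.Send({"evreadresult"});        // delivered normally in this simplified sketch
    mailbox.Resume();                      // the held read is replayed after the split
}
```

The value of the pattern is ordering: because the read event is parked until `Resume()`, the split is guaranteed to land between the read being issued and the read being served, which is the interleaving these scan/split tests are exercising.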
2025-04-07T14:05:40.773933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:40.779586Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.795935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.875465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.024890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.096068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.352793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572187048792770:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.352904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.391491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.436683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.465605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.495609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.537996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.608557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.652980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572187048793265:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.653067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.653150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572187048793270:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.657023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:43.665871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572187048793272:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:44.780199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.911933Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572169868921893:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.912000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:45.310540Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894jfx0vxp6ea0xkhry0kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTYzOGVhNzItOWQ1NjhiNjctMjNjNzIxM2MtMjNhMWVjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:45.315500Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr894jfx0vxp6ea0xkhry0kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTYzOGVhNzItOWQ1NjhiNjctMjNjNzIxM2MtMjNhMWVjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:45.317464Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr894jfx0vxp6ea0xkhry0kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTYzOGVhNzItOWQ1NjhiNjctMjNjNzIxM2MtMjNhMWVjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:05:45.687726Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr894jzdb96yy4jh26mmvfhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDBmOGNlYzUtNTVlYTYyOTctZDgzZmUwZDEtYzg5NzdlMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root captured evread ----------------------------------------------------------- 2025-04-07T14:05:45.697511Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034745733, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-04-07T14:04:51.179222Z :HappyWay INFO: Random seed for debugging is 1744034691179191 2025-04-07T14:04:51.548387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571964462877191:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.548802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.658127Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571964375466458:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.658187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ac2/r3tmp/tmpplWFrO/pdisk_1.dat 2025-04-07T14:04:51.802539Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:51.805629Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:52.020129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.020272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.021564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.021612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.030617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:52.030789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.031865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.070996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30739, node 1 2025-04-07T14:04:52.102115Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.102238Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.324435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001ac2/r3tmp/yandexTa9RG1.tmp 2025-04-07T14:04:52.324455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001ac2/r3tmp/yandexTa9RG1.tmp 2025-04-07T14:04:52.324551Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001ac2/r3tmp/yandexTa9RG1.tmp 2025-04-07T14:04:52.324644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:53.760630Z INFO: TTestServer started on Port 28121 GrpcPort 30739 
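Near the top of this test's error output the harness prints "Random seed for debugging is 1744034691179191" before anything else runs. Logging the seed up front is what makes a flaky run reproducible: the same number can be fed back in to replay the exact sequence of random choices. A minimal sketch of that convention follows; the `TEST_SEED` environment variable and the surrounding names are assumptions chosen for illustration, not the actual YDB test-framework mechanism.

```cpp
// Sketch: pick a run seed, log it, and allow overriding it for reproduction.
#include <chrono>
#include <cstdlib>
#include <iostream>
#include <random>

int main() {
    unsigned long long seed =
        std::chrono::steady_clock::now().time_since_epoch().count();
    if (const char* fixed = std::getenv("TEST_SEED")) {   // hypothetical override
        seed = std::strtoull(fixed, nullptr, 10);
    }
    std::cout << "Random seed for debugging is " << seed << "\n";

    std::mt19937_64 rng(seed);             // every random choice in the test uses rng
    std::uniform_int_distribution<int> dist(1, 6);
    std::cout << "first draw: " << dist(rng) << "\n";
}
```

In this sketch, rerunning the binary with `TEST_SEED=1744034691179191` reproduces the same draws that the original failing run made.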
TClient is connected to server localhost:28121 PQClient connected to localhost:30739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:54.073349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:04:54.412831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571977260368655:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.412835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571977260368647:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.412953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.433772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:04:54.456925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571977260368662:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:04:55.803435Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571977347780006:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.805600Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571977260368705:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.860475Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTdjMDdhZjYtYzg2NmRiM2QtYTNmYzc0MTEtMjgzOGZhZjU=, ActorId: [2:7490571977260368631:2280], ActorState: ExecuteState, TraceId: 01jr893182d9sztw2b5y7735p0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.862697Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:55.867909Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2U4YTdmYmEtYWZhMzExOTQtOTljY2NkMzAtYzJkOTg4MWM=, ActorId: [1:7490571977347779956:2301], ActorState: ExecuteState, TraceId: 01jr8931acb101sw2zp0ntvjjm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.868188Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:56.209211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.302912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.410270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:30739", true, true, 1000); 2025-04-07T14:04:56.553818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571964462877191:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.553868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:56.658290Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571964375466458:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.658341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:57.905864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr89338sd1f01dd1bc30yvs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdmYzA0MjEtOTVmMTNhMTItMWFjMjlkM2ItYTFmZjZkZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:04:58.075286Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 795 } 2025-04-07T14:04:58.075287Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\' ... 
s::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:43.426362Z node 7 :PERSQUEUE INFO: new Cookie default|7723a078-7b78ac53-e8a656d4-d1f74579_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:43.701621Z node 7 :PERSQUEUE INFO: new Cookie default|c456c864-ac8793e3-9305e84c-6a572723_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR **** Total histogram: ****
Interval=0ms: 1
Interval=1ms: 0
Interval=5ms: 0
Interval=10ms: 0
Interval=20ms: 0
Interval=50ms: 0
Interval=100ms: 0
Interval=500ms: 0
Interval=1000ms: 3
Interval=2500ms: 2
Interval=5000ms: 0
Interval=10000ms: 0
Interval=999999ms: 0
**** **** **** **** 2025-04-07T14:05:44.831954Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:44.832062Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:44.856296Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:05:44.857318Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:05:44.858024Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:183:2196] 2025-04-07T14:05:44.861879Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [8:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:44.869373Z node 8 :PERSQUEUE INFO: new Cookie default|1c3ea4f2-8534e0e7-bd12b3dc-56ffcca9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:201:2211] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:44.880493Z node 8 :PERSQUEUE INFO: new Cookie default|efc49d7e-41b4db85-6efceb5d-c2ae21bc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:201:2211] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:45.223857Z node 8 :PERSQUEUE INFO: new Cookie default|af9c80ca-6b5460ef-53624378-a8e40c69_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured kesus quota request event from [8:201:2211] Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:45.473126Z node 8 :PERSQUEUE INFO: new Cookie default|d2ec7eb4-e4560cd1-8cb3b2c0-ab9fd19b_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:201:2211] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:45.765295Z node 8 :PERSQUEUE INFO: new Cookie default|12c5e5b0-3717fded-7b0f19dc-f758dde1_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:201:2211] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:05:46.013018Z node 8 :PERSQUEUE INFO: new Cookie default|6778e06c-e0b5fc09-c474e0f0-8af22f3e_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:201:2211] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 25711, MsgBus: 9405 2025-04-07T14:05:34.585625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572148563274878:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:34.586168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015da/r3tmp/tmpI8S46Q/pdisk_1.dat 2025-04-07T14:05:34.941001Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25711, node 1 2025-04-07T14:05:35.003324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:35.003445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:35.004896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:35.023768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:35.023796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:35.023807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:35.023906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9405 TClient is connected to server localhost:9405 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:35.480601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.506026Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:35.527215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.664063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.820799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.887962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:37.482276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572161448178450:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:37.482400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:37.716013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.756385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.786076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.857084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.885029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.913310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:38.000048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572161448178952:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:38.000140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:38.000575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572161448178957:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:38.005230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:38.018078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572161448178959:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:39.588909Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572148563274878:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.588976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:39.645820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894cvq3k83jp4sp93t3j1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNhNDMwZWItMTM3MjNhNS04MGYyNGQ3YS1jNDBiNWM4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:40.023395Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034739685, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12025, MsgBus: 14438 2025-04-07T14:05:40.855735Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572175146786169:2164];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015da/r3tmp/tmpan68Wu/pdisk_1.dat 2025-04-07T14:05:40.893042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:40.948599Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:40.977166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:40.977241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:40.981245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12025, node 2 2025-04-07T14:05:41.050537Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:41.050563Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:41.050572Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:41.050682Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14438 TClient is connected to server localhost:14438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:41.450416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.460048Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:41.474604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.546693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:41.731482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.824074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.839224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572188031689637:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.839298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.891177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.922752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.951761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.980243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.023438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.069089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.155790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572192326657434:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.155868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.156204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572192326657439:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.160743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:44.169422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572192326657441:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:45.655417Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894jrdc377fpqdebpwfte2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE1M2I5ZWUtODk2OWIxMmMtNmYwMDQyZjEtN2U1Y2FiMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:45.852416Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572175146786169:2164];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:45.852498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:46.026301Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034745698, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 25140, MsgBus: 13380 2025-04-07T14:05:31.951844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134560614664:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.954575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001637/r3tmp/tmpiMQw8K/pdisk_1.dat 2025-04-07T14:05:32.505800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.505881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.507112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:32.520580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25140, node 1 2025-04-07T14:05:32.713977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.714015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.714027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.714143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13380 TClient is connected to server localhost:13380 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.614370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.650523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.658466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.853665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.075349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.188082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.513555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151740485408:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.522852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.763181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.813167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.854210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.887623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.998432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.030676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.076106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156035453205:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.076170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.076234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156035453210:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.079428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.090231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572156035453212:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.952084Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134560614664:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.952211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.129823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:37.753991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737781, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 29590, MsgBus: 5446 2025-04-07T14:05:38.733361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572164011931564:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001637/r3tmp/tmplDPfx3/pdisk_1.dat 2025-04-07T14:05:38.820160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.907452Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.910537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.910625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.912431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29590, node 2 2025-04-07T14:05:39.023630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.023649Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.023658Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.023751Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5446 TClient is connected to server localhost:5446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.669714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.698268Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.713224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.813252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.976699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:40.068265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.237447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572181191802274:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.237697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.263432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.299695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.350730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.385227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.461535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.532846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.602893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572181191802776:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.602970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.603149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572181191802781:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.606815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.615385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572181191802783:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.715178Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572164011931564:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.715334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.791953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.836469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.887086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.726267Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034745761, txId: 281474976710677] shutting down 2025-04-07T14:05:46.031079Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034746069, txId: 281474976710679] shutting down 2025-04-07T14:05:46.199260Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034746237, txId: 281474976710681] shutting down 2025-04-07T14:05:46.574682Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034746601, txId: 281474976710683] shutting down >> KqpScan::AggregateNoColumnNoRemaps >> KqpScan::SelectExistsUnexpected [GOOD] >> KqpScan::PrunePartitionsByExpr [GOOD] >> KqpScan::LMapFunction [GOOD] >> KqpScan::CountDistinct [GOOD] >> KqpScan::Counters >> KqpScan::GrepLimit [GOOD] >> KqpScan::GrepNonKeyColumns [GOOD] >> KqpScan::SimpleWindow [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TTabletPipeTest::TestConsumerSidePipeReset >> TTabletPipeTest::TestOpen >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> BootstrapperTest::RestartUnavailableTablet >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestShutdown >> KqpScan::NoTruncate [GOOD] >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelectExistsUnexpected [GOOD] Test command err: Trying to start YDB, gRPC: 14253, MsgBus: 7148 2025-04-07T14:05:31.941001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133962473144:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.947350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f3/r3tmp/tmpuSLYj2/pdisk_1.dat 2025-04-07T14:05:32.505263Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-07T14:05:32.511702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.511782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.514674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14253, node 1 2025-04-07T14:05:32.716400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.716426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.716454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.716546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7148 TClient is connected to server localhost:7148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.611199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.629207Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.648569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.775251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.970405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.039288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.267988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151142344019:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.268094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.779826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.806202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.833860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.866377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.931053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.026908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155437311820:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.026961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.027014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155437311825:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.030091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.039638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572155437311827:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.935630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133962473144:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.935694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.211012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.261970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.297185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.019530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.942689Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034739972, txId: 281474976710681] shutting down 2025-04-07T14:05:40.196540Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034740224, txId: 281474976710683] shutting down 2025-04-07T14:05:40.685333Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034740714, txId: 281474976710685] shutting down 2025-04-07T14:05:41.080641Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034741113, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 24016, MsgBus: 13907 2025-04-07T14:05:42.041957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572183844534057:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:42.042003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f3/r3tmp/tmp3ox2iA/pdisk_1.dat 2025-04-07T14:05:42.224141Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:42.256190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:42.256277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:42.258410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24016, node 2 2025-04-07T14:05:42.400208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:42.400232Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:42.400250Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:42.400390Z node 2 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13907 TClient is connected to server localhost:13907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:42.938618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.967246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:43.028940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.227505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.304938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.469679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572196729437503:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.469782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.509932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.542878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.578213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.607309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.640381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.709448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.752543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572196729438002:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.752653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.752735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572196729438007:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.755580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:45.763337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572196729438009:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:46.895729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.058128Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572183844534057:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.058169Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:47.659846Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034747406, txId: 281474976710673] shutting down 2025-04-07T14:05:48.105441Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034747882, txId: 281474976710676] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::PrunePartitionsByExpr [GOOD] Test command err: Trying to start YDB, gRPC: 3331, MsgBus: 28651 2025-04-07T14:05:37.467535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572160393986079:2245];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.467593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d7/r3tmp/tmpjDW1YH/pdisk_1.dat 2025-04-07T14:05:37.734527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3331, node 1 2025-04-07T14:05:37.826418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:37.826509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:37.827823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:37.827832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:37.827842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:37.827933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:37.828052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28651 TClient is connected to server localhost:28651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.465730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.508465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.647446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.913757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.988916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.078184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572177573856788:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.078270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.338857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.368817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.442735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.466545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.493683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.520969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.597062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572177573857284:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.597286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.601295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572177573857289:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.604477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.615321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572177573857291:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.466204Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572160393986079:2245];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:42.466277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:42.729039Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034742751, txId: 281474976710671] shutting down 2025-04-07T14:05:42.834414Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034742870, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 8893, MsgBus: 27276 2025-04-07T14:05:43.706509Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572184713748804:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.706583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d7/r3tmp/tmpyx54IB/pdisk_1.dat 2025-04-07T14:05:43.809315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:43.809403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:43.810897Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:43.824895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8893, node 2 2025-04-07T14:05:43.904975Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:43.905004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:43.905013Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:43.905107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27276 TClient is connected to server localhost:27276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:44.318903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.326676Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:44.335518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.393342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.564135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:44.649219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.719673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572197598652368:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.719749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.760795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.830074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.864283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.894007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.923700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.000147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.098183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572201893620176:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.098270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.098541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572201893620181:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.101952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.114583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572201893620183:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:48.207334Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748239, txId: 281474976715671] shutting down >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::LMapFunction [GOOD] Test command err: Trying to start YDB, gRPC: 29323, MsgBus: 5182 2025-04-07T14:05:31.933172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133159995503:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.952857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f8/r3tmp/tmp18c1yY/pdisk_1.dat 2025-04-07T14:05:32.485657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.529032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.529125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.530583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29323, node 1 2025-04-07T14:05:32.720577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.720598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.720603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.720695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5182 TClient is connected to server localhost:5182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.446208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.483101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.639031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.826628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.955898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.436048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150339866265:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.436136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.705394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.780402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.809987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.846529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.875596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.980435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.069863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154634834071:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.069932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.070199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154634834076:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.073666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.083116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154634834078:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.933549Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133159995503:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.933609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.099154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:37.675128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737676, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 31875, MsgBus: 30181 2025-04-07T14:05:38.403255Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572165136296549:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.403318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f8/r3tmp/tmplsC16G/pdisk_1.dat 2025-04-07T14:05:38.750505Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.775267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.775351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.781825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31875, node 2 2025-04-07T14:05:38.964523Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.964541Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.964550Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.964637Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30181 TClient is connected to server localhost:30181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.481812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.503514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.593149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.748921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.817722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.117259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182316167421:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.117454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.138487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.166204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.231253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.269015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.295838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.374841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.451718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182316167926:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.451804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.452883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572182316167931:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.456583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.467564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572182316167933:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:43.404634Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572165136296549:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.404771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.602106Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743612, txId: 281474976715671] shutting down 2025-04-07T14:05:43.802208Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743815, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 15596, MsgBus: 29534 2025-04-07T14:05:44.643863Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572191409646628:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.643916Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f8/r3tmp/tmpfMT0tV/pdisk_1.dat 2025-04-07T14:05:44.825831Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:44.839810Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:44.839901Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:44.841923Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15596, node 3 2025-04-07T14:05:44.894449Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:44.894470Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:44.894478Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:44.894561Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29534 TClient is connected to server localhost:29534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:05:45.333363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.866189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572204294549139:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.866295Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.880033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.959840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572204294549240:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.959913Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.960115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572204294549245:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.963163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.974251Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572204294549247:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:05:48.365666Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748400, txId: 281474976710662] shutting down [[[2];[1000];["Dogecoin"]];[[4];[1];["XTC"]];[[5];[2];["Cardano"]];[[6];[3];["Tether"]]] >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization [GOOD] >> TResourceBrokerInstant::TestErrors >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepLimit [GOOD] Test command err: Trying to start YDB, gRPC: 13561, MsgBus: 24271 2025-04-07T14:05:31.979726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135826121463:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.979919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00163e/r3tmp/tmpJ3JY0M/pdisk_1.dat 2025-04-07T14:05:32.555081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.576390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.580804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.589618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13561, node 1 2025-04-07T14:05:32.717921Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.718030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24271 TClient is connected to server localhost:24271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:33.452017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.483988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.496106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:33.610725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.766121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.859729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.360973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153005992192:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.361101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.705298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.738899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.778566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.807299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.850675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.968501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.038588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157300959987:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.038657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.038856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157300959992:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.042141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.052129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157300959994:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.981517Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135826121463:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.981680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.383877Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737375, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4597, MsgBus: 15062 2025-04-07T14:05:38.076629Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572165156576948:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00163e/r3tmp/tmpt3q0yy/pdisk_1.dat 2025-04-07T14:05:38.163659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.224921Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4597, node 2 2025-04-07T14:05:38.248700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.248772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.250431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:38.299617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.299640Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.299648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.299751Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15062 TClient is connected to server localhost:15062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:38.766885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:38.778025Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:38.802506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.873977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.080419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.206833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.340293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178041480365:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.340394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.382714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.417391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.451768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.476130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.543237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.577440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.685886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178041480866:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.685981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.686287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572178041480871:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.689555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.703937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572178041480873:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.895426Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034742926, txId: 281474976710671] shutting down 2025-04-07T14:05:43.056712Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572165156576948:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.056864Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21791, MsgBus: 62996 2025-04-07T14:05:43.706143Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572187779238448:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.706215Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00163e/r3tmp/tmprOsW0l/pdisk_1.dat 2025-04-07T14:05:43.823355Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:43.835291Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:43.835372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:43.836827Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21791, node 3 2025-04-07T14:05:43.877109Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:43.877134Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:43.877142Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:43.877245Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62996 TClient is connected to server localhost:62996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:44.316599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:44.328833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.387975Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.531642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.614403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.985804Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572200664142029:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.985885Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.038134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.074219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.110404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.187811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.258239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.299636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.379223Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572204959109824:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.379315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.379454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572204959109829:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.383139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.392153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572204959109831:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:48.551406Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748561, txId: 281474976715671] shutting down 2025-04-07T14:05:48.704376Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572187779238448:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:48.704443Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TTabletPipeTest::TestOpen [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepNonKeyColumns [GOOD] Test command err: Trying to start YDB, gRPC: 3062, MsgBus: 22366 2025-04-07T14:05:31.961366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572132881242635:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.961747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001643/r3tmp/tmpi2NNuR/pdisk_1.dat 2025-04-07T14:05:32.465644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.500736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.500832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.505297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3062, node 1 2025-04-07T14:05:32.723657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.723692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.723701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.723781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22366 TClient is connected to server localhost:22366 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.481241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.496038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.501951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.642163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.827919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:33.906849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.245564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150061113432:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.245685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.732502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.768859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.798185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.835061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.895009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.971296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150061113929:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.971383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.971764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150061113934:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.980640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:35.993070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572150061113936:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.960203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572132881242635:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.960269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.103781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.444751Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737487, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 25992, MsgBus: 31373 2025-04-07T14:05:38.129598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572164061045182:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001643/r3tmp/tmpuFYzlI/pdisk_1.dat 2025-04-07T14:05:38.187420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.271401Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.288450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.288519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.290505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25992, node 2 2025-04-07T14:05:38.344097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.344130Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.344138Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.344244Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31373 TClient is connected to server localhost:31373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:38.875475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.895987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.985376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.183610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.284106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.430342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176945948607:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.430401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.478229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.558517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.636546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.669475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.707585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.757068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.817007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176945949108:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.817089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.817148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176945949113:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.820853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.833750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572176945949115:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.085398Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743108, txId: 281474976710671] shutting down 2025-04-07T14:05:43.116777Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572164061045182:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.116845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23439, MsgBus: 6659 2025-04-07T14:05:44.012054Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572190987369741:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001643/r3tmp/tmpTY5Zhc/pdisk_1.dat 2025-04-07T14:05:44.095354Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:44.123325Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:44.142600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:44.142674Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:44.143559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23439, node 3 2025-04-07T14:05:44.181162Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:44.181188Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:44.181196Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:44.181285Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6659 TClient is connected to server localhost:6659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:44.634850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:44.656956Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:44.669965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.769370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.928164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:45.014063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.258341Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572203872273138:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.258466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.306814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.338285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.375049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.404210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.430351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.465823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.558933Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572203872273637:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.559069Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.559169Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572203872273642:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.563038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.574698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572203872273644:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:48.980180Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034749009, txId: 281474976715671] shutting down 2025-04-07T14:05:49.006159Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572190987369741:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:49.006231Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> KqpScan::TwoAggregatesTwoWindows [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SimpleWindow [GOOD] Test command err: Trying to start YDB, gRPC: 13783, MsgBus: 24710 2025-04-07T14:05:38.142737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572166955287776:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.142814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d4/r3tmp/tmppOtjuN/pdisk_1.dat 2025-04-07T14:05:38.614436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.614560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.617171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.619747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13783, node 1 2025-04-07T14:05:38.795393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.795417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.795426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.795520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24710 TClient is connected to server localhost:24710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:39.503357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.534637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.738799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.003100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.096976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.940269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572179840191227:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.940382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.163641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.216336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.244329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.272781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.296890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.379423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.458650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572184135159032:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.458738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.458902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572184135159037:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.462056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.476093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572184135159039:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.164322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572166955287776:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.164401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.662654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743703, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 63752, MsgBus: 63198 2025-04-07T14:05:44.308597Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572191245650667:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.308643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d4/r3tmp/tmp9COxNg/pdisk_1.dat 2025-04-07T14:05:44.452568Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63752, node 2 2025-04-07T14:05:44.497388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:44.497484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:44.499402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:44.529148Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:44.529168Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:44.529177Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:44.529262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63198 TClient is connected to server localhost:63198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:44.954659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:44.964968Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:44.972836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.063570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.205880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.267980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.412771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572204130554270:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.412852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.440461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.473783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.500524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.530996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.560163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.597223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.646671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572204130554764:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.646736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.647089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572204130554769:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.650137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.656919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572204130554771:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:49.202423Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034749233, txId: 281474976715671] shutting down 2025-04-07T14:05:49.308612Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572191245650667:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:49.308694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TResourceBrokerInstant::TestMerge [GOOD] >> KqpFlowControl::FlowControl_SmallLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 27615, MsgBus: 28912 2025-04-07T14:05:31.956119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133023195749:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.956169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001612/r3tmp/tmp7kMkbP/pdisk_1.dat 2025-04-07T14:05:32.591032Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.605606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.605699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.610837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27615, node 1 2025-04-07T14:05:32.723919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.723944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.723958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.724053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28912 TClient is connected to server localhost:28912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.665917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.677892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.694481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.861836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.221359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.364966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.820465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150203066647:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.820596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.139268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.167004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.203812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.237167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.299587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.367882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.453098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154498034450:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.453168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.453362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154498034455:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.456831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.465891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154498034457:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.960765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133023195749:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.960943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.528523Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 27112, MsgBus: 22575 2025-04-07T14:05:39.494402Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572169524787355:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.494440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001612/r3tmp/tmpCXRsfw/pdisk_1.dat 2025-04-07T14:05:39.876407Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.878367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.878447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.885722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27112, node 2 2025-04-07T14:05:40.021204Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:40.021229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:40.021238Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:40.021346Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22575 TClient is connected to server localhost:22575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:05:40.529727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:40.560868Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.566723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:40.624950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:40.771835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:40.855825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.914063Z node 2 :KQP_WORKLOAD_SERVICE W ...
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490572173819755391:2298] 2025-04-07T14:05:47.775306Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037893, step: 1744034747784 2025-04-07T14:05:47.775354Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037891, step: 1744034747784 2025-04-07T14:05:47.775390Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530357:2805]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490572173819755455:2302] 2025-04-07T14:05:47.775439Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530355:2803]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490572173819755437:2299] 2025-04-07T14:05:47.775679Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037892, step: 1744034747784 2025-04-07T14:05:47.775722Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037893, step: 1744034747784 2025-04-07T14:05:47.775759Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530356:2804]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490572173819755445:2301] 2025-04-07T14:05:47.775812Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530357:2805]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490572173819755455:2302] 2025-04-07T14:05:47.775904Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037890, step: 1744034747784 2025-04-07T14:05:47.775941Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530354:2802]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490572173819755391:2298] 2025-04-07T14:05:47.776542Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037891, step: 1744034747784 2025-04-07T14:05:47.776602Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530355:2803]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490572173819755437:2299] 2025-04-07T14:05:47.776894Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037890, step: 1744034747784 2025-04-07T14:05:47.777034Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530354:2802]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490572173819755391:2298] 2025-04-07T14:05:47.777100Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037892, step: 1744034747784 2025-04-07T14:05:47.777148Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037893, step: 1744034747784 2025-04-07T14:05:47.777184Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530356:2804]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490572173819755445:2301] 2025-04-07T14:05:47.777225Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530357:2805]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490572173819755455:2302] 2025-04-07T14:05:47.777601Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037891, step: 1744034747784 2025-04-07T14:05:47.777651Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530355:2803]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490572173819755437:2299] 2025-04-07T14:05:47.778130Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037893, step: 1744034747784 2025-04-07T14:05:47.778187Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530357:2805]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490572173819755455:2302] 2025-04-07T14:05:47.778198Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572203884530357:2805]. TKqpScanFetcherActor: broken tablet for this request 72075186224037893, retries limit exceeded (0/20) 2025-04-07T14:05:47.778309Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037890, step: 1744034747784 2025-04-07T14:05:47.778352Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037892, step: 1744034747784 2025-04-07T14:05:47.778390Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530354:2802]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490572173819755391:2298] 2025-04-07T14:05:47.778397Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572203884530354:2802]. TKqpScanFetcherActor: broken tablet for this request 72075186224037890, retries limit exceeded (0/20) 2025-04-07T14:05:47.778469Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530356:2804]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490572173819755445:2301] 2025-04-07T14:05:47.778479Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572203884530356:2804]. TKqpScanFetcherActor: broken tablet for this request 72075186224037892, retries limit exceeded (0/20) 2025-04-07T14:05:47.778823Z node 2 :TX_DATASHARD ERROR: TxId: 281474976710704. Snapshot is not valid, tabletId: 72075186224037891, step: 1744034747784 2025-04-07T14:05:47.778870Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490572203884530355:2803]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490572173819755437:2299] 2025-04-07T14:05:47.778878Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572203884530355:2803]. TKqpScanFetcherActor: broken tablet for this request 72075186224037891, retries limit exceeded (0/20) 2025-04-07T14:05:47.837954Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:47.911370Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034747945, txId: 281474976710706] shutting down 2025-04-07T14:05:48.001885Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.089054Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490572208179497884:2829] TxId: 281474976710709. Ctx: { TraceId: 01jr894nk70jen4z6emdybavp2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:05:48.089184Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572208179497892:2833], TxId: 281474976710709, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=. CustomerSuppliedId : . TraceId : 01jr894nk70jen4z6emdybavp2. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490572208179497884:2829], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:05:48.089392Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=, ActorId: [2:7490572208179497857:2829], ActorState: ExecuteState, TraceId: 01jr894nk70jen4z6emdybavp2, Create QueryResponse for error on request, msg: 2025-04-07T14:05:48.089417Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.089685Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748120, txId: 281474976710708] shutting down 2025-04-07T14:05:48.090095Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572208179497891:2832], TxId: 281474976710709, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=. TraceId : 01jr894nk70jen4z6emdybavp2. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490572208179497884:2829], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:05:48.090559Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572208179497893:2834], TxId: 281474976710709, task: 5. Ctx: { TraceId : 01jr894nk70jen4z6emdybavp2. SessionId : ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490572208179497884:2829], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:05:48.090558Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572208179497889:2830], TxId: 281474976710709, task: 1. Ctx: { TraceId : 01jr894nk70jen4z6emdybavp2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDM0MTc2MzEtZmZjZDYyZWEtOTE1MjQzYzEtOGYyODFmZjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490572208179497884:2829], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:05:48.180588Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.273620Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.368841Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.442011Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748456, txId: 281474976710711] shutting down 2025-04-07T14:05:48.554960Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.612649Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748645, txId: 281474976710713] shutting down 2025-04-07T14:05:48.712729Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.771377Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748806, txId: 281474976710715] shutting down 2025-04-07T14:05:48.882110Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:48.928460Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748960, txId: 281474976710717] shutting down 2025-04-07T14:05:49.035102Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:49.071728Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034749107, txId: 281474976710719] shutting down 2025-04-07T14:05:49.180839Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:49.284043Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:49.382062Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:49.407915Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034749443, txId: 281474976710721] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::NoTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 28063, MsgBus: 21592 2025-04-07T14:05:31.953819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134333555189:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.953918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ef/r3tmp/tmpvOTMOn/pdisk_1.dat 2025-04-07T14:05:32.619411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.619511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.621273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.623833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28063, node 1 2025-04-07T14:05:32.725801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.725842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.725852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.725933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21592 TClient is connected to server localhost:21592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.691101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.709858Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.724054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.940267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:34.162912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:05:34.243475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.669598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151513425932:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.691281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.960118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.993426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.021323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.050157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.072142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.134123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.200211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155808393728:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.200324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.200637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155808393733:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.203540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.214177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572155808393735:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.954017Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134333555189:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.992814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.221285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.571389Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737606, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 20312, MsgBus: 25561 2025-04-07T14:05:38.351092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572166281296916:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.351157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ef/r3tmp/tmpcv38cI/pdisk_1.dat 2025-04-07T14:05:38.519532Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.522545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.522611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.524785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20312, node 2 2025-04-07T14:05:38.668431Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.668451Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.668458Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.668592Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25561 TClient is connected to server localhost:25561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.154229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.160784Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.169730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.249355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.461128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at s ... DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.767491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.819189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.851899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.881428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.911704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:41.982510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.069614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.157328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572183461168287:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.157405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.157524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572183461168292:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.160234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.172209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572183461168294:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.352511Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572166281296916:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.352581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.377419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.757537Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034743766, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 21478, MsgBus: 19460 2025-04-07T14:05:44.555067Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572192140452463:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ef/r3tmp/tmpQaqgZP/pdisk_1.dat 2025-04-07T14:05:44.607622Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:44.659170Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:44.681612Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:44.681684Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:44.683487Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21478, node 3 2025-04-07T14:05:44.781102Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:44.781118Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:44.781124Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:44.781186Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19460 TClient is connected to server localhost:19460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:45.202226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.220787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.295138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.437026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.509417Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.643893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572205025355878:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.643968Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.695912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.726312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.758284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.784033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.851810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.889089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.936340Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572205025356379:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.936417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.936740Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572205025356384:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.940813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:47.950599Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572205025356386:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:49.052213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.312830Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034749331, txId: 281474976715673] shutting down 2025-04-07T14:05:49.547187Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572192140452463:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:49.547239Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> KqpScan::DqSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization [GOOD] Test command err: Trying to start YDB, gRPC: 9628, MsgBus: 14558 2025-04-07T14:05:38.645574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572163660229231:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.645710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d0/r3tmp/tmpSFoJvk/pdisk_1.dat 2025-04-07T14:05:39.174642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.174712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.181744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:39.276757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9628, node 1 2025-04-07T14:05:39.445068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.445084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.445090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.445157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14558 TClient is connected to server localhost:14558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:40.147277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.160740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.175902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.356424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.504023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.592493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.282855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572180840099970:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.282968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.548545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.581486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.622624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.652488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.689280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.737618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.830718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572180840100470:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.830791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.838621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572180840100475:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.842251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.859612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572180840100477:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.642363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572163660229231:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.642449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-07T14:05:43.814975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.006442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.629287Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744655, txId: 281474976710675] shutting down 2025-04-07T14:05:48.369892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:48.370223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:48.370296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d0/r3tmp/tmpX6mP1I/pdisk_1.dat 2025-04-07T14:05:48.619712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.642756Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:48.690434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:48.690559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:48.701851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:48.830603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:610:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.830752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.838905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.142530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:760:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.142694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.142934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:765:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.147435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:05:49.349060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:767:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to NKikimr::NTxProxy::TResolveTablesActor Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to KQP_TABLE_RESOLVER 2025-04-07T14:05:49.759971Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr894ppp22w6rc073k8ramrn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWI0NTZhYi00NmU2MmYxYi0xMDNhMzkyNi1mMmYwMTk1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to KQP_STREAM_LOOKUP_ACTOR 2025-04-07T14:05:49.767019Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715660] shutting down Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to NKikimr::NTxProxy::TResolveTablesActor >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> KqpScan::Like [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-04-07T14:05:50.539553Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task >> TTabletLabeledCountersAggregator::HeavyAggregation >> KqpScan::Join2 [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TPipeCacheTest::TestIdleRefresh >> TResourceBroker::TestCounters >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TResourceBroker::TestAutoTaskId [GOOD] >> KqpScan::RightJoinSimple [GOOD] >> KqpScan::RightOnlyJoinSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::TwoAggregatesTwoWindows [GOOD] Test command err: Trying to start YDB, gRPC: 13368, MsgBus: 6866 2025-04-07T14:05:31.940319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133785940456:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.951507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001641/r3tmp/tmpBTz9Br/pdisk_1.dat 2025-04-07T14:05:32.571963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.573598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.573679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.593305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13368, node 1 2025-04-07T14:05:32.769589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.769613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.769624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.769720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6866 TClient is connected to server localhost:6866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.437574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.452723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.469168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.597314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.793019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.872316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.302858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150965811189:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.302980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.706282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.740238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.770315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.796487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.832548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.870204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.972847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150965811689:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.972949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.976917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150965811694:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.980084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:35.992986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572150965811696:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940745Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133785940456:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.940803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.063032Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738334, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 8953, MsgBus: 13909 2025-04-07T14:05:44.847888Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572189002891018:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.847936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001641/r3tmp/tmpdmJQWH/pdisk_1.dat 2025-04-07T14:05:44.972739Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:44.997345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:44.997428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:44.998893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8953, node 2 2025-04-07T14:05:45.045174Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:45.045198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:45.045207Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:45.045301Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13909 TClient is connected to server localhost:13909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:45.467856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:45.487409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.546255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.700125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.762863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.858596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572201887794594:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.858686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:47.879628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:47.955988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.003746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.078497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.119529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.154641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.217996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572206182762390:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.218117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.218271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572206182762395:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.222217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:48.233757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572206182762397:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:49.872956Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572189002891018:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:49.873069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:50.251590Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034750255, txId: 281474976715671] shutting down >> KqpSplit::ChoosePartition+Ascending [GOOD] >> KqpSplit::BorderKeys+Unspecified >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] >> KqpScan::CustomWindow [GOOD] >> KqpScan::CrossJoinOneColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpFlowControl::FlowControl_SmallLimit [GOOD] Test command err: Trying to start YDB, gRPC: 64741, MsgBus: 6726 2025-04-07T14:05:31.947777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572134455003840:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.948210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00162d/r3tmp/tmpTjGlMf/pdisk_1.dat 2025-04-07T14:05:32.697137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.705839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.705932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.718848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64741, node 1 2025-04-07T14:05:32.935080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.935100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.935106Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.935182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6726 TClient is connected to server localhost:6726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.720646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.754656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.775607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.950064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.124586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:34.190870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.798406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572151634874728:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.798527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.019404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.047376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.083674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.149303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.184215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.230289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.306358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155929842524:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.306457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.306765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572155929842529:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.310601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.319496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:05:36.320044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572155929842531:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:37.020730Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572134455003840:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:37.020813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.311622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.093299Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738005, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 3623, MsgBus: 9199 2025-04-07T14:05:39.175455Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572167686483986:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.187109Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00162d/r3tmp/tmpUjyV9Y/pdisk_1.dat 2025-04-07T14:05:39.347298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.357782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.357845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.368216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3623, node 2 2025-04-07T14:05:39.587244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:39.587265Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:39.587272Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:39.587351Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9199 TClient is connected to server localhost:9199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:40.127718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.134629Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.147394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:40.274482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:40.420176Z node ... 4:05:42.641103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.689809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.765529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.819544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.851157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.891617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.923387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.004167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184866355179:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.004232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.004280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572184866355184:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.006832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:43.016630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572184866355186:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } waiting... 2025-04-07T14:05:43.924515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.145871Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572167686483986:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.145944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.665184Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744599, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 10405, MsgBus: 23331 2025-04-07T14:05:45.525610Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572196616720801:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:45.570518Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00162d/r3tmp/tmpNWJbrR/pdisk_1.dat 2025-04-07T14:05:45.658788Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10405, node 3 2025-04-07T14:05:45.678479Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.678560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.694343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:45.723415Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:45.723442Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:45.723450Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:45.723553Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23331 TClient is connected to server localhost:23331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:46.053011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.057201Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:46.072357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.120850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.262858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:46.325231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.452495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572209501624247:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.452582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.490626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.529520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.564540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.603196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.647958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.682947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.734926Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572209501624743:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.735011Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.735393Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572209501624748:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.738373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:48.747515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572209501624750:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:49.730175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.485191Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034750353, txId: 281474976715673] shutting down 2025-04-07T14:05:50.520478Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572196616720801:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:50.520537Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScan::Effects [GOOD] >> KqpScan::FullFrameWindow >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-04-07T14:05:51.308352Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-04-07T14:05:51.308955Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-04-07T14:05:51.309005Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-04-07T14:05:51.410227Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... 
waiting for multiple state storage lookup attempts (done) >> BootstrapperTest::LoneBootstrapper >> TResourceBrokerConfig::UpdateTasks >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TResourceBroker::TestErrors >> TTabletPipeTest::TestSendAfterReboot >> TTabletPipeTest::TestSendBeforeBootTarget >> KqpScan::MultipleResults [GOOD] >> KqpScan::MiltiExprWithPure >> TTabletPipeTest::TestConnectReject |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSource [GOOD] Test command err: Trying to start YDB, gRPC: 24945, MsgBus: 24918 2025-04-07T14:05:31.972919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572136736820883:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.973267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001652/r3tmp/tmpxTtGDT/pdisk_1.dat 2025-04-07T14:05:32.501812Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.510945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.511062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.526947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24945, node 1 2025-04-07T14:05:32.716410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.716433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.716449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.716530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24918 TClient is connected to server localhost:24918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:33.615081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.627695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.637651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.870739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.113687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.218688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.659704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153916691616:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.659804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.935221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.979172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.020391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.046365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.080455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.122974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.181801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572158211659409:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.181874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.182150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572158211659414:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.186514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.197934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572158211659416:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.965316Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572136736820883:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.965369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.210284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:38.135406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737837, txId: 281474976710673] shutting down 2025-04-07T14:05:38.805257Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738544, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 21514, MsgBus: 11298 2025-04-07T14:05:39.746141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572170764267839:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.830803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001652/r3tmp/tmpELdIBw/pdisk_1.dat 2025-04-07T14:05:39.943906Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:39.954239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:39.954319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:39.955923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21514, node 2 2025-04-07T14:05:40.133178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:40.133201Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:40.133209Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:40.133317Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11298 TClient is connected to server localhost:11298 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:40.619618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.632988Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:40.655172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.747696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:4 ... D, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.157881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.214158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.262477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.306292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.340000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.406286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.445622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.501895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572187944139173:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.502020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.502219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572187944139179:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.505910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:43.516790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572187944139181:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:44.419218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.668731Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-07T14:05:44.754206Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572170764267839:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.754259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20980, MsgBus: 31143 2025-04-07T14:05:45.532691Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572197115369260:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:45.532759Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001652/r3tmp/tmp1p6BXc/pdisk_1.dat 2025-04-07T14:05:45.648081Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:45.659166Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.659264Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.661144Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20980, node 3 2025-04-07T14:05:45.706677Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:45.706693Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:45.706699Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:45.706784Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31143 TClient is connected to server localhost:31143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:46.150899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.158782Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:46.165867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.226515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.363267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.440952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.673650Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572210000272834:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.673755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.720463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.752375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.783782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.815194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.847372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.922305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.005461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572214295240633:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.005532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572214295240638:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.005598Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.009155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:49.020094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572214295240640:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:50.073090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.532926Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572197115369260:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:50.532983Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:50.686757Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034750724, txId: 281474976715673] shutting down >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] >> KqpSplit::IntersectionLosesRange+Descending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Like [GOOD] Test command err: Trying to start YDB, gRPC: 61232, MsgBus: 24639 2025-04-07T14:05:31.951903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572136468015229:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.954243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001605/r3tmp/tmpy5K5GG/pdisk_1.dat 2025-04-07T14:05:32.651818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.655056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.655138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.666584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61232, node 1 2025-04-07T14:05:32.853058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.853083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.853089Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.853178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24639 TClient is connected to server localhost:24639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.995429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.025443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:34.042171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.271437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.443601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.516241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:36.129411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572153647885970:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.135865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.175471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.214547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.249386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.281892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.312151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.349940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.438737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157942853761:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.438787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572157942853766:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.438830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.442748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.453130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572157942853768:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.946366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572136468015229:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.946524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.550749Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737592, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 17713, MsgBus: 27181 2025-04-07T14:05:38.413200Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572163140138818:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001605/r3tmp/tmpNDOsKa/pdisk_1.dat 2025-04-07T14:05:38.481558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:38.567942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.568023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.569471Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.569768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17713, node 2 2025-04-07T14:05:38.736988Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.737009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.737020Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.737119Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27181 TClient is connected to server localhost:27181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:05:39.303020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.319174Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:39.331453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:39.429319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.652054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:05:39.721211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... 5:41.895899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572176025042711:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:41.898869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:41.908806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572176025042713:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:42.899168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.947825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.029681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.352914Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572163140138818:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.353057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":4,"Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"No estimate","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6749, MsgBus: 63790 2025-04-07T14:05:45.782184Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572195152385426:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:45.782267Z node 3 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001605/r3tmp/tmpKdQUB5/pdisk_1.dat 2025-04-07T14:05:45.887476Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:45.900067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.900163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.901256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6749, node 3 2025-04-07T14:05:45.952919Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:45.953056Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:45.953066Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:45.953173Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63790 TClient is connected to server localhost:63790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:46.435089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.441734Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:46.446669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.529260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.712295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:46.785436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.092491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572212332256312:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.092577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.148130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.178628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.208432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.240394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.289479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.319617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.394307Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572212332256810:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.394390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.394446Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572212332256815:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.399024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:49.408712Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572212332256817:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:50.594162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.782798Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572195152385426:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:50.782845Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join2 [GOOD] Test command err: Trying to start YDB, gRPC: 3614, MsgBus: 14276 2025-04-07T14:05:31.927609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133496394788:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.951865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001664/r3tmp/tmpBZvmOa/pdisk_1.dat 2025-04-07T14:05:32.597085Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.606781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.616148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.629822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3614, node 1 2025-04-07T14:05:32.761175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.761209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.761217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.761291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14276 TClient is connected to server localhost:14276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:33.713375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.731735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.744265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.958617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.199034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.300289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:36.056166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150676265666:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.056466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.102577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.140352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.199011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.227985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.264157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.303563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.378953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154971233459:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.379023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.379182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154971233464:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.382440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.392443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154971233466:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.944999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133496394788:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.945093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.377305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:05:37.671992Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737711, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 9334, MsgBus: 63707 2025-04-07T14:05:38.378218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572164094738489:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:38.378312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001664/r3tmp/tmpYFdMgy/pdisk_1.dat 2025-04-07T14:05:38.608646Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:38.617377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:38.617448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:38.625701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9334, node 2 2025-04-07T14:05:38.726245Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:38.726273Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:38.726282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:38.726402Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63707 TClient is connected to server localhost:63707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:39.208400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.225490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.426920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.703112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:39.793034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsaf ... rce pool default not found or you don't have access permissions } 2025-04-07T14:05:42.208748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.239048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.282301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.327080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.417481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.478441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:42.558075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572181274609867:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.558186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.558433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572181274609872:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.562286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:42.578794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572181274609874:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:43.381140Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572164094738489:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:43.381198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:43.759060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.478014Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744494, txId: 281474976710673] shutting down 2025-04-07T14:05:44.877353Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744900, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 24397, MsgBus: 12371 2025-04-07T14:05:45.611172Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572194381742666:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:45.611216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001664/r3tmp/tmpE0pfeF/pdisk_1.dat 2025-04-07T14:05:45.729869Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:45.755754Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.755829Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.759496Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24397, node 3 2025-04-07T14:05:45.816888Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:45.816912Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:45.816921Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:45.817022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12371 TClient is connected to server localhost:12371 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:05:46.270824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.275765Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:46.293998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.383305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.527081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.591980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.767231Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572207266646177:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.767320Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:48.806965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.841506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.878654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.912509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.942039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.013292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.091532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572211561613976:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.091601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.091786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572211561613981:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.095448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:49.107138Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572211561613983:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:50.106171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.611254Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572194381742666:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:50.611305Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:51.007679Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034750976, txId: 281474976715673] shutting down >> TPipeCacheTest::TestTabletNode [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TResourceBroker::TestChangeTaskType [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletResolver::TabletResolvePriority [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> KqpScan::SqlInParameter [GOOD] >> KqpScan::SqlInLiteral >> TResourceBroker::TestResubmitTask >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> KqpScan::UnionAggregate [GOOD] >> KqpScan::UdfFailure >> BootstrapperTest::KeepExistingTablet >> TResourceBroker::TestOverusage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 13267, MsgBus: 20616 2025-04-07T14:05:39.627356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572167615162998:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:39.627593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015cc/r3tmp/tmpsGUzYc/pdisk_1.dat 2025-04-07T14:05:40.145390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:40.177692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:40.177772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:40.180755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13267, node 1 2025-04-07T14:05:40.365848Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:40.365866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:40.365875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:40.365964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20616 TClient is connected to server localhost:20616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:40.939081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:40.972992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.127501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.292432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.360539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.972287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572180500066461:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:42.972410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.188385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.212201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.256278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.280820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.359445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.401721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:43.510630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572184795034257:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.510719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.511053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572184795034262:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:43.514176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:43.525467Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:05:43.525703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572184795034265:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:44.638529Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572167615162998:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:44.638631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:44.933657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894j5j7sbsm4jzx838h067, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0MzQ3N2QtZjg0M2M3Y2ItZmMyY2QwMTEtMmIxNGJjN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:45.407567Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034744977, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 61221, MsgBus: 31668 2025-04-07T14:05:46.190691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572199436838800:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:46.190744Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015cc/r3tmp/tmpZTfiAl/pdisk_1.dat 2025-04-07T14:05:46.361415Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:46.385853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:46.385935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:46.390132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61221, node 2 2025-04-07T14:05:46.566705Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:46.566727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:46.566734Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:46.566849Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31668 TClient is connected to server localhost:31668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:46.987762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.994683Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:47.011341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.070480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.222023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.296964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.528179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572212321742407:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.528263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.562276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.595716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.628272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.670696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.701048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.769286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.824322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572212321742906:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.824425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.824848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572212321742911:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.829743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:49.839514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572212321742913:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:51.196132Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572199436838800:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:51.196213Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- 2025-04-07T14:05:51.260987Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894rb2f4q0ewkde5n1h7gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjVhZTk0ZjYtNDk5MWY3M2ItMmY3NmNhOS01YTA1YzRmNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:51.610814Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034751305, txId: 281474976715671] shutting down |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:116:2144] sender: [1:117:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:159:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:116:2144] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources |91.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> KqpScan::AggregateEmptySum [GOOD] >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:116:2144] sender: [1:117:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:159:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:116:2144] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:116:2144] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TResourceBroker::TestNotifyActorDied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2025-04-07T14:05:53.380569Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 >> TTabletPipeTest::TestRewriteSameNode >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown |91.8%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-04-07T14:05:53.255218Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-04-07T14:05:53.255440Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-04-07T14:05:53.255600Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestOwnership >> KqpScan::Counters [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> TTabletPipeTest::TestSendAfterOpen |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] >> KqpScan::Join3Tables [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TResourceBroker::TestRealUsage >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:126:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:161:2057] recipient: [1:159:2166] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:161:2057] recipient: [1:159:2166] Leader for TabletID 9437185 is [1:165:2170] sender: [1:166:2057] recipient: [1:159:2166] Leader for TabletID 9437185 is [1:165:2170] sender: [1:185:2057] recipient: [1:14:2061] >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletPipeTest::TestTwoNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 29530, MsgBus: 7413 2025-04-07T14:05:31.941328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133217786987:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.941435Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015fc/r3tmp/tmpctNU18/pdisk_1.dat 2025-04-07T14:05:32.587033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.587116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.597171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:32.605791Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29530, node 1 2025-04-07T14:05:32.744898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.744917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.744924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.745019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7413 TClient is connected to server localhost:7413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.543567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.569494Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.591035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.850081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.201242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:05:34.295380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.901908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150397657876:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.902007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.208960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.241429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.266910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.386751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.422882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.462067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.517706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154692625672:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.517789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.518008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572154692625677:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.521901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.531642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572154692625679:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.947061Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133217786987:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.947181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:40.366518Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738383, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 22664, MsgBus: 4202 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015fc/r3tmp/tmpJDpO9d/pdisk_1.dat 2025-04-07T14:05:41.400857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:41.406237Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:41.422378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:41.422471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:41.424200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22664, node 2 2025-04-07T14:05:41.637181Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:41.637207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:41.637215Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:41.637320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4202 TClient is connected to server localhost:4202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:42.157254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:42.163315Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:42.176551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.240386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.443205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.540029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.617635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572192231208857:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.617737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.663755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.706053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.753393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.831133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.878118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.956410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.038537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572196526176659:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.038616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.038994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572196526176664:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.043126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:45.055990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572196526176666:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:46.782494Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034746468, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 15752, MsgBus: 27670 2025-04-07T14:05:47.766997Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572205386623058:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.767046Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015fc/r3tmp/tmpwhqC6J/pdisk_1.dat 2025-04-07T14:05:47.975687Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15752, node 3 2025-04-07T14:05:48.077185Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:48.077208Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:48.077220Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:48.077335Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:48.094286Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:48.094369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:48.096000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27670 TClient is connected to server localhost:27670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.582062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.589232Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.607185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:48.694020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.852118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.927375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:51.147476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572222566493928:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.147582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.191991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.236075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.275318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.300492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.338609Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.409525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.495128Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572222566494433:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.495211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.495300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572222566494438:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.498869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:51.507645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572222566494440:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:52.769545Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572205386623058:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.769620Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:53.098252Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034752866, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2025-04-07T14:05:54.552702Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2025-04-07T14:05:54.569654Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-07T14:05:54.576825Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-07T14:05:54.579686Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:126:2152] 2025-04-07T14:05:54.579739Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:126:2152] 2025-04-07T14:05:54.580027Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:126:2152] 2025-04-07T14:05:54.580073Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:126:2152] 2025-04-07T14:05:54.580186Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:126:2152] 2025-04-07T14:05:54.580215Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:126:2152] 2025-04-07T14:05:54.580284Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:126:2152] 2025-04-07T14:05:54.580436Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:126:2152] Type# 269877249 Reason# ActorUnknown 2025-04-07T14:05:54.580589Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:129:2154] 2025-04-07T14:05:54.580613Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:129:2154] 2025-04-07T14:05:54.580687Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:129:2154] 2025-04-07T14:05:54.580735Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:129:2154] 2025-04-07T14:05:54.580776Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:129:2154] 2025-04-07T14:05:54.580796Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:129:2154] 2025-04-07T14:05:54.580851Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:129:2154] 2025-04-07T14:05:54.580918Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:129:2154] Type# 269877249 Reason# ActorUnknown 2025-04-07T14:05:54.581003Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2025-04-07T14:05:54.581021Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2025-04-07T14:05:54.581057Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2025-04-07T14:05:54.581085Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2025-04-07T14:05:54.581112Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2025-04-07T14:05:54.581130Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2025-04-07T14:05:54.581162Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2025-04-07T14:05:54.581243Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered 
Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TPipeCacheTest::TestAutoConnect [GOOD] >> KqpScan::AggregateNoColumnNoRemaps [GOOD] >> KqpScan::AggregateWithFunction >> TTabletPipeTest::TestPipeConnectToHint |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Counters [GOOD] Test command err: Trying to start YDB, gRPC: 28898, MsgBus: 10132 2025-04-07T14:05:31.967719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572135213103352:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.970989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f1/r3tmp/tmpmT8wra/pdisk_1.dat 2025-04-07T14:05:32.441334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.454368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.454539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.480354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28898, node 1 2025-04-07T14:05:32.724398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.724420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.724425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.724494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10132 TClient is connected to server localhost:10132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.479486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.489983Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.507259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.686812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.916320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:34.023629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.689760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572152392974085:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.696462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.731184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.763618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.797566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.852998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.885677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.978649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:36.050948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156687941884:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.050997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.051523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572156687941889:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:36.054689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:36.062980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572156687941891:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.964881Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572135213103352:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.964946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.090974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.983244Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034737830, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 17542, MsgBus: 31607 2025-04-07T14:05:42.806881Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572182756430134:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:42.807729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f1/r3tmp/tmpvffPTF/pdisk_1.dat 2025-04-07T14:05:42.955548Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17542, node 2 2025-04-07T14:05:42.983210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:42.983309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:42.997150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:43.050032Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:43.050055Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:43.050072Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:43.050172Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31607 TClient is connected to server localhost:31607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:43.545352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.566648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.622031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.784778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:43.867781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.833479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572195641333698:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.833625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:45.877030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.948588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:45.994932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.026759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.099664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.138869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:46.201786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572199936301497:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.201900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.202229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572199936301502:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:46.205498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:46.214951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572199936301504:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:47.800762Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572182756430134:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.857576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:48.342406Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034748099, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19899, MsgBus: 8458 2025-04-07T14:05:49.172781Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572211662232560:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:49.172848Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015f1/r3tmp/tmpPY0Iw0/pdisk_1.dat 2025-04-07T14:05:49.256177Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:49.277661Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:49.277772Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:49.279400Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19899, node 3 2025-04-07T14:05:49.332578Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:49.332597Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:49.332608Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:49.332720Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8458 TClient is connected to server localhost:8458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:49.799255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:49.805960Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:49.820418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.904183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.084642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.172771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:52.252186Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572224547136176:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:52.252262Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:52.330204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.360984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.388242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.415221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.447703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.517214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.569914Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572224547136673:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:52.570017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:52.570085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572224547136678:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:52.573943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:52.583505Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572224547136680:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:53.867765Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034753909, txId: 281474976710671] shutting down |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TResourceBroker::TestRandomQueue [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join3Tables [GOOD] Test command err: Trying to start YDB, gRPC: 26710, MsgBus: 18293 2025-04-07T14:05:31.952960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572133102953655:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:31.953239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001609/r3tmp/tmpQVJWFX/pdisk_1.dat 2025-04-07T14:05:32.497363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:32.530466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:32.530554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:32.532114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26710, node 1 2025-04-07T14:05:32.717365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:32.717389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:32.717397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:32.717500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18293 TClient is connected to server localhost:18293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:33.428683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:33.441857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:33.455578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.598984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.741793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:33.828434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:35.460303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150282824414:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.460482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.705277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.732764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.796860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.835144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.873428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.922723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:35.985836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150282824909:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.985921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.986173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572150282824914:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:35.989901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:35.999091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572150282824916:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:36.940827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572133102953655:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:36.940908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:37.098742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:38.930573Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034738355, txId: 281474976710673] shutting down 2025-04-07T14:05:40.430011Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034739888, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 22685, MsgBus: 17079 2025-04-07T14:05:41.228780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572178079399561:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:41.228856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001609/r3tmp/tmpwcEL5q/pdisk_1.dat 2025-04-07T14:05:41.353385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:41.392213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:41.392289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:41.393466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22685, node 2 2025-04-07T14:05:41.474097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:41.474121Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:41.474129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:41.474222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17079 TClient is connected to server localhost:17079 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:41.953799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:41.959993Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:41.970798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:42.047046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... 057594046644480 waiting... 2025-04-07T14:05:42.308111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:44.469670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572190964303131:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.469751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.504211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.532690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.568602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.607167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.682197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.720388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:44.781682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572190964303629:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.781766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.782103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572190964303634:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:44.785034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:44.801283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572190964303636:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:45.844434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.229148Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572178079399561:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:46.229199Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:46.711968Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034746727, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 21827, MsgBus: 28735 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001609/r3tmp/tmpBTzImG/pdisk_1.dat 2025-04-07T14:05:47.609247Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:47.629770Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 21827, node 3 2025-04-07T14:05:47.644293Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.644397Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.647454Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:47.693739Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:47.693774Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:47.693785Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:47.693896Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28735 TClient is connected to server localhost:28735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.228011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:48.246458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.327726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.496537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.567408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.727179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572217040243481:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.727264Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.786252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.819227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.889241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.934594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.013128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.051717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.089607Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572221335211275:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.089674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.089830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572221335211280:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.093561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:51.110085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572221335211282:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:52.216016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.340774Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034753363, txId: 281474976715673] shutting down 2025-04-07T14:05:54.244036Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034754245, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2025-04-07T14:05:54.508186Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.508273Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.508916Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-07T14:05:54.508968Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-04-07T14:05:54.509082Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-07T14:05:54.509106Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-07T14:05:54.509877Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-07T14:05:54.509932Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-04-07T14:05:54.510022Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-07T14:05:54.510048Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-07T14:05:54.661266Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.661892Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:206:2093] 2025-04-07T14:05:54.662305Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:54.662354Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-04-07T14:05:55.488220Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-1 (1 by [2:97:2132])' of unknown type 'wrong' to default queue 
2025-04-07T14:05:55.488304Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-1 (1 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488357Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-2 (2 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488416Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-6 (6 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488451Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-7 (7 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488509Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-10 (10 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488533Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-11 (11 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488554Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-12 (12 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488584Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-13 (13 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488637Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-17 (17 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488697Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-20 (20 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488719Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-21 (21 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488871Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-32 (32 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488925Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-37 (37 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488945Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-38 (38 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.488974Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-39 (39 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489030Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-41 (41 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489088Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-45 (45 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489186Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-55 (55 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489320Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-65 (65 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489352Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-66 (66 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489425Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-73 (73 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489489Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-77 (77 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489532Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-79 (79 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489708Z node 2 
:RESOURCE_BROKER ERROR: Assigning waiting task 'task-89 (89 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489769Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-93 (93 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489863Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-98 (98 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489914Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-100 (100 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489942Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-102 (102 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.489980Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-104 (104 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490063Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-107 (107 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490239Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-116 (116 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490348Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-122 (122 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490431Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-126 (126 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490563Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-132 (132 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490694Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-139 (139 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490749Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-140 (140 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490922Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-149 (149 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.490952Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-150 (150 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491077Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-157 (157 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491154Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-161 (161 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491242Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-169 (169 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491288Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-173 (173 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491359Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-177 (177 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491382Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-178 (178 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491411Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-180 (180 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491431Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-181 (181 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491456Z 
node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-183 (183 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491476Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-184 (184 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491622Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-196 (196 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491648Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-198 (198 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491665Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-199 (199 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491745Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-204 (204 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491776Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-206 (206 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491813Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-209 (209 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491867Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-213 (213 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.491954Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-221 (221 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492007Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-226 (226 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492076Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-231 (231 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492101Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-232 (232 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492134Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-233 (233 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492156Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-234 (234 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492214Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-240 (240 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492259Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-244 (244 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492287Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-246 (246 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492329Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-248 (248 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492361Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-249 (249 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492408Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-253 (253 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492441Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-254 (254 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492531Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-260 (260 by [2:97:2132])' of unknown type 'wrong' to default queue 
2025-04-07T14:05:55.492593Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-263 (263 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492622Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-264 (264 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492657Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-267 (267 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492720Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-270 (270 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492778Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-274 (274 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492844Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-277 (277 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492875Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-278 (278 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.492935Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-282 (282 by [2:97:2132])' of unknown type ... t queue 2025-04-07T14:05:55.518203Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-472 (472 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518232Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-491 (491 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518254Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-493 (493 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518309Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-577 (577 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518352Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-591 (591 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518400Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-611 (611 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518430Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-628 (628 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518458Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-633 (633 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518508Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-665 (665 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518530Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-675 (675 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518562Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-699 (699 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518590Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-723 (723 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518628Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-751 (751 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518648Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-753 (753 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518668Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-780 (780 by [2:97:2132])' of unknown type 'wrong' to default queue 
2025-04-07T14:05:55.518704Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-788 (788 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518736Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-791 (791 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518800Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-798 (798 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518880Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-857 (857 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518906Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-887 (887 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518940Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-897 (897 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518962Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-900 (900 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.518978Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-914 (914 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519022Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-929 (929 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519097Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-957 (957 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519129Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-960 (960 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519157Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-973 (973 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519212Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-996 (996 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519237Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-7 (7 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519261Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-10 (10 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519283Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-12 (12 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519310Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-20 (20 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519362Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-38 (38 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519385Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-45 (45 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519406Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-55 (55 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519421Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-66 (66 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519478Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-89 (89 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519527Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-150 (150 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519582Z node 
2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-184 (184 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519601Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-199 (199 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519647Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-233 (233 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519698Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-249 (249 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519724Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-270 (270 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519752Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-278 (278 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519776Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-282 (282 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519807Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-316 (316 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519829Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-348 (348 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519848Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-371 (371 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519862Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-384 (384 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519884Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-386 (386 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519909Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-396 (396 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519930Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-399 (399 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.519979Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-404 (404 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520006Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-410 (410 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520042Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-422 (422 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520077Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-423 (423 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520102Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-427 (427 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520131Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-450 (450 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520151Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-451 (451 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520173Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-485 (485 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520197Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-498 (498 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520218Z node 2 
:RESOURCE_BROKER ERROR: Assigning in-fly task 'task-509 (509 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520265Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-545 (545 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520376Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-616 (616 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520418Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-643 (643 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520439Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-649 (649 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520469Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-672 (672 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520528Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-741 (741 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520587Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-750 (750 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520632Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-774 (774 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520658Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-803 (803 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520747Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-841 (841 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520775Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-877 (877 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520817Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-905 (905 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520831Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-908 (908 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520879Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-926 (926 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520906Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-950 (950 by [2:97:2132])' of unknown type 'wrong' to default queue 2025-04-07T14:05:55.520956Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-990 (990 by [2:97:2132])' of unknown type 'wrong' to default queue |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo >> BootstrapperTest::MultipleBootstrappers [GOOD] >> KqpScan::MiltiExprWithPure [GOOD] >> KqpScan::LongStringCombiner >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-04-07T14:05:53.248152Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:53.248237Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:53.248277Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:53.249299Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-07T14:05:53.249351Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-07T14:05:53.249520Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-07T14:05:53.249547Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-04-07T14:05:53.249830Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-07T14:05:53.249862Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-04-07T14:05:53.250554Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2025-04-07T14:05:53.250776Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-07T14:05:53.250806Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-04-07T14:05:53.250975Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-07T14:05:53.251054Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-07T14:05:53.251079Z node 3 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-07T14:05:53.251249Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-07T14:05:53.251508Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-07T14:05:53.251537Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-04-07T14:05:53.425428Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:53.425906Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:53.426240Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:53.426273Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-07T14:05:53.472928Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:53.473495Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:53.473857Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:53.473889Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 3 (idx 1) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-04-07T14:05:54.080387Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-07T14:05:54.080454Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-07T14:05:54.080646Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-07T14:05:54.080722Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.080806Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-07T14:05:54.080828Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.081508Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:54.081653Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:54.082520Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:54.082558Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-07T14:05:54.082631Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:54.082649Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-04-07T14:05:54.594090Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-07T14:05:54.594162Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-07T14:05:54.594267Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-07T14:05:54.594309Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.594353Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-07T14:05:54.594390Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:54.595438Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:54.595823Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-07T14:05:54.596184Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-07T14:05:54.596222Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-07T14:05:54.596510Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-07T14:05:54.596553Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-04-07T14:05:54.596926Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-07T14:05:54.597042Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-04-07T14:05:54.597073Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) 2025-04-07T14:05:54.597468Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-04-07T14:05:54.597488Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-04-07T14:05:55.108143Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-04-07T14:05:55.108215Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:55.108342Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-04-07T14:05:55.108372Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:55.109093Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2025-04-07T14:05:55.109344Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-07T14:05:55.109723Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-07T14:05:55.109753Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 2025-04-07T14:05:55.109945Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-07T14:05:55.109962Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 ... disconnecting nodes 1 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-07T14:05:55.110237Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-04-07T14:05:55.110276Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED ... disconnecting nodes 1 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-07T14:05:55.110450Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-07T14:05:55.110489Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.116418s 2025-04-07T14:05:55.110533Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-04-07T14:05:55.110554Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2025-04-07T14:05:55.110619Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-07T14:05:55.110634Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-07T14:05:55.112075Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-04-07T14:05:55.112151Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:55.115054Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:386:2094] 2025-04-07T14:05:55.128854Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:55.128896Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-07T14:05:55.182669Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-07T14:05:55.183332Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:386:2094] 2025-04-07T14:05:55.183780Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-07T14:05:55.183819Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 1 <-> 0 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> KqpSplit::BorderKeys+Unspecified [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:99:2133] ... blocking block result NO_GROUP for [1:100:2133] ... blocking block result NO_GROUP for [1:101:2133] ... 
blocking block result NO_GROUP for [1:102:2133] >> KqpScan::RightOnlyJoinSimple [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> KqpSplit::IntersectionLosesRange+Descending [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> KqpScan::SqlInLiteral [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 23072, MsgBus: 61008 2025-04-07T14:05:46.895640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572198616529935:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:46.895706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c0/r3tmp/tmplymke4/pdisk_1.dat 2025-04-07T14:05:47.208559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23072, node 1 2025-04-07T14:05:47.287026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.287220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.289601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:47.302666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:47.302691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:47.302699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:47.302785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61008 TClient is connected to server localhost:61008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:47.861015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:47.878443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:47.883972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.998279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.132078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.212909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.691597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572211501433516:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.691696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.980757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.008162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.043647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.076061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.102712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.140084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.183610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215796401312:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.183680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.185295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215796401317:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.190439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.200012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572215796401319:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:51.212153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894rma84sawztqvmbf4dvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM4ZGViZTktMjQxZjRkMTgtOWExNjI3MjktNzRiY2M5MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-04-07T14:05:51.725915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034751256, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 24215, MsgBus: 18063 2025-04-07T14:05:52.376879Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572226277616264:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.376957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c0/r3tmp/tmpco9R5Y/pdisk_1.dat 2025-04-07T14:05:52.491168Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:52.503100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:52.503184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:52.507306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24215, node 2 2025-04-07T14:05:52.546958Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:52.546985Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:52.546999Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:52.547090Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18063 TClient is connected to server localhost:18063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:52.933924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:52.941758Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:52.956842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.019286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.180376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.254941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:55.107402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239162519843:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.107473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.142880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.167510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.189462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.212246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.236710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.264429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.297638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239162520332:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.297703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.297738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239162520338:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.300211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.306829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572239162520340:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:56.068338Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894xd163172b8xk7n3snb4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI3MTZiNTUtNDk2OWJiNmMtMzVhZGJkMzAtZjllMjE1MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:56.530648Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034756114, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::RightOnlyJoinSimple [GOOD] Test command err: Trying to start YDB, gRPC: 31351, MsgBus: 2687 2025-04-07T14:05:46.381074Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572201336585519:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:46.382145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c9/r3tmp/tmpPhjjEM/pdisk_1.dat 2025-04-07T14:05:46.743563Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31351, node 1 2025-04-07T14:05:46.804952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:46.808934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:46.885108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:46.912752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:46.912790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:46.912799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:46.912920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2687 TClient is connected to server localhost:2687 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:47.455288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.477117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.610759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.797263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.895574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.580156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572214221489092:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.580270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.779869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.812833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.840488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.905990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.934808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.965836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.030512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218516456883:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.030601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.034428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218516456888:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.040971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.053098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572218516456890:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:50.999152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:51.389737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572201336585519:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:51.389820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:51.563737Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034751592, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 19796, MsgBus: 8851 2025-04-07T14:05:52.289725Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572224608387831:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.289783Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c9/r3tmp/tmpCizue8/pdisk_1.dat 2025-04-07T14:05:52.376148Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:52.392589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:52.392686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:52.393826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19796, node 2 2025-04-07T14:05:52.445023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:52.445044Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:52.445049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:52.445123Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8851 TClient is connected to server localhost:8851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:52.790420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:52.798926Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:52.804860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:52.881742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.012152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.088251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:54.990720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572233198324140:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:54.990789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.021585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.044750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.108978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.130360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.152684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.180208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.256537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572237493291934:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.256608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.256618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572237493291939:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.259062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.265170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572237493291941:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:56.161086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.662869Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034756695, txId: 281474976715673] shutting down |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> TNetClassifierTest::TestInitFromFile >> TNetClassifierTest::TestInitFromRemoteSource |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2025-04-07T14:05:51.825284Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:6:2053] 2025-04-07T14:05:51.825553Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:7:2054] worker 0 2025-04-07T14:05:51.825602Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:8:2055] worker 1 2025-04-07T14:05:51.825631Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:9:2056] worker 2 2025-04-07T14:05:51.825665Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:10:2057] worker 3 2025-04-07T14:05:51.825692Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:11:2058] worker 4 2025-04-07T14:05:51.825719Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:12:2059] worker 5 2025-04-07T14:05:51.825754Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:13:2060] worker 6 2025-04-07T14:05:51.825779Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:14:2061] worker 7 2025-04-07T14:05:51.825808Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:15:2062] worker 8 2025-04-07T14:05:51.825836Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:16:2063] worker 9 Sending message to [1:8:2055] from [1:6:2053] id 1 Sending message to [1:9:2056] from [1:6:2053] id 2 Sending message to [1:10:2057] from [1:6:2053] id 3 Sending message 
to [1:11:2058] from [1:6:2053] id 4 Sending message to [1:12:2059] from [1:6:2053] id 5 Sending message to [1:13:2060] from [1:6:2053] id 6 Sending message to [1:14:2061] from [1:6:2053] id 7 Sending message to [1:15:2062] from [1:6:2053] id 8 Sending message to [1:16:2063] from [1:6:2053] id 9 Sending message to [1:7:2054] from [1:6:2053] id 10 2025-04-07T14:05:52.505368Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:16:2063] 2025-04-07T14:05:52.505531Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:7:2054] 2025-04-07T14:05:52.505571Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:8:2055] 2025-04-07T14:05:52.505635Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:9:2056] 2025-04-07T14:05:52.505677Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:10:2057] 2025-04-07T14:05:52.505712Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:11:2058] 2025-04-07T14:05:52.505752Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:12:2059] 2025-04-07T14:05:52.505785Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:13:2060] 2025-04-07T14:05:52.505898Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:14:2061] 2025-04-07T14:05:52.505950Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:15:2062] 2025-04-07T14:05:52.506238Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:7:2054] 2025-04-07T14:05:52.507718Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:7:2054] 2025-04-07T14:05:52.536283Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] 2025-04-07T14:05:52.556994Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:8:2055] 2025-04-07T14:05:52.558555Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:8:2055] 2025-04-07T14:05:52.588116Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:6:2053] 2025-04-07T14:05:52.608549Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:9:2056] 2025-04-07T14:05:52.609958Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:9:2056] 2025-04-07T14:05:52.637472Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:6:2053] 2025-04-07T14:05:52.660280Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:10:2057] 2025-04-07T14:05:52.661895Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:10:2057] 2025-04-07T14:05:52.693437Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:6:2053] 2025-04-07T14:05:52.716178Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:11:2058] 2025-04-07T14:05:52.717750Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:11:2058] 2025-04-07T14:05:52.749046Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:6:2053] 2025-04-07T14:05:52.771379Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:12:2059] 2025-04-07T14:05:52.772993Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:12:2059] 2025-04-07T14:05:52.803878Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:6:2053] 2025-04-07T14:05:52.826350Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got 
response node 6 [1:13:2060] 2025-04-07T14:05:52.827957Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:13:2060] 2025-04-07T14:05:52.859885Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:6:2053] 2025-04-07T14:05:52.881962Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:14:2061] 2025-04-07T14:05:52.883371Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:14:2061] 2025-04-07T14:05:52.911641Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:6:2053] 2025-04-07T14:05:52.931010Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:15:2062] 2025-04-07T14:05:52.932471Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:15:2062] 2025-04-07T14:05:52.962392Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:6:2053] 2025-04-07T14:05:52.984990Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:16:2063] 2025-04-07T14:05:52.986519Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:16:2063] 2025-04-07T14:05:53.017397Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:6:2053] 2025-04-07T14:05:53.039995Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:6:2053] 2025-04-07T14:05:53.040123Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:6:2053] 2025-04-07T14:05:53.045179Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:6:2053] 2025-04-07T14:05:53.045303Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:6:2053] 2025-04-07T14:05:53.050539Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:6:2053] 2025-04-07T14:05:53.050663Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:6:2053] 2025-04-07T14:05:53.055946Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:6:2053] 2025-04-07T14:05:53.056071Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:6:2053] 2025-04-07T14:05:53.062196Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:6:2053] 2025-04-07T14:05:53.062310Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:6:2053] 2025-04-07T14:05:53.067346Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:6:2053] 2025-04-07T14:05:53.067459Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:6:2053] 2025-04-07T14:05:53.072701Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:6:2053] 2025-04-07T14:05:53.072816Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:6:2053] 2025-04-07T14:05:53.080224Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:6:2053] 2025-04-07T14:05:53.080364Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:6:2053] 2025-04-07T14:05:53.085593Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:6:2053] 2025-04-07T14:05:53.085720Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:6:2053] 2025-04-07T14:05:53.089561Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:6:2053] 2025-04-07T14:05:53.089649Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:6:2053] 2025-04-07T14:05:53.093075Z node 1 :TABLET_AGGREGATOR INFO: aggregator 
request processed [1:6:2053] Initiator [1:5:2052] TEST 2 10 duration 1.410272s 2025-04-07T14:05:53.306505Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:6:2053] 2025-04-07T14:05:53.306899Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:7:2054] worker 0 2025-04-07T14:05:53.306943Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:8:2055] worker 1 2025-04-07T14:05:53.306970Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:9:2056] worker 2 2025-04-07T14:05:53.306995Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:10:2057] worker 3 2025-04-07T14:05:53.307018Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:11:2058] worker 4 2025-04-07T14:05:53.307058Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:12:2059] worker 5 2025-04-07T14:05:53.307082Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:13:2060] worker 6 2025-04-07T14:05:53.307125Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:14:2061] worker 7 2025-04-07T14:05:53.307153Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:15:2062] worker 8 2025-04-07T14:05:53.307190Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:16:2063] worker 9 2025-04-07T14:05:53.307214Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:17:2064] worker 10 2025-04-07T14:05:53.307239Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:18:2065] worker 11 2025-04-07T14:05:53.307261Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:19:2066] worker 12 2025-04-07T14:05:53.307305Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:20:2067] worker 13 2025-04-07T14:05:53.307336Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:21:2068] worker 14 2025-04-07T14:05:53.307363Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:22:2069] worker 15 2025-04-07T14:05:53.307389Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:23:2070] worker 16 2025-04-07T14:05:53.307415Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:24:2071] worker 17 2025-04-07T14:05:53.307439Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:25:2072] worker 18 2025-04-07T14:05:53.307480Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:26:2073] worker 19 Sending message to [2:8:2055] from [2:6:2053] id 1 Sending message to [2:9:2056] from [2:6:2053] id 2 Sending message to [2:10:2057] from [2:6:2053] id 3 Sending message to [2:11:2058] from [2:6:2053] id 4 Sending message to [2:12:2059] from [2:6:2053] id 5 Sending message to [2:13:2060] from [2:6:2053] id 6 Sending message to [2:14:2061] from [2:6:2053] id 7 Sending message to [2:15:2062] from [2:6:2053] id 8 Sending message to [2:16:2063] from [2:6:2053] id ... 
response node 15 [2:6:2053] 2025-04-07T14:05:54.409375Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 16 [2:6:2053] 2025-04-07T14:05:54.409389Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 16 [2:6:2053] 2025-04-07T14:05:54.409403Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 17 [2:6:2053] 2025-04-07T14:05:54.409414Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 17 [2:6:2053] 2025-04-07T14:05:54.409442Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:6:2053] 2025-04-07T14:05:54.409453Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:6:2053] 2025-04-07T14:05:54.409485Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:6:2053] 2025-04-07T14:05:54.409496Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:6:2053] 2025-04-07T14:05:54.409508Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:6:2053] 2025-04-07T14:05:54.409535Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:6:2053] 2025-04-07T14:05:54.409559Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:6:2053] 2025-04-07T14:05:54.409649Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:6:2053] 2025-04-07T14:05:54.412729Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:6:2053] 2025-04-07T14:05:54.412815Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:6:2053] 2025-04-07T14:05:54.415922Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:6:2053] 2025-04-07T14:05:54.416001Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:6:2053] 2025-04-07T14:05:54.419096Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:6:2053] 2025-04-07T14:05:54.419177Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:6:2053] 2025-04-07T14:05:54.422752Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:6:2053] 2025-04-07T14:05:54.422833Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:6:2053] 2025-04-07T14:05:54.426192Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:6:2053] 2025-04-07T14:05:54.426273Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:6:2053] 2025-04-07T14:05:54.429678Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:6:2053] 2025-04-07T14:05:54.429768Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:6:2053] 2025-04-07T14:05:54.434541Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:6:2053] 2025-04-07T14:05:54.434667Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:6:2053] 2025-04-07T14:05:54.438324Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:6:2053] 2025-04-07T14:05:54.438408Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:6:2053] 2025-04-07T14:05:54.441896Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:6:2053] 2025-04-07T14:05:54.441980Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:6:2053] 2025-04-07T14:05:54.445659Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:6:2053] Initiator [2:5:2052] TEST 2 20 duration 1.269177s 2025-04-07T14:05:54.790160Z node 3 :TABLET_AGGREGATOR INFO: aggregator new 
request V2 [3:6:2053] 2025-04-07T14:05:54.790271Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:6:2053] self [3:7:2054] worker 0 Sending message to [3:7:2054] from [3:6:2053] id 1 Sending message to [3:7:2054] from [3:6:2053] id 2 Sending message to [3:7:2054] from [3:6:2053] id 3 Sending message to [3:7:2054] from [3:6:2053] id 4 Sending message to [3:7:2054] from [3:6:2053] id 5 Sending message to [3:7:2054] from [3:6:2053] id 6 Sending message to [3:7:2054] from [3:6:2053] id 7 Sending message to [3:7:2054] from [3:6:2053] id 8 Sending message to [3:7:2054] from [3:6:2053] id 9 Sending message to [3:7:2054] from [3:6:2053] id 10 2025-04-07T14:05:55.403427Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:7:2054] 2025-04-07T14:05:55.403477Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:7:2054] 2025-04-07T14:05:55.403507Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:7:2054] 2025-04-07T14:05:55.403579Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:7:2054] 2025-04-07T14:05:55.403620Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:7:2054] 2025-04-07T14:05:55.403671Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:7:2054] 2025-04-07T14:05:55.403722Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:7:2054] 2025-04-07T14:05:55.403752Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:7:2054] 2025-04-07T14:05:55.403795Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:7:2054] 2025-04-07T14:05:55.403862Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:7:2054] 2025-04-07T14:05:55.404203Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:7:2054] 2025-04-07T14:05:55.405489Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:7:2054] 2025-04-07T14:05:55.429883Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:7:2054] 2025-04-07T14:05:55.431368Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:7:2054] 2025-04-07T14:05:55.461688Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:7:2054] 2025-04-07T14:05:55.463205Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:7:2054] 2025-04-07T14:05:55.497483Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:7:2054] 2025-04-07T14:05:55.499021Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:7:2054] 2025-04-07T14:05:55.522287Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:7:2054] 2025-04-07T14:05:55.523215Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:7:2054] 2025-04-07T14:05:55.553187Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:7:2054] 2025-04-07T14:05:55.554241Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:7:2054] 2025-04-07T14:05:55.576682Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:7:2054] 2025-04-07T14:05:55.577779Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:7:2054] 2025-04-07T14:05:55.598915Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:7:2054] 2025-04-07T14:05:55.600044Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:7:2054] 2025-04-07T14:05:55.628477Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 9 [3:7:2054] 2025-04-07T14:05:55.630181Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:7:2054] 2025-04-07T14:05:55.657578Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:7:2054] 2025-04-07T14:05:55.658547Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:7:2054] 2025-04-07T14:05:55.693150Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] 2025-04-07T14:05:55.894467Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:6:2053] 2025-04-07T14:05:55.895057Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:6:2053] 2025-04-07T14:05:55.934932Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:6:2053] Initiator [3:5:2052] TEST 2 1 duration 1.289291s 2025-04-07T14:05:56.262577Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:5:2052] self [4:6:2053] worker 0 Sending message to [4:6:2053] from [4:6:2053] id 1 Sending message to [4:6:2053] from [4:6:2053] id 2 Sending message to [4:6:2053] from [4:6:2053] id 3 Sending message to [4:6:2053] from [4:6:2053] id 4 Sending message to [4:6:2053] from [4:6:2053] id 5 Sending message to [4:6:2053] from [4:6:2053] id 6 Sending message to [4:6:2053] from [4:6:2053] id 7 Sending message to [4:6:2053] from [4:6:2053] id 8 Sending message to [4:6:2053] from [4:6:2053] id 9 Sending message to [4:6:2053] from [4:6:2053] id 10 2025-04-07T14:05:56.887421Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:6:2053] 2025-04-07T14:05:56.887460Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:6:2053] 2025-04-07T14:05:56.887477Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:6:2053] 2025-04-07T14:05:56.887491Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:6:2053] 2025-04-07T14:05:56.887544Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:6:2053] 2025-04-07T14:05:56.887572Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:6:2053] 2025-04-07T14:05:56.887602Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:6:2053] 2025-04-07T14:05:56.887623Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:6:2053] 2025-04-07T14:05:56.887644Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:6:2053] 2025-04-07T14:05:56.887675Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:6:2053] 2025-04-07T14:05:56.887881Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:6:2053] 2025-04-07T14:05:56.889112Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:6:2053] 2025-04-07T14:05:56.907956Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:6:2053] 2025-04-07T14:05:56.909051Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:6:2053] 2025-04-07T14:05:56.930464Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:6:2053] 2025-04-07T14:05:56.931429Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:6:2053] 2025-04-07T14:05:56.953420Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:6:2053] 2025-04-07T14:05:56.954459Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:6:2053] 2025-04-07T14:05:56.973202Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:6:2053] 
2025-04-07T14:05:56.974261Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:6:2053] 2025-04-07T14:05:57.005220Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:6:2053] 2025-04-07T14:05:57.006296Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:6:2053] 2025-04-07T14:05:57.030331Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:6:2053] 2025-04-07T14:05:57.031890Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:6:2053] 2025-04-07T14:05:57.053056Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:6:2053] 2025-04-07T14:05:57.053979Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:6:2053] 2025-04-07T14:05:57.073679Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:6:2053] 2025-04-07T14:05:57.074834Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:6:2053] 2025-04-07T14:05:57.100640Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:6:2053] 2025-04-07T14:05:57.102363Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:6:2053] 2025-04-07T14:05:57.146434Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:6:2053] Initiator [4:5:2052] TEST 2 1 duration 1.149074s >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::TestListNodesEpochDeltas >> TSlotIndexesPoolTest::Expansion [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TNodeBrokerTest::FixedNodeId |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 29423, MsgBus: 63479 2025-04-07T14:05:47.353289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572205436911104:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.353354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015bb/r3tmp/tmpDbooNy/pdisk_1.dat 2025-04-07T14:05:47.769424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:47.771471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.771534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.776572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29423, node 1 2025-04-07T14:05:47.873118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:47.873141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:47.873147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:47.873219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:63479 TClient is connected to server localhost:63479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.443346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.458388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.470522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.594745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.728603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.788261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.382814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218321814699:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.382956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.606164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.638781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.681271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.702802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.731185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.757119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.810494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218321815192:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.810580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.810711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218321815197:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.813524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.821148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572218321815199:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:52.085136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr894s56d029ce61b77fkzyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1NjFhMTktOGVlYmUwNmYtNmYxNzUyZC1jOTRiODVlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:52.144437Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034752131, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 21942, MsgBus: 24396 2025-04-07T14:05:52.910340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572225472657163:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.910502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015bb/r3tmp/tmpz0umgD/pdisk_1.dat 2025-04-07T14:05:53.016500Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21942, node 2 2025-04-07T14:05:53.065239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:53.065329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:53.074156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:53.096787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:53.096812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:53.096820Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:53.096909Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24396 TClient is connected to server localhost:24396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:05:53.505893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.513250Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:53.539602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:53.594911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:53.748775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:53.813387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.750484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572238357560750:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.750567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.786617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.811896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.832910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.857370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.886060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.917571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.954643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572238357561242:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.954720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572238357561247:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.954755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.957674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.966601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572238357561249:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:57.010495Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr894y2m31zwn6kkca3qqh88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmEyZWQyOTEtN2UyOGQ0NmQtNzM0ZDg5M2YtYzYwOTFjZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-04-07T14:05:57.081774Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034757052, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SqlInLiteral [GOOD] Test command err: Trying to start YDB, gRPC: 10279, MsgBus: 22626 2025-04-07T14:05:47.676964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572203419400792:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.677007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015a4/r3tmp/tmpF5S8UW/pdisk_1.dat 2025-04-07T14:05:48.013202Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10279, node 1 2025-04-07T14:05:48.122511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:48.137041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:48.139636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:48.153141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:48.153162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:48.153167Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:48.153266Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22626 TClient is connected to server localhost:22626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.591086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.603317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.619283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.747148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.866254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.945853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.627365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572216304304382:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.627486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.915037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.945002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.971486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.007410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.031376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.097213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.176058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572220599272181:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.176133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.176191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572220599272186:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.179419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:51.202578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572220599272188:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:52.447880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034752481, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 2266, MsgBus: 9338 2025-04-07T14:05:53.210084Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572228035862285:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015a4/r3tmp/tmpFo8TT3/pdisk_1.dat 2025-04-07T14:05:53.257206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:53.308174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:53.330363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:53.330452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:53.337199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2266, node 2 2025-04-07T14:05:53.401220Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:53.401248Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:53.401258Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:53.401358Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9338 TClient is connected to server localhost:9338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:53.776970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.789625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.866716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:54.002831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:54.067706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.028001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572240920765734:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.028083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.045435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.072093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.097299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.120128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.141003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.204390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.240307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572240920766232:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.240379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.240434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572240920766237:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.242739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:56.249753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572240920766239:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:57.377976Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034757416, txId: 281474976715672] shutting down |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |92.0%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> TSlotIndexesPoolTest::Ranges [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> KqpScan::CrossJoinOneColumn [GOOD] >> KqpScan::CrossJoinCount |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] >> TDynamicNameserverTest::BasicFunctionality >> TNodeBrokerTest::SingleDomainModeBannedIds >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TNodeBrokerTest::ExtendLeaseRestartRace >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpScan::AggregateWithFunction [GOOD] >> KqpScan::BoolFlag >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-04-07T14:05:59.359461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:59.359530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:59.430798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TNodeBrokerTest::FixedNodeId [GOOD] >> KqpScan::FullFrameWindow [GOOD] >> KqpScan::EmptySet >> TLocalTests::TestAlterTenant [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> 
TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TLocalTests::TestAddTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-04-07T14:06:00.403566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:00.403629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:00.483781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:06:00.526736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TSlotIndexesPoolTest::Basic [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TNodeBrokerTest::NodeNameExpiration >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-04-07T14:05:59.390576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:59.390625Z node 1 :IMPORT WARN: Table profiles were not loaded |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] >> GracefulShutdown::TTxGracefulShutdown |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-04-07T14:06:00.714930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:00.714994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:00.826285Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-04-07T14:06:00.851671Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-07T14:06:00.864560Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-04-07T14:06:01.151129Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-07T14:06:01.169583Z node 1 :NODE_BROKER ERROR: Cannot register 
node host4:1001: ERROR_TEMP: No free node IDs >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-04-07T14:06:00.844337Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:00.844866Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:00.845471Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14482360314245111751 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:00.850997Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:00.851481Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:00.851658Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001559/r3tmp/tmpC8P6bk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3777600245025690608 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges >> TNodeBrokerTest::NodeNameReuseRestart >> TLocalTests::TestAddTenant [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-04-07T14:05:58.600679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572249843704192:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:58.600777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002141/r3tmp/tmp5iHXux/pdisk_1.dat 2025-04-07T14:05:58.978411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:58.984966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:58.985443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:58.988392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:59.028246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/002141/r3tmp/yandexTNorpu.tmp 2025-04-07T14:05:59.028267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/002141/r3tmp/yandexTNorpu.tmp 2025-04-07T14:05:59.028409Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-04-07T14:05:59.028461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/u6pr/002141/r3tmp/yandexTNorpu.tmp 
2025-04-07T14:05:59.028561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> KqpScan::LongStringCombiner [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-04-07T14:05:58.613171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572250540728040:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:58.613226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0020dc/r3tmp/tmpkZNLOH/pdisk_1.dat 2025-04-07T14:05:58.968769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:59.002365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:59.002459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:59.003983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:59.024361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0020dc/r3tmp/yandexc0lCpZ.tmp 2025-04-07T14:05:59.024388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0020dc/r3tmp/yandexc0lCpZ.tmp 2025-04-07T14:05:59.024771Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0020dc/r3tmp/yandexc0lCpZ.tmp 2025-04-07T14:05:59.024884Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-04-07T14:05:58.600510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572249046467705:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:58.601166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002097/r3tmp/tmpNd4Q1l/pdisk_1.dat 2025-04-07T14:05:58.986441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:58.989394Z node 1 :HTTP ERROR: (#30,[::1]:15985) connection closed with error: Connection refused 2025-04-07T14:05:58.989732Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:05:58.995253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:58.995359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:58.997041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:59.025312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-07T14:05:59.025337Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:59.025342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:59.025438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-04-07T14:05:59.387897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:59.387956Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-04-07T14:06:01.991952Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-04-07T14:06:01.992209Z node 1 :LOCAL ERROR: Unknown domain dc-3 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-04-07T14:05:59.398980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:59.399051Z node 1 :IMPORT WARN: Table profiles were not loaded >> TNodeBrokerTest::TestListNodes [GOOD] >> TDynamicNameserverTest::TestCacheUsage >> TTenantPoolTests::TestStateStatic >> KqpScan::UdfFailure [GOOD] >> TDynamicNameserverTest::BasicFunctionality [GOOD] |92.1%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-04-07T14:05:59.381254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:59.381316Z node 1 :IMPORT WARN: Table profiles were not loaded |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::LongStringCombiner [GOOD] Test command err: Trying to start YDB, gRPC: 10798, MsgBus: 2715 2025-04-07T14:05:47.570157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572204720460585:2242];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.570198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159b/r3tmp/tmpL6tB5j/pdisk_1.dat 2025-04-07T14:05:47.979146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10798, node 1 2025-04-07T14:05:48.016043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:48.016340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:48.028027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:48.046265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:48.046282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:48.046288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:48.046370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2715 TClient is connected to server localhost:2715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.533948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.567428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.686847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.823551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.904501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.389555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572217605363983:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.389651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.638612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.677599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.699318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.719801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.794933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.827803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.926104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572217605364488:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.926180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572217605364493:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.926198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.929369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.940239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572217605364495:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:51.925829Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572221900332110:2464], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 2025-04-07T14:05:51.926133Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjYxNGRjOTEtNjJhODhmNTgtYzAwNzY4MjktYzkxNzg5YWQ=, ActorId: [1:7490572221900332103:2460], ActorState: ExecuteState, TraceId: 01jr894sbnbc19yn7texpsmwj8, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 Trying to start YDB, gRPC: 15085, MsgBus: 16422 2025-04-07T14:05:52.717554Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572226590893343:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.717622Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159b/r3tmp/tmpv4G9Gq/pdisk_1.dat 2025-04-07T14:05:52.826620Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:52.845391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:52.845466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:52.847074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15085, node 2 2025-04-07T14:05:52.901697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:52.901714Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:52.901728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:52.901832Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16422 TClient is connected to server localhost:16422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:53.376881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:53.388985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:53.470337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:53.650798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.727158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation typ ... VICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239475796918:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.252567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.279689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.303121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.323990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.343794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.365270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.394337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.465750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239475797414:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.465826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.465894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239475797419:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.469519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.477579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572239475797421:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:56.253560Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572243770765029:2462], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 2025-04-07T14:05:56.253734Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzJlYzE2ZGUtZDVjNDIzMjgtM2UwZTRmYjYtYzFjYzM4ZTI=, ActorId: [2:7490572243770765022:2458], ActorState: ExecuteState, TraceId: 01jr894xkt7cv7wkcnhsx56mbm, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 Trying to start YDB, gRPC: 3680, MsgBus: 19788 2025-04-07T14:05:56.775720Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572240675294890:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:56.775778Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00159b/r3tmp/tmpUZfgUX/pdisk_1.dat 2025-04-07T14:05:56.839592Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3680, node 3 2025-04-07T14:05:56.890993Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:56.891016Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:56.891024Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:56.891123Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:56.894499Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:56.894558Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:56.895698Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19788 TClient is connected to server localhost:19788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:57.212744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:57.220452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:57.291864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:57.415363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:57.487041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:59.387296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572253560198479:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:59.387457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:59.411732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.437221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.463592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.491543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.524646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.559173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.634131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572253560198975:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:59.634186Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572253560198980:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:59.634210Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:59.636776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:59.668390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572253560198982:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:01.595873Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034761021, txId: 281474976715671] shutting down 2025-04-07T14:06:01.776266Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572240675294890:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:01.776366Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2025-04-07T14:06:00.875196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:00.875254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:00.967504Z node 1 :NODE_BROKER ERROR: Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TLocalTests::TestAddTenantWhileResolving [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] Test command err: 2025-04-07T14:04:51.548423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571963813401508:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.548485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.652961Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571964335029295:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.655466Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.782194Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b1e/r3tmp/tmp58IWCV/pdisk_1.dat 2025-04-07T14:04:51.797244Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:52.047167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.047282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.048206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.048258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.051383Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:52.051520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.072266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.085654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15721, node 1 2025-04-07T14:04:52.101644Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.101684Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.323531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b1e/r3tmp/yandexkVcfKZ.tmp 2025-04-07T14:04:52.323556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b1e/r3tmp/yandexkVcfKZ.tmp 2025-04-07T14:04:52.323720Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b1e/r3tmp/yandexkVcfKZ.tmp 2025-04-07T14:04:52.323814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:53.757126Z INFO: TTestServer started on Port 18484 GrpcPort 15721 TClient is connected to server localhost:18484 PQClient connected to localhost:15721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:54.073523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:54.168283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.445249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571976698304243:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.445274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571976698304235:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.445353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.448753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.466247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571976698304252:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } waiting... 2025-04-07T14:04:55.849038Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571976698304430:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.849183Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571977219931454:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.860317Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzIxNTk4N2MtOWZmNDk5MTYtZDgwM2ZkMGItOTdiY2M5MzU=, ActorId: [2:7490571977219931430:2283], ActorState: ExecuteState, TraceId: 01jr8931dceb06v91gzz45ezfp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.869041Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:55.876252Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVkOGRjMjItOWFlN2RiYzgtMmU3ZDIwNjMtNDc4NmM5N2E=, ActorId: [1:7490571976698304232:2297], ActorState: ExecuteState, TraceId: 01jr89319b9jhn6z6rz40xa3hv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.876697Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:56.208367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.278008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.412200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:04:56.548735Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571963813401508:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.548788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:56.652624Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571964335029295:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.652685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:57.905850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89338k09n2j6ba20jcchwr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzZDgwOTgtNWI5NTg5ZWEtZWJjYTk0MzYtN2IxMTY4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490571993878174150:3134] 2025-04-07T14:04:58.075308Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 780 } 2025- ... ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.888742Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490572215440934765:2324], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:05:50.889445Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZGI1MTcyMmYtNGE3MzIyMTYtM2UwZjA1NDAtNTdjN2RlYjg=, ActorId: [9:7490572215440934551:2305], ActorState: ExecuteState, TraceId: 01jr894r8t1rf3f8vr81zh1s2b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:05:50.889958Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:05:50.930480Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.931210Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7490572217360674661:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:05:50.937477Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZDEwMzhkYTYtZmQwYjA1OWEtMmM4MWFmMWEtYThjNWU2M2Q=, ActorId: [10:7490572217360674589:2285], ActorState: ExecuteState, TraceId: 01jr894r93dy1mq81dy28b8xfc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:05:50.940175Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:05:51.069724Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:05:51.360006Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr894rnydrzncf4a9cqhw35j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZjBlMWVlMWEtODg2NzIyNTAtYjNkZWQyYWQtZDQ4N2I5M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7490572219735902385:3081] 2025-04-07T14:05:51.457669Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7490572198261064288:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:51.457752Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:51.488489Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490572200180805218:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:51.488585Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T14:05:56.527527Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:05:56.527552Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:05:56.527561Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:05:56.527581Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) InitTable: SourceId=A_Source TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-07T14:05:56.531670Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-04-07T14:05:57.213018Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:05:57.863495Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.632729Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:05:59.376731Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T14:06:00.123124Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T14:06:00.800261Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7490572198261064285:2049], Recipient [9:7490572241210739324:3347]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-07T14:06:00.800310Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) StartKqpSession 2025-04-07T14:06:00.802701Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7490572198261064502:2256], Recipient [9:7490572241210739324:3347]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=ZjAxMzNiM2YtZjhkOGY1NjktOGFiY2FjMWMtNWUwZDY0ZGI=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-07T14:06:00.802736Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) Select from the table 2025-04-07T14:06:00.951762Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7490572198261064502:2256], Recipient [9:7490572241210739324:3347]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZjAxMzNiM2YtZjhkOGY1NjktOGFiY2FjMWMtNWUwZDY0ZGI=" PreparedQuery: "f6a8e795-33b3bba7-8437fcc5-cdaa6ed8" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr89527hd1n72kkkrzg19n5j" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 92 2025-04-07T14:06:00.951919Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) Selected from table PartitionId=(NULL) SeqNo=(NULL) 2025-04-07T14:06:00.951938Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) OnPartitionChosen 2025-04-07T14:06:00.951952Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) Update the table 2025-04-07T14:06:01.107597Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7490572198261064502:2256], Recipient [9:7490572241210739324:3347]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZjAxMzNiM2YtZjhkOGY1NjktOGFiY2FjMWMtNWUwZDY0ZGI=" PreparedQuery: "f86aa7a2-ff754884-2459cc1-d4c77990" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 91 Received TEvChooseResult: 0 2025-04-07T14:06:01.107643Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:06:01.107703Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:06:01.107730Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572241210739324:3347] (SourceId=A_Source, PreferedPartition=0) Start idle 2025-04-07T14:06:01.636285Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 
2025-04-07T14:06:01.636336Z node 9 :IMPORT WARN: Table profiles were not loaded |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UdfFailure [GOOD] Test command err: Trying to start YDB, gRPC: 2283, MsgBus: 19107 2025-04-07T14:05:47.319063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572205619139293:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.319541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015b7/r3tmp/tmpj1YMK9/pdisk_1.dat 2025-04-07T14:05:47.658276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2283, node 1 2025-04-07T14:05:47.709236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.709422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.715461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:47.741170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:47.741190Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:47.741195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:47.741538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19107 TClient is connected to server localhost:19107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.190564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.205283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.217917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:48.358462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.501662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.580060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.072695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218504042884:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.072802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.302645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.337023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.360283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.383505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.411129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.445655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.506439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218504043377:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.506501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.506641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572218504043382:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.508968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.518160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572218504043384:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:52.406867Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572205619139293:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.408419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:52.484915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034751851, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 22068, MsgBus: 22224 2025-04-07T14:05:53.268047Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572227999992927:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:53.268087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015b7/r3tmp/tmplxu13J/pdisk_1.dat 2025-04-07T14:05:53.358774Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:53.371745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:53.371815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:53.377461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22068, node 2 2025-04-07T14:05:53.420804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:53.420832Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:53.420839Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:53.420924Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22224 TClient is connected to server localhost:22224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:53.804965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:53.818180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.859851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.976995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:54.061137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.030037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: ... node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.198732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:56.235448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572240884897011:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.235530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.235552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572240884897016:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:56.238726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:56.247153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572240884897018:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:57.176406Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572245179864654:2466], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr894yd84gmbhjfer4xnnx0s. SessionId : ydb://session/3?node_id=2&id=ZGQ4ZGIxMGMtNWU3MzJjYmYtNDU2ZWIxZjYtOTBmNjJkOGE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2025-04-07T14:05:57.177157Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490572245179864655:2467], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGQ4ZGIxMGMtNWU3MzJjYmYtNDU2ZWIxZjYtOTBmNjJkOGE=. TraceId : 01jr894yd84gmbhjfer4xnnx0s. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-07T14:05:57.183637Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQ4ZGIxMGMtNWU3MzJjYmYtNDU2ZWIxZjYtOTBmNjJkOGE=, ActorId: [2:7490572245179864618:2459], ActorState: ExecuteState, TraceId: 01jr894yd84gmbhjfer4xnnx0s, Create QueryResponse for error on request, msg: 2025-04-07T14:05:57.184001Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034757178, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 14071, MsgBus: 26404 2025-04-07T14:05:57.781471Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572245107991081:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:57.781564Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015b7/r3tmp/tmpz7OJc2/pdisk_1.dat 2025-04-07T14:05:57.863214Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14071, node 3 2025-04-07T14:05:57.906477Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:57.906545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:57.908018Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:57.919836Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:57.919862Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:57.919873Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:57.919962Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26404 TClient is connected to server localhost:26404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:58.365168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:58.382906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:58.454970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:58.591196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:58.669400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:00.928313Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572257992894679:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:00.928390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:00.966557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:00.995068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:01.023463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:01.050832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:01.080305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:01.112125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:01.151946Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572262287862470:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:01.152031Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:01.152195Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572262287862475:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:01.155775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:01.166268Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572262287862477:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:02.250210Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490572266582830116:2467], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=N2QyMmMwZjItYzQ0ODFmNDQtMjc4NGZmYmUtZjdiOWU4ODU=. TraceId : 01jr8953bycvppb0k6w33z9dv0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2025-04-07T14:06:02.250644Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490572266582830118:2468], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr8953bycvppb0k6w33z9dv0. SessionId : ydb://session/3?node_id=3&id=N2QyMmMwZjItYzQ0ODFmNDQtMjc4NGZmYmUtZjdiOWU4ODU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-07T14:06:02.251832Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2QyMmMwZjItYzQ0ODFmNDQtMjc4NGZmYmUtZjdiOWU4ODU=, ActorId: [3:7490572266582830082:2460], ActorState: ExecuteState, TraceId: 01jr8953bycvppb0k6w33z9dv0, Create QueryResponse for error on request, msg: 2025-04-07T14:06:02.252146Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034762288, txId: 281474976715671] shutting down >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::BasicFunctionality >> TNodeBrokerTest::TestRandomActions >> TNodeBrokerTest::MinDynamicNodeIdShifted |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> GracefulShutdown::TTxGracefulShutdown [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-04-07T14:06:00.942964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:00.943025Z node 1 :IMPORT WARN: Table profiles were not loaded ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... captured cache request ... sending extend lease request ... waiting for response ... waiting for epoch update ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-04-07T14:06:03.076913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:03.076968Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-04-07T14:06:02.026460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:02.026506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:02.042866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:06:02.076335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-04-07T14:06:02.392264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:02.392344Z node 1 :IMPORT WARN: 
Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-04-07T14:06:02.739987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:02.740038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:02.755650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-04-07T14:06:02.071400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:02.071466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:02.088128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-04-07T14:06:02.709877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:02.709927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:02.727069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> KqpScan::EmptySet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 
72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:05:10.073986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:05:10.074129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.074175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:05:10.074211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:05:10.083682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:05:10.083766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:05:10.083876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:05:10.093066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:05:10.203919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:05:10.203969Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.211792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:05:10.211876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:05:10.211997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:05:10.231671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:05:10.231782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:05:10.244142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.244546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:05:10.286030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.353931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.354064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:05:10.354106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.354142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:05:10.354227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 
72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:05:10.360149Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:05:10.468118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:05:10.468280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.472350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:05:10.490289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:05:10.490406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.501265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.518967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:05:10.519291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.519358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:05:10.519413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:05:10.519449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:05:10.521504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:05:10.521646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:05:10.523163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.523241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.523283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.535724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:05:10.537551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:05:10.546364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:05:10.547264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:05:10.547432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:05:10.547714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:05:10.547874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:05:10.547967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:05:10.549827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:05:10.549866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:05:10.549997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:05:10.550337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:05:10.550378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:05:10.550459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:05:10.550490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:05:10.550540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:05:10.550581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:05:10.550628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:05:10.550659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:05:10.550706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:05:10.550758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:05:10.550786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Pu ... 6258Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:04.706368Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:06:04.706486Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:04.706510Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-04-07T14:06:04.706540Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-04-07T14:06:04.707125Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-07T14:06:04.707163Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2025-04-07T14:06:04.708367Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.708424Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.708445Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-07T14:06:04.708469Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:06:04.708494Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:06:04.708942Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.709005Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.709025Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-07T14:06:04.709045Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 
72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:06:04.709070Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:06:04.709111Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-04-07T14:06:04.709808Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 702 } } 2025-04-07T14:06:04.709838Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-07T14:06:04.709916Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 702 } } 2025-04-07T14:06:04.709974Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 702 } } 2025-04-07T14:06:04.710303Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 309237647627 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-07T14:06:04.710328Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-07T14:06:04.710392Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 309237647627 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-07T14:06:04.710420Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:06:04.710467Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 309237647627 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-07T14:06:04.710503Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:04.710524Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-07T14:06:04.710546Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:06:04.710570Z node 72 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1002:0 129 -> 240 2025-04-07T14:06:04.712985Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.713078Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-07T14:06:04.713386Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-07T14:06:04.713459Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-07T14:06:04.713676Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-07T14:06:04.713702Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-04-07T14:06:04.713760Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-07T14:06:04.713782Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-07T14:06:04.713808Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-04-07T14:06:04.713834Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-07T14:06:04.713859Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-04-07T14:06:04.713878Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-04-07T14:06:04.713948Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-04-07T14:06:04.715565Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-04-07T14:06:04.715594Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-04-07T14:06:04.715798Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-04-07T14:06:04.715845Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-04-07T14:06:04.715866Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:400:2375] TestWaitNotification: OK eventTxId 1002 2025-04-07T14:06:04.716104Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:06:04.716231Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 147us result status StatusSuccess 2025-04-07T14:06:04.716558Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScan::BoolFlag [GOOD] >> KqpScan::CrossJoinCount [GOOD] >> KqpScan::EarlyFinish [GOOD] >> KqpScan::DropRedundantSortByPk >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet [GOOD] Test command err: Trying to start YDB, gRPC: 4989, MsgBus: 13954 2025-04-07T14:05:47.487767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572202632994861:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.487817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015a9/r3tmp/tmpEdbwbn/pdisk_1.dat 2025-04-07T14:05:47.936404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4989, node 1 2025-04-07T14:05:47.942282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.942381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.947552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:48.008608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:48.008634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:48.008641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-07T14:05:48.008752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13954 TClient is connected to server localhost:13954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.561428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.578690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.595229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:05:48.720307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:05:48.878330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.935893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.499708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215517898324:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.499867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.726714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.753212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.776380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.797786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.825882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.896820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.949780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215517898826:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.949852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.950275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215517898831:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.954718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.969744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572215517898833:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:51.926947Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572219812866445:2464], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 2025-04-07T14:05:51.927181Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmIxNDJkOTQtMzQ4Yjk4MzMtZDBjZWY2YWYtOGIyMzU2MDI=, ActorId: [1:7490572219812866438:2460], ActorState: ExecuteState, TraceId: 01jr894s91eq50cbys6bw5y6mq, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 16396, MsgBus: 9051 2025-04-07T14:05:52.592273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572226123980515:2075];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015a9/r3tmp/tmpdONvxs/pdisk_1.dat 2025-04-07T14:05:52.609661Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:05:52.689354Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16396, node 2 2025-04-07T14:05:52.715608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:52.715702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:52.734174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:52.753156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:52.753179Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:52.753186Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:52.753271Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9051 TClient is connected to server localhost:9051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:53.158317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.174905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.250437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:53.395723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.459418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:55.337416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239008884054:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.337473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.363720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.388119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.413663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.437731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.464033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.490675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.546529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239008884544:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.546599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.546668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572239008884549:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.549569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.556844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572239008884551:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:00.468472Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572226123980515:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:00.471356Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:00.502971Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034757094, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 8643, MsgBus: 3919 2025-04-07T14:06:01.086504Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572263648256565:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:01.086569Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015a9/r3tmp/tmp1w81ag/pdisk_1.dat 2025-04-07T14:06:01.173135Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8643, node 3 2025-04-07T14:06:01.213467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:01.213998Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:01.218232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:01.241177Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:01.241195Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:01.241201Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:01.241299Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3919 TClient is connected to server localhost:3919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:01.681294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:06:01.689364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.758412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.897075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.949780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:03.870971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572272238192876:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.871047Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.890498Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.918967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.941571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.968481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.994180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:04.028556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:04.061623Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572276533160664:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:04.061710Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:04.061753Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572276533160669:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:04.064760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:04.073439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572276533160671:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:04.910954Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034764908, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::BoolFlag [GOOD] Test command err: Trying to start YDB, gRPC: 6596, MsgBus: 19029 2025-04-07T14:05:48.811490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572208875860482:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:48.811526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001597/r3tmp/tmpmVb4AW/pdisk_1.dat 2025-04-07T14:05:49.102675Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6596, node 1 2025-04-07T14:05:49.173173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:49.173192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:49.173219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:49.173303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:49.175597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:49.175702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:49.178476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19029 TClient is connected to server localhost:19029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:49.650232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.671437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:49.800996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.966550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.051084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:51.538602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572221760764072:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.538720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.780871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.800707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.827931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.846361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.870144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.898606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:51.931554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572221760764564:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.931630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.931975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572221760764569:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.935865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:51.943714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572221760764571:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:54.279119Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572208875860482:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:54.280153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:54.837885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034753405, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 20838, MsgBus: 8030 2025-04-07T14:05:55.450349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572238469327157:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:55.450415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001597/r3tmp/tmpjsLdAb/pdisk_1.dat 2025-04-07T14:05:55.531599Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20838, node 2 2025-04-07T14:05:55.575278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:55.575346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:55.576338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:55.584714Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:55.584741Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:55.584750Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:55.584847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8030 TClient is connected to server localhost:8030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:55.970145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:55.985312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.025786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.142519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:56.214368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:58.065615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572251354230760:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:58.065687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:58.080889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.110488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.138185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.165300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.209106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.243621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:58.289585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572251354231252:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:58.289695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:58.289789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572251354231257:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:58.293014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:58.303183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572251354231259:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:00.085755Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034759747, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 20569, MsgBus: 28274 2025-04-07T14:06:00.862706Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572258451306802:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:00.862774Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001597/r3tmp/tmp35ofxb/pdisk_1.dat 2025-04-07T14:06:00.951538Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:00.967256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:00.967362Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:00.970190Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20569, node 3 2025-04-07T14:06:01.007179Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:01.007207Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:01.007221Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:01.007327Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28274 TClient is connected to server localhost:28274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:01.427475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.441779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.512360Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:06:01.648007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.709759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:03.538011Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572271336210399:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.538116Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.575894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.602250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.626286Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.654674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.683731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.712891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.751300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572271336210893:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.751400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.751439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572271336210898:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.754037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:03.761645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572271336210900:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:04.683417Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:06:05.043891Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034765081, txId: 281474976715675] shutting down |92.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-04-07T14:06:04.447400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:04.447467Z node 1 :IMPORT WARN: Table profiles were not loaded >> TNodeBrokerTest::BasicFunctionality [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 19347, MsgBus: 18094 2025-04-07T14:05:46.519442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572198476751405:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:46.523098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c7/r3tmp/tmp944wHI/pdisk_1.dat 2025-04-07T14:05:46.915175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:46.915290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:46.916388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:46.919555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19347, node 1 2025-04-07T14:05:46.982605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:46.982622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:46.982629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:46.982724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18094 TClient is connected to server localhost:18094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:47.499378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.523127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.674803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.851083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:47.915170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.513526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572211361654785:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.513615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:49.767291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.798295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.861530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.887189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.921570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:49.956958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.013227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215656622578:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.013343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.013779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215656622583:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.017986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.030848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572215656622585:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:51.519877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572198476751405:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:51.519953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:51.676435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034751704, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 23482, MsgBus: 22309 2025-04-07T14:05:52.430374Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572223278513618:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.430613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c7/r3tmp/tmp00woKF/pdisk_1.dat 2025-04-07T14:05:52.533193Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:05:52.558699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:52.558755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:52.564779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23482, node 2 2025-04-07T14:05:52.603654Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:52.603673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:52.603681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:52.603775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22309 TClient is connected to server localhost:22309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:05:53.011141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:05:53.024042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:05:53.067969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.239146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:53.313767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:55.483306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572236163417197:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not fo ... ode 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.511312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.535667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.560585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.584682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.609553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.641114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.675362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572236163417691:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.675440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.675503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572236163417696:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.678155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.686466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572236163417698:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:05:56.534297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:57.430892Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572223278513618:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:57.430952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:05:57.561632Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034757178, txId: 281474976715673] shutting down 2025-04-07T14:05:59.419553Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034758074, txId: 281474976715676] shutting down Trying to start YDB, gRPC: 26225, MsgBus: 27062 2025-04-07T14:06:00.217226Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572257463062048:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:00.217312Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c7/r3tmp/tmpJcjjUn/pdisk_1.dat 2025-04-07T14:06:00.325630Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26225, node 3 2025-04-07T14:06:00.353352Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:00.353449Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:00.365972Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:00.400224Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:00.400244Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:00.400252Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:00.400373Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27062 TClient is connected to server localhost:27062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:00.828543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:00.846321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:00.899964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.023307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:01.100950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:03.144190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572270347965648:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.144291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.182964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.248068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.276439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.302733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.329149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.365155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:03.400781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572270347966144:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.400869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.401112Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572270347966149:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:03.405041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:03.413422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572270347966151:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:04.337696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:05.337416Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572257463062048:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:05.339095Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:05.518917Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034764899, txId: 281474976715673] shutting down |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-04-07T14:06:04.319581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:04.319631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:04.857505Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001 2025-04-07T14:06:04.870415Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-07T14:06:04.870748Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:04.871008Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TJaegerTracingConfiguratorTests::DefaultConfig >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TConsoleConfigTests::TestModifyConfigItem >> TConsoleTests::TestRestartConsoleAndPools >> TConsoleTests::TestGetUnknownTenantStatus >> TConsoleTests::TestCreateTenant >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::ReadFinish >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::SimpleEcho >> KqpStreamLookup::ReadTableDuringSplit >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TGRpcStreamingTest::ClientDisconnects >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] 
>> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> Compression::WriteRAW >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> DataShardVolatile::DistributedWrite >> AssignTxId::Basic [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-07T14:06:10.817214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.817237Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 
2025-04-07T14:06:10.817248Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.817687Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.818247Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.825397Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.825737Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.826732Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.826744Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.826753Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.826961Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.827279Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.827365Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.827472Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.827642Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:06:10.828217Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.828229Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.828267Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.828471Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.829127Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.829205Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.829330Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.829840Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.830023Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.830088Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.830111Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2025-04-07T14:06:10.830778Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.830794Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.830806Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.831033Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.831318Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.831387Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.831541Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2025-04-07T14:06:10.832314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.832550Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:06:10.832767Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:06:10.832957Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:06:10.833060Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.833089Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.833115Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.833228Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.833254Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:10.833271Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:06:10.833288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.833391Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.833465Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:06:10.833484Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T14:06:10.833499Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.833569Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.833588Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:06:10.833601Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:06:10.833617Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.833690Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-07T14:06:10.834650Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.834668Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.834693Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.834926Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.835207Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.835279Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.835418Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". 
Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T14:06:10.836129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.836267Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:06:10.836451Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:06:10.836605Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:06:10.836706Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.836723Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.836733Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:10.836742Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:06:10.836758Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.836878Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.836933Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:06:10.836973Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T14:06:10.836983Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:06:10.836992Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:06:10.837003Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.837096Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-04-07T14:06:10.837827Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.837846Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.837865Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.838077Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.838436Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.838557Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.838714Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.839430Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.839963Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.840144Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-07T14:06:10.840231Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:06:10.840309Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.840335Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.840362Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-07T14:06:10.840378Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-07T14:06:10.840407Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-07T14:06:10.840425Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T14:06:10.840541Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:06:10.840640Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-04-07T14:06:08.863870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572292806081971:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.863923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00172f/r3tmp/tmpxRh3Vc/pdisk_1.dat 2025-04-07T14:06:09.172731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.240588Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream accepted Name# Session ok# true peer# ipv6:[::1]:56680 2025-04-07T14:06:09.242619Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade attach Name# Session actor# [1:7490572297101049636:2248] peer# ipv6:[::1]:56680 2025-04-07T14:06:09.242657Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade read Name# Session peer# ipv6:[::1]:56680 2025-04-07T14:06:09.242720Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade finish Name# Session peer# ipv6:[::1]:56680 grpc status# (0) message# 2025-04-07T14:06:09.243803Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] read finished Name# Session ok# false data# peer# ipv6:[::1]:56680 2025-04-07T14:06:09.243860Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream finished Name# Session ok# true peer# ipv6:[::1]:56680 grpc status# (0) message# 2025-04-07T14:06:09.243898Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream done notification Name# Session ok# true peer# ipv6:[::1]:56680 2025-04-07T14:06:09.243934Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] deregistering request Name# Session peer# ipv6:[::1]:56680 (finish done) 2025-04-07T14:06:09.243975Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-07T14:06:09.245267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.245393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.247586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-07T14:06:10.761397Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.761426Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.761492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.761877Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.761931Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.761961Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.763165Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009831s 2025-04-07T14:06:10.763740Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.764204Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.764282Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765693Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765732Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.766016Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.766039Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.766062Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.766109Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007012s 2025-04-07T14:06:10.766554Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.767036Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.767122Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768024Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768041Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768056Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.768590Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:06:10.768626Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768640Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768705Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.293006s 2025-04-07T14:06:10.769057Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.769418Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.769485Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770288Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770306Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770320Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.770642Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:06:10.770674Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770697Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770743Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.224930s 2025-04-07T14:06:10.771115Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.771475Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.771528Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.772356Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.772395Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.772426Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.772639Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.772967Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.780673Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.780990Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-07T14:06:10.781024Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.781040Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.781104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.179102s 2025-04-07T14:06:10.781404Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:06:10.782612Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.782631Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.782667Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.782911Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.783342Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.783483Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.783861Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.884876Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.885102Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:06:10.885167Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.885206Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:06:10.885289Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:06:10.985575Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:06:10.985742Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:06:10.986854Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.986874Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.986907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.987165Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.987717Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.987874Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.988256Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:11.089061Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.089226Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:06:11.089270Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:11.089302Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-07T14:06:11.089396Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:06:11.089474Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:06:11.089748Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:06:11.089832Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:06:11.090495Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-04-07T14:06:08.862327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572292516326171:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.862375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001739/r3tmp/tmpxsEl9N/pdisk_1.dat 2025-04-07T14:06:09.152964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.230722Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream accepted Name# Session ok# true peer# ipv6:[::1]:34648 2025-04-07T14:06:09.231009Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade attach Name# Session actor# [1:7490572296811293965:2247] peer# ipv6:[::1]:34648 2025-04-07T14:06:09.231050Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade write Name# Session data# peer# ipv6:[::1]:34648 2025-04-07T14:06:09.231236Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade write Name# Session data# peer# ipv6:[::1]:34648 grpc status# (0) message# 2025-04-07T14:06:09.231750Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] write finished Name# Session ok# true peer# ipv6:[::1]:34648 2025-04-07T14:06:09.232039Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream done notification Name# Session ok# true peer# ipv6:[::1]:34648 2025-04-07T14:06:09.232071Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] write finished Name# Session ok# true peer# ipv6:[::1]:34648 2025-04-07T14:06:09.232089Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream finished Name# Session ok# true peer# ipv6:[::1]:34648 grpc status# (0) message# 2025-04-07T14:06:09.232119Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] deregistering request Name# Session peer# ipv6:[::1]:34648 (finish done) 2025-04-07T14:06:09.232436Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-07T14:06:09.232452Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-07T14:06:09.249903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.250033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.252552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope |92.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/spilling/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanSpilling::SelfJoin >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-07T14:06:10.793442Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.793462Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.793483Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.793845Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.794235Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.794279Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.794845Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.794858Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.794866Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.795100Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.795281Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:06:10.795303Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.795752Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.795763Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.795787Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.796054Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.796085Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.796109Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.797361Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-07T14:06:10.797938Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.797955Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.797979Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.798261Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:06:10.798286Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.798304Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.798347Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-07T14:06:10.799070Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:06:10.799093Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:06:10.799123Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.799286Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.799575Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.807150Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:06:10.807442Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.807757Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2025-04-07T14:06:10.810574Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-07T14:06:10.810774Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.810805Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.810825Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:10.810839Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T14:06:10.810856Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T14:06:10.810870Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T14:06:10.810883Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-07T14:06:10.810906Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-07T14:06:10.811006Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-07T14:06:10.811023Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-07T14:06:10.811036Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-07T14:06:10.811047Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-07T14:06:10.811072Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-07T14:06:10.811088Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-07T14:06:10.811101Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-07T14:06:10.811113Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-07T14:06:10.811217Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-07T14:06:10.811231Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-07T14:06:10.811250Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-07T14:06:10.811263Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-07T14:06:10.811275Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-07T14:06:10.811289Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-07T14:06:10.811302Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-07T14:06:10.811323Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-07T14:06:10.811340Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-07T14:06:10.811353Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-07T14:06:10.811366Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-07T14:06:10.811379Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-07T14:06:10.811391Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-07T14:06:10.811404Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-07T14:06:10.811416Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-07T14:06:10.811441Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-07T14:06:10.811485Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-07T14:06:10.811503Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-07T14:06:10.811520Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-07T14:06:10.811532Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-07T14:06:10.811545Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-07T14:06:10.811558Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-07T14:06:10.811571Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-07T14:06:10.811592Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-07T14:06:10.811610Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-07T14:06:10.811622Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-07T14:06:10.811635Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-07T14:06:10.811651Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-07T14:06:10.811662Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-07T14:06:10.811673Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-07T14:06:10.811685Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-07T14:06:10.811697Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-07T14:06:10.811710Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-07T14:06:10.811724Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-07T14:06:10.811771Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T14:06:10.813655Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-07T14:06:10.813795Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-07T14:06:10.813829Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-07T14:06:10.813845Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-07T14:06:10.813858Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-07T14:06:10.813876Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-07T14:06:10.813908Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-07T14:06:10.813923Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-07T14:06:10.813937Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-07T14:06:10.813953Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-07T14:06:10.813965Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-07T14:06:10.813981Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-07T14:06:10.813995Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-07T14:06:10.814020Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-07T14:06:10.814040Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-07T14:06:10.814053Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-07T14:06:10.814070Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-07T14:06:10.814097Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-07T14:06:10.814111Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-07T14:06:10.814124Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-07T14:06:10.814137Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-07T14:06:10.814150Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-07T14:06:10.814172Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-07T14:06:10.814187Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-07T14:06:10.814199Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-07T14:06:10.814211Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-07T14:06:10.814224Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-07T14:06:10.814244Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-07T14:06:10.814257Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-07T14:06:10.814269Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-07T14:06:10.814282Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-07T14:06:10.814294Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-07T14:06:10.814306Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-07T14:06:10.814340Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-07T14:06:10.814358Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-07T14:06:10.814371Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-07T14:06:10.814384Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-07T14:06:10.814396Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-07T14:06:10.814419Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-07T14:06:10.814435Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-07T14:06:10.814448Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-07T14:06:10.814459Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-07T14:06:10.814471Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-07T14:06:10.814487Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-07T14:06:10.814499Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-07T14:06:10.814512Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-07T14:06:10.814525Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-07T14:06:10.814539Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-07T14:06:10.814562Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-07T14:06:10.814580Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-07T14:06:10.814593Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-07T14:06:10.814633Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T14:06:10.814760Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:06:10.815860Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.815884Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.815905Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.816219Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:06:10.816781Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.816888Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.817258Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.917917Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.918228Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:06:10.918306Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.918345Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:06:10.918423Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:06:11.118773Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:06:11.219260Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:06:11.219418Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:06:11.219818Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:06:11.221310Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.221329Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.221346Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:11.221752Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:11.222467Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:11.222624Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.223976Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:11.325601Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.327208Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:06:11.327272Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:11.327320Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:06:11.327422Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:06:11.327529Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:06:11.327660Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:06:11.327732Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-07T14:06:11.327871Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-04-07T14:06:08.862330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572295284433761:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.862379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001669/r3tmp/tmpjbD0UZ/pdisk_1.dat 2025-04-07T14:06:09.173254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.247488Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream accepted Name# Session ok# true peer# ipv6:[::1]:42022 2025-04-07T14:06:09.247761Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade attach Name# Session actor# [1:7490572299579401556:2247] peer# ipv6:[::1]:42022 2025-04-07T14:06:09.247784Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade read Name# Session peer# ipv6:[::1]:42022 2025-04-07T14:06:09.247835Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade write Name# Session data# peer# ipv6:[::1]:42022 2025-04-07T14:06:09.248045Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade finish Name# Session peer# ipv6:[::1]:42022 grpc status# (0) message# 2025-04-07T14:06:09.248426Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] write finished Name# Session ok# true peer# ipv6:[::1]:42022 2025-04-07T14:06:09.248635Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] read finished Name# Session ok# false data# peer# ipv6:[::1]:42022 2025-04-07T14:06:09.248693Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream done notification Name# Session ok# true peer# ipv6:[::1]:42022 2025-04-07T14:06:09.248721Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-07T14:06:09.248735Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream finished Name# Session ok# true peer# ipv6:[::1]:42022 grpc status# (0) message# 2025-04-07T14:06:09.248749Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-07T14:06:09.248759Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-07T14:06:09.248780Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] deregistering request Name# Session peer# ipv6:[::1]:42022 (finish done) 2025-04-07T14:06:09.269204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.269303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.271038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> KqpScanSpilling::SelfJoinQueryService >> KqpScanSpilling::HandleErrorsCorrectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-04-07T14:06:07.776710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572287623123007:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:07.776788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002131/r3tmp/tmpWXj9oe/pdisk_1.dat 2025-04-07T14:06:08.078795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.124486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:08.124638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:08.126432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2021 TServer::EnableGrpc on GrpcPort 28578, node 1 2025-04-07T14:06:08.354545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:08.354571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:08.354577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:08.354658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:08.836124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:09.956254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572296213058230:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:09.956378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:10.407539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:10.419605Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-04-07T14:06:10.419706Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-04-07T14:06:10.421939Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-04-07T14:06:10.422012Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-04-07T14:06:10.422182Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-04-07T14:06:10.422197Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-04-07T14:06:10.468976Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:28578" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } StrongConsistency { CommitIntervalMilliSeconds: 10000 } } 2025-04-07T14:06:10.469224Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:28578" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } StrongConsistency { CommitIntervalMilliSeconds: 10000 } } 2025-04-07T14:06:10.469308Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:06:10.469803Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-04-07T14:06:10.471504Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T14:06:10.471834Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2025-04-07T14:06:10.471938Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-04-07T14:06:10.471964Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-04-07T14:06:10.471974Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-04-07T14:06:10.472432Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-04-07T14:06:10.472477Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034770520 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10... (TRUNCATED) 2025-04-07T14:06:10.495346Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-04-07T14:06:10.495437Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-04-07T14:06:10.495484Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-04-07T14:06:10.495579Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:06:10.496627Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-04-07T14:06:10.497015Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-07T14:06:10.497057Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found } 2025-04-07T14:06:10.498229Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-07T14:06:10.498336Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-07T14:06:10.498396Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-04-07T14:06:10.498590Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-07T14:06:10.498776Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2025-04-07T14:06:10.498877Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-04-07T14:06:10.498906Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-07T14:06:10.498946Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-07T14:06:10.499142Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-04-07T14:06:10.499186Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-07T14:06:10.499228Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-07T14:06:10.499431Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-04-07T14:06:10.499459Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-07T14:06:10.499749Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-07T14:06:10.499917Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-04-07T14:06:10.499942Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-04-07T14:06:10.499962Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-07T14:06:10.500143Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-04-07T14:06:10.500165Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-04-07T14:06:10.500422Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2025-04-07T14:06:10.500493Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:06:10.500520Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-04-07T14:06:10.500541Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-04-07T14:06:10.500597Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 
2025-04-07T14:06:10.500625Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-04-07T14:06:10.500875Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-04-07T14:06:08.899075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572294602334838:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.899244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00172d/r3tmp/tmp0TyAO5/pdisk_1.dat 2025-04-07T14:06:09.197268Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.245485Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:35676 2025-04-07T14:06:09.245880Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7490572298897302634:2247] peer# ipv6:[::1]:35676 2025-04-07T14:06:09.245921Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:35676 2025-04-07T14:06:09.246142Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# true data# peer# ipv6:[::1]:35676 2025-04-07T14:06:09.246315Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-04-07T14:06:09.246360Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:35676 2025-04-07T14:06:09.246633Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:35676 grpc status# (0) message# 2025-04-07T14:06:09.246660Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:35676 2025-04-07T14:06:09.246980Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:35676 2025-04-07T14:06:09.247110Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:35676 grpc status# (0) message# 2025-04-07T14:06:09.247196Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:35676 (finish done) 2025-04-07T14:06:09.268394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.268483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.270267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> KqpScan::DropRedundantSortByPk [GOOD] >> KqpScan::DqSourceLiteralRange >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-04-07T14:06:08.891936Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572295174876458:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.891997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00165e/r3tmp/tmpJcwCHb/pdisk_1.dat 2025-04-07T14:06:09.148088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.225976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.226104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.227544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:09.230540Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:38108 2025-04-07T14:06:09.230845Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7490572299469844260:2250] peer# ipv6:[::1]:38108 2025-04-07T14:06:09.231273Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:38108 2025-04-07T14:06:09.231345Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-07T14:06:09.231971Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-04-07T14:06:09.232023Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# unknown (finish done) >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-04-07T14:06:08.863513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572295936078160:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.863563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001732/r3tmp/tmpY343jg/pdisk_1.dat 2025-04-07T14:06:09.148126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.231014Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream accepted Name# Session ok# true peer# ipv6:[::1]:46938 2025-04-07T14:06:09.231304Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade attach Name# Session actor# [1:7490572300231045820:2247] peer# ipv6:[::1]:46938 2025-04-07T14:06:09.231339Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade read Name# Session peer# ipv6:[::1]:46938 2025-04-07T14:06:09.231429Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] read finished Name# Session ok# false data# peer# ipv6:[::1]:46938 2025-04-07T14:06:09.231470Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-07T14:06:09.231521Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade finish Name# Session peer# ipv6:[::1]:46938 grpc status# (9) message# Everything is A-OK 2025-04-07T14:06:09.231753Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream done notification Name# Session ok# true 
peer# ipv6:[::1]:46938 2025-04-07T14:06:09.231810Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-07T14:06:09.231863Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream finished Name# Session ok# true peer# ipv6:[::1]:46938 grpc status# (9) message# Everything is A-OK 2025-04-07T14:06:09.231985Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] deregistering request Name# Session peer# ipv6:[::1]:46938 (finish done) 2025-04-07T14:06:09.234301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:09.234383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:09.235699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove |92.4%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-07T14:06:10.711692Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.711734Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.711764Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.712121Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.714520Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.724753Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.726665Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.730904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.731254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.731430Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T14:06:10.731529Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.731601Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.731630Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T14:06:10.731656Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:06:10.731669Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-07T14:06:10.733170Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.733192Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.733205Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.733549Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.734023Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.734153Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.734373Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T14:06:10.735565Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.735749Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:06:10.735986Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:06:10.736166Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:06:10.736259Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.736288Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.736313Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.736461Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.736490Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:10.736507Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:06:10.736528Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.736623Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.736720Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:06:10.736735Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T14:06:10.736746Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.736790Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.736803Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:06:10.736812Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:06:10.736822Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:10.736891Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-07T14:06:10.737673Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.737691Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.737723Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.738017Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.738359Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.738452Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.738615Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". 
Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2025-04-07T14:06:10.739409Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:10.739590Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:06:10.739819Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:06:10.739977Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:06:10.740084Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.740123Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.740201Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.740226Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:10.740252Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.740325Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.740347Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:10.740364Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:06:10.740431Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-04-07T14:06:10.740449Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:06:10.740463Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:11.705914Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). 
Partition stream id: 1 2025-04-07T14:06:11.794041Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:06:11.794067Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:06:11.794090Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:11.794382Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:11.795091Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:11.795265Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:06:11.795471Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-07T14:06:11.880804Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-07T14:06:11.882946Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:11.882982Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:06:11.882997Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:06:11.883012Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T14:06:11.883027Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T14:06:11.883053Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T14:06:11.883066Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-07T14:06:11.883080Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-07T14:06:11.883100Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-07T14:06:11.883129Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-07T14:06:11.883179Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-07T14:06:11.883314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:06:11.885290Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-04-07T14:06:11.896759Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.896794Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.896813Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:11.897158Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:11.897623Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:11.897773Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.897952Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2025-04-07T14:06:11.898320Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-07T14:06:11.899168Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.899186Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.899223Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:11.899493Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:11.900112Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:11.900228Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.900713Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:11.900940Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:11.901038Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:11.901069Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:06:11.901181Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain |92.4%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.811568Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.811655Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.831772Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.851665Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-04-07T14:04:51.852658Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.855393Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' 
partition 0 generation 2 [1:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.858823Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:183:2196] 2025-04-07T14:04:51.860689Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.872163Z node 1 :PERSQUEUE INFO: new Cookie default|77112d0c-df453cbe-c59b1ac3-26ca8222_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:173:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T14:04:52.333292Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.333383Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:180:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:181:2057] recipient: [2:179:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:182:2192] sender: [2:183:2057] recipient: [2:179:2191] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.368911Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.368993Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:182:2192] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:182:2192] sender: [2:259:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:53.877381Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.878340Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-04-07T14:04:53.879264Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2258] 2025-04-07T14:04:53.881631Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:266:2258] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:53.884414Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:267:2259] 2025-04-07T14:04:53.886192Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:267:2259] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured T ... 
[65:858:2697] sender: [65:859:2057] recipient: [65:855:2696] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:12.220001Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:12.220058Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:06:12.220572Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:921:2752] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.222854Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:922:2753] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.232289Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.232358Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [65:922:2753] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.255250Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.255337Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [65:921:2752] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.305006Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2025-04-07T14:06:12.362425Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:858:2697] sender: [65:958:2057] recipient: [65:14:2061] 2025-04-07T14:06:12.419937Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:858:2697] sender: [65:965:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:858:2697] sender: [65:968:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:858:2697] sender: [65:969:2057] recipient: [65:967:2781] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:970:2782] sender: [65:971:2057] recipient: [65:967:2781] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:12.487548Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:12.487605Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:06:12.488179Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1035:2839] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.490694Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1036:2840] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.498807Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.498877Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [65:1036:2840] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.516482Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.516573Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [65:1035:2839] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.567233Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2025-04-07T14:06:12.625986Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:970:2782] sender: [65:1072:2057] recipient: [65:14:2061] 2025-04-07T14:06:12.687789Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:970:2782] sender: [65:1079:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:970:2782] sender: [65:1081:2057] recipient: [65:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:970:2782] sender: [65:1083:2057] recipient: [65:1082:2868] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1084:2869] sender: [65:1085:2057] recipient: [65:1082:2868] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:12.754660Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:12.754723Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:06:12.755339Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1151:2928] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.757950Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1152:2929] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:12.766851Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.766922Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [65:1152:2929] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.786563Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:12.786653Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [65:1151:2928] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:12.846687Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2025-04-07T14:06:12.893615Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1084:2869] sender: [65:1186:2057] recipient: [65:14:2061] 2025-04-07T14:06:12.943812Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1084:2869] sender: [65:1193:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1084:2869] sender: [65:1196:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:1084:2869] sender: [65:1197:2057] recipient: [65:1195:2955] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1198:2956] sender: [65:1199:2057] recipient: [65:1195:2955] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:13.015128Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:13.015195Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:06:13.015860Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1267:3017] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:13.018871Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1268:3018] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:13.028143Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:13.028212Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [65:1268:3018] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:13.052556Z node 65 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:06:13.052657Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [65:1267:3017] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:13.099695Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2025-04-07T14:06:13.154058Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> KqpScanSpilling::SpillingPragmaParseError >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits |92.4%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] Test command err: 2025-04-07T14:04:51.549216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571963675920668:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.549370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.642321Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571963221183118:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:51.642735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:04:51.775870Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aef/r3tmp/tmpICefyH/pdisk_1.dat 2025-04-07T14:04:51.801636Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:04:52.038154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.038270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.039032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:04:52.039068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:04:52.044310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.046683Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:04:52.047738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:04:52.049898Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30750, node 1 2025-04-07T14:04:52.102122Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:04:52.323646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/u6pr/001aef/r3tmp/yandexlgTgvn.tmp 2025-04-07T14:04:52.323670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001aef/r3tmp/yandexlgTgvn.tmp 2025-04-07T14:04:52.323809Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001aef/r3tmp/yandexlgTgvn.tmp 2025-04-07T14:04:52.323903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:04:53.756825Z INFO: TTestServer started on Port 6998 GrpcPort 30750 TClient is connected to server localhost:6998 PQClient connected to localhost:30750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:04:54.073276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:04:54.168282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.435463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976106085373:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.435480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490571976106085367:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.435796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.438549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571976560823391:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.438606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571976560823405:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.438688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:04:54.442651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:04:54.484810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571976560823408:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:04:54.488945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490571976106085381:2288], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } waiting... 2025-04-07T14:04:55.832076Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571976560823606:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.843368Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490571976106085426:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:04:55.860380Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGVhOWY5YTQtNTI5NDkyYzYtYTFlYmEwMmQtZjM4N2NlMmY=, ActorId: [2:7490571976106085355:2282], ActorState: ExecuteState, TraceId: 01jr893181bemx1sxdjgnaq60k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.862609Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:55.868091Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjU3MDkxMGMtMTljYjAzMzMtN2Y3OWQ0NTYtNmMwYTNhMjY=, ActorId: [1:7490571976560823386:2297], ActorState: ExecuteState, TraceId: 01jr893195apyfkfzggrg582dg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:04:55.868391Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:04:56.208481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.281180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:04:56.405440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:04:56.549194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571963675920668:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.549255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:56.642329Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490571963221183118:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:04:56.642419Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:04:57.905870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89338mb4nm4dvf25x9h9q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1OGMzZjQtZGZkNjljNjYtNjkzMzk4NTUtMTdiYWRlOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: defau ... at schemeshard: 72057594046644480 2025-04-07T14:06:09.935083Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:06:10.616228Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T14:06:11.375216Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T14:06:12.086312Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T14:06:12.576325Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:12.576354Z node 9 :IMPORT WARN: Table profiles were not loaded Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3445495608623639121, "Root", "00415F536F757263655F36", 1744034772875, 1744034772875, 0, 13); 2025-04-07T14:06:13.027895Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715708. 
Ctx: { TraceId: 01jr895dxf0dbs8xac5xez5j96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YTA0YmRiYTAtYTBkMjgwNDYtNDRiM2QxNjMtNjkxNDU1NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:13.057348Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:06:13.057374Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:06:13.057385Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:06:13.057407Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-07T14:06:13.057524Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7490572317185858699:4046], Recipient [9:7490572295711021153:3400]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:13.057616Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021153:3400]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_6" 2025-04-07T14:06:13.057689Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7490572295711021153:3400], Recipient [9:7490572317185858698:4046]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-07T14:06:13.057721Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_6 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-07T14:06:13.057797Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021153:3400]: NActors::TEvents::TEvPoison 2025-04-07T14:06:13.057872Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7490572248466378723:2049], Recipient [9:7490572317185858698:4046]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-07T14:06:13.057895Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:06:13.064353Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7490572248466378942:2256], Recipient [9:7490572317185858698:4046]: 
NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=YTNlNDlmNjAtMjNmNzViNWQtODU2ZGZiN2UtY2M4OTk0MmI=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-07T14:06:13.064397Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:06:13.240106Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7490572248466378942:2256], Recipient [9:7490572317185858698:4046]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=YTNlNDlmNjAtMjNmNzViNWQtODU2ZGZiN2UtY2M4OTk0MmI=" PreparedQuery: "bb3989cf-b267487a-4712062d-c12fbc05" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr895e7h6078sppp3te05fys" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744034772875 } items { uint64_value: 1744034772875 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 110 2025-04-07T14:06:13.240365Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-07T14:06:13.240415Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOldSeqNo 2025-04-07T14:06:13.240632Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7490572317185858733:4046], Recipient [9:7490572295711021152:3399]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:13.252126Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271187968, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021152:3399]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000A_Source_6" } PipeClient { RawX1: 7490572317185858733 RawX2: 38654709710 } } 2025-04-07T14:06:13.252293Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-07T14:06:13.252412Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021152:3399]: NActors::TEvents::TEvPoison 2025-04-07T14:06:13.252467Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7490572317185858734:4046], Recipient [9:7490572295711021153:3400]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:13.252523Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021153:3400]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-07T14:06:13.252583Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [9:7490572295711021153:3400], Recipient [9:7490572317185858698:4046]: 
NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-07T14:06:13.252607Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) Update the table 2025-04-07T14:06:13.252824Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7490572317185858698:4046], Recipient [9:7490572295711021153:3400]: NActors::TEvents::TEvPoison 2025-04-07T14:06:13.378740Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7490572248466378942:2256], Recipient [9:7490572317185858698:4046]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=YTNlNDlmNjAtMjNmNzViNWQtODU2ZGZiN2UtY2M4OTk0MmI=" PreparedQuery: "16649932-5e9443f8-c928ab8f-bb8cba10" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 76 2025-04-07T14:06:13.378792Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS Received TEvChooseResult: 1 2025-04-07T14:06:13.378830Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=157 2025-04-07T14:06:13.378856Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7490572317185858698:4046] (SourceId=A_Source_6, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3445495608623639121 AND Topic = "Root" AND ProducerId = "00415F536F757263655F36" 2025-04-07T14:06:13.578687Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715711. Ctx: { TraceId: 01jr895ed57qat91r59z6sm7fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YjIzNmIwMDgtMTM0NDkwYWItZTdlNWRlZmItNTdiYWIxMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:13.847045Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7490572317185858816:2681] TxId: 281474976715712. Ctx: { TraceId: 01jr895ep3cbsp1makkds97k8f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=OTM3YjdmOTgtYzNiYTg5ZWItNDdiMmFlOGQtNjc0ODIzMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-04-07T14:06:13.847385Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7490572317185858823:2681], TxId: 281474976715712, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=OTM3YjdmOTgtYzNiYTg5ZWItNDdiMmFlOGQtNjc0ODIzMmE=. TraceId : 01jr895ep3cbsp1makkds97k8f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7490572317185858816:2681], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply
>> TLogSettingsConfiguratorTests::TestNoChanges [GOOD]
>> TLogSettingsConfiguratorTests::TestAddComponentEntries
>> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD]
>> TConsoleTests::TestSetDefaultStorageUnitsQuota
>> TConsoleTests::TestCreateTenantExtSubdomain [GOOD]
>> TConsoleTests::TestCreateSharedTenant
>> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD]
>> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist
>> TConsoleConfigTests::TestAutoKind [GOOD]
>> TConsoleConfigTests::TestAutoSplit
>> TTxDataShardMiniKQL::WriteEraseRead
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx
>> TTxDataShardMiniKQL::CrossShard_1_Cycle
>> TTxDataShardMiniKQL::CrossShard_5_AllToAll
>> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD]
>> TLogSettingsConfiguratorTests::TestRemoveComponentEntries
>> TTxDataShardMiniKQL::ReadSpecialColumns
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig
>> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD]
>> TConfigsDispatcherTests::TestYamlEndToEnd
>> TPQTest::TestSourceIdDropByUserWrites [GOOD]
>> TPQTest::TestSourceIdDropBySourceIdCount
>> TConsoleConfigTests::TestAutoSplit [GOOD]
>> TConsoleConfigTests::TestAllowedScopes
>> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD]
>> TLogSettingsConfiguratorTests::TestChangeDefaults
>> DataShardVolatile::DistributedWrite [GOOD]
>> DataShardVolatile::DistributedWriteBrokenLock
>> KqpScanSpilling::SelfJoin [GOOD]
>> KqpScanSpilling::SelfJoinQueryService [GOOD]
>> TTxDataShardMiniKQL::WriteEraseRead [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx
>> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD]
>> TTxDataShardMiniKQL::SelectRange
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD]
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending
>> KqpScan::DqSourceLiteralRange [GOOD]
>> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD]
>> TConsoleConfigHelpersTests::TestConfigCourier
>> KqpScanSpilling::SpillingPragmaParseError [GOOD]
>> TConsoleConfigTests::TestAllowedScopes [GOOD]
>> TConsoleConfigTests::TestValidation
>> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD]
>> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD]
>> TModificationsValidatorTests::TestChecksLimitError [GOOD]
>> TModificationsValidatorTests::TestChecksLimitWarning [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageMultiShard
>> TTxDataShardMiniKQL::SelectRange [GOOD]
>> TTxDataShardMiniKQL::SelectRangeWithNotFullKey
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test
command err: cwd: /home/runner/.ya/build/build_root/u6pr/000c14/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 4698, MsgBus: 17602 2025-04-07T14:06:12.230386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572311502803474:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.230722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c14/r3tmp/tmpvkyREm/pdisk_1.dat 2025-04-07T14:06:12.610661Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.633520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.633673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:12.637662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4698, node 1 2025-04-07T14:06:12.819141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.819172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.819181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.819278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17602 TClient is connected to server localhost:17602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.479731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.520781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.655263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
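For orientation: the KqpScanSpilling fixture above creates a small /Root/KeyValue table that the join stages later in this output read. A minimal sketch of such a table follows, assuming the column types shown in the query plan further down (Key: Uint64, Value: String); this is an illustration only, not the literal test fixture.
--!syntax_v1
-- Hedged sketch: column types are inferred from the KqpTable plan later in this log, not taken from the test source.
CREATE TABLE `/Root/KeyValue` (
    Key Uint64,
    Value String,
    PRIMARY KEY (Key)
);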
2025-04-07T14:06:13.791261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.851202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.070403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324387706935:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.070538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.442038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.463272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.487296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.511205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.573190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.593869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.630093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324387707432:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.630169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.630371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324387707437:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.633445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:15.641757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572324387707439:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:17.218194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572311502803474:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.218556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:17.558391Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-04-07T14:06:17.559934Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642624:2521], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7490572332977642624:2521], task: 1 2025-04-07T14:06:17.559966Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642624:2521], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set periodic stats 0.100000s 2025-04-07T14:06:17.560021Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642624:2521], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. EVLOGKQP START 2025-04-07T14:06:17.560020Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-04-07T14:06:17.560774Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2025-04-07T14:06:17.560869Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-04-07T14:06:17.561757Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:44 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2025-04-07T14:06:17.562056Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642626:2522], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7490572332977642626:2522], task: 2 2025-04-07T14:06:17.562090Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642626:2522], TxId: 281474976710682, task: 2. 
Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set periodic stats 0.100000s 2025-04-07T14:06:17.562386Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7490572332977642633:3819] at service [1:7597699455116079460:27756] 2025-04-07T14:06:17.562415Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7490572332977642634:3820] at service [1:7597699455116079460:27756] 2025-04-07T14:06:17.563555Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642626:2522], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-04-07T14:06:17.563772Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642624:2521], TxId: 281474976710682, task: 1. Ctx: { ... ZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:06:17.593307Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593319Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:06:17.593541Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593577Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593620Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593799Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. 
SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:06:17.593816Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593844Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:06:17.593857Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:06:17.593871Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Finish input channelId: 4, from: [1:7490572332977642627:2523] 2025-04-07T14:06:17.593890Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593898Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:06:17.593905Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593920Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.593950Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
CA StateFunc 271646922 2025-04-07T14:06:17.593952Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-04-07T14:06:17.593963Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2025-04-07T14:06:17.593971Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished 2025-04-07T14:06:17.593990Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642627:2523], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:06:17.594071Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. pass away 2025-04-07T14:06:17.594130Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:06:17.594352Z node 1 :KQP_COMPUTE DEBUG: [CloseFile] from: [1:7490572332977642642:3824], error: (empty maybe) 2025-04-07T14:06:17.594398Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.594452Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.594502Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:06:17.594765Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.594787Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:06:17.595588Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:06:17.595659Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:06:17.595865Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
CA StateFunc 271646922 2025-04-07T14:06:17.595900Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-04-07T14:06:17.595907Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished 2025-04-07T14:06:17.595915Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490572332977642628:2524], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jr895j3t3sjkc9pbgjxp53kt. SessionId : ydb://session/3?node_id=1&id=MWMzYTQ0NzktMjQ3MDY1NmUtZDJhZmM1MTQtN2ExNGQyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:06:17.595966Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. pass away 2025-04-07T14:06:17.596034Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:06:17.596805Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034777590, txId: 281474976710681] shutting down >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/u6pr/000af0/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 21266, MsgBus: 26249 2025-04-07T14:06:12.194786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572310617120168:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.195072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000af0/r3tmp/tmpqA0yJi/pdisk_1.dat 2025-04-07T14:06:12.595317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.621059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.623517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:12.625573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21266, node 1 2025-04-07T14:06:12.820977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.821016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.821028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.821122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26249 TClient is connected to server localhost:26249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.474128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
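For orientation: the physical plan printed at the end of this test's output (the s-expression block beginning with (let $1 (KqpTable '"/Root/KeyValue" ...))) joins the KeyValue table with itself on Value, broadcasts the right-hand side into a GraceJoinCore, and merge-sorts the result by t1.Key. A hedged YQL reconstruction of a query consistent with that plan is sketched below; it is inferred from the plan and is not the literal query in the test source.
--!syntax_v1
-- Hedged reconstruction based on the GraceJoinCore/Broadcast plan in this log; the real test query may differ.
SELECT t1.Key AS `t1.Key`, t1.Value AS `t1.Value`,
       t2.Key AS `t2.Key`, t2.Value AS `t2.Value`
FROM `/Root/KeyValue` AS t1
INNER JOIN `/Root/KeyValue` AS t2
    ON t1.Value = t2.Value
ORDER BY `t1.Key`;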
2025-04-07T14:06:13.517972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.651401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.804099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.874422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:14.958382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319207056342:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.958639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.442004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.503821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.529251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.555425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.578791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.606380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.642981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572323502024133:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.643029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.643075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572323502024138:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.645648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:15.652087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572323502024140:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:17.192773Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572310617120168:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.192831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 (StructType '('"Key" $3) '('"Value" $4))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($20) (block '( (let $21 (lambda '($22) (block '( (let $23 (VariantType (TupleType $5 $5))) (let $24 (Variant $22 '0 $23)) (let $25 (Variant $22 '1 $23)) (return $24 $25) )))) (return (FromFlow (MultiMap (ToFlow $20) $21))) ))) '('('"_logical_id" '688) '('"_id" '"d3bf71dc-762ef5e2-9d3554e4-abfd8efd")))) (let $7 (DqCnUnionAll (TDqOutput $6 '1))) (let $8 '('('"_logical_id" '531) '('"_id" '"424cc333-75f61a83-4139afd5-beab9ab2") '('"_wide_channels" $5))) (let $9 (DqPhyStage '($7) (lambda '($26) (block '( (let $27 (lambda '($28) (Member $28 '"Key") (Member $28 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $26) $27))) ))) $8)) (let $10 (DqCnMap (TDqOutput $6 '0))) (let $11 (DqCnBroadcast (TDqOutput $9 '0))) (let $12 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $13 '('('"_logical_id" '603) '('"_id" '"430b6373-e52a0f20-5129af39-b8f6b175") '('"_wide_channels" $12))) (let $14 (DqPhyStage '($10 $11) (lambda '($29 $30) (block '( (let $31 (lambda '($38) (block '( (let $39 (Member $38 '"Value")) (return (Member $38 '"Key") $39 $39 (Exists $39)) )))) (let $32 (lambda '($44 $45 $46 $47) $44 $45 $46)) (let $33 (lambda '($50 $51) $50 $51 $51)) (let $34 '('"2")) (let $35 '('0 '0 '1 '1)) (let $36 '('0 '"2" '1 '"3")) (let $37 (GraceJoinCore (WideMap (WideFilter (ExpandMap (ToFlow $29) $31) (lambda '($40 $41 $42 $43) $43)) $32) (WideMap (WideFilter (ToFlow $30) (lambda '($48 $49) (Exists $49))) $33) 'Inner $34 $34 $35 $36 '('"t1.Value") '('"t2.Value") '('"Broadcast"))) (return (FromFlow (WideSort $37 '('('0 (Bool 'true)))))) ))) $13)) (let $15 (DqCnMerge (TDqOutput $14 '0) '('('0 '"Asc")))) (let $16 (DqPhyStage '($15) (lambda '($52) (FromFlow (NarrowMap (ToFlow $52) (lambda '($53 $54 $55 $56) (AsStruct '('"t1.Key" $53) '('"t1.Value" $54) '('"t2.Key" $55) '('"t2.Value" $56)))))) '('('"_logical_id" '615) '('"_id" '"ec196bf7-d2dce4b4-bfcf96b0-f1a3fea5")))) (let $17 '($6 $9 $14 $16)) (let $18 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $19 (DqCnResult (TDqOutput $16 '0) $18)) (return (KqpPhysicalQuery '((KqpPhysicalTx $17 '($19) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $12) '0 '0)) '('('"type" '"query")))) ) >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-04-07T14:06:08.704006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T14:06:08.704057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.762304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.514725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:09.514772Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.556878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:10.311427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:10.311472Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:10.346542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.139965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.140028Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.192300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.996294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.996342Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.039100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:12.944308Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:12.944363Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.984954Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:13.848184Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:13.848252Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:13.889590Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:14.713611Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:14.713676Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.763321Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:15.609829Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:15.609893Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:15.649768Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:16.514810Z node 10 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [10:311:2280], 
Recipient [10:310:2277]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:16.514881Z node 10 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:16.521935Z node 10 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [10:310:2277], Recipient [10:350:2291]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 10 Host: "ghrun-lqtfcktism.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-07T14:06:16.522188Z node 10 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [10:310:2277], Recipient [10:357:2303]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 10 Host: "ghrun-lqtfcktism.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-07T14:06:16.522254Z node 10 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-04-07T14:06:16.522359Z node 10 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [10:310:2277]:1 2025-04-07T14:06:16.522442Z node 10 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [10:310:2277]:1 2025-04-07T14:06:16.522507Z node 10 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [10:310:2277]:1 version= 2025-04-07T14:06:16.522612Z node 10 :CMS_CONFIGS TRACE: TSubscriptionClientSender([10:310:2277]) send TEvConfigSubscriptionResponse 2025-04-07T14:06:16.523497Z node 10 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [10:403:2303], Recipient [10:310:2277]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-04-07T14:06:16.523547Z node 10 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-04-07T14:06:16.523741Z node 10 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [10:357:2303], Recipient [10:403:2303]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } YamlConfigNotChanged: true } 2025-04-07T14:06:16.523796Z node 10 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-07T14:06:16.523885Z node 10 :CMS_CONFIGS TRACE: TSubscriptionClientSender([10:310:2277]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } YamlConfigNotChanged: true 2025-04-07T14:06:16.524024Z node 10 
:CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [10:403:2303], Recipient [10:310:2277]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } YamlConfigNotChanged: true } 2025-04-07T14:06:16.524057Z node 10 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-07T14:06:16.528838Z node 10 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2025-04-07T14:06:16.528951Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-04-07T14:06:16.529014Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-04-07T14:06:16.529057Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-04-07T14:06:16.529085Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-04-07T14:06:16.529108Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-04-07T14:06:16.529133Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-04-07T14:06:16.529156Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-04-07T14:06:16.529181Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-04-07T14:06:16.529207Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-04-07T14:06:16.529232Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025-04-07T14:06:16.529257Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2025-04-07T14:06:16.529281Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_STATUS has been changed from WARN to DEBUG 2025-04-07T14:06:16.529305Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_NETWORK has been changed from WARN to NOTICE 2025-04-07T14:06:16.529328Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_NETWORK has been changed from WARN to DEBUG 2025-04-07T14:06:16.529351Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SESSION has been changed from WARN to NOTICE 2025-04-07T14:06:16.529375Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SESSION has been changed from WARN to DEBUG 2025-04-07T14:06:16.529398Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component HTTP has been changed from WARN to NOTICE 2025-04-07T14:06:16.529421Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component HTTP has been changed from WARN to DEBUG 2025-04-07T14:06:16.529445Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LOGGER has 
been changed from WARN to NOTICE 2025-04-07T14:06:16.529468Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LOGGER has been changed from WARN to DEBUG 2025-04-07T14:06:16.529491Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOBSTORAGE has been changed from WARN to NOTICE 2025-04-07T14:06:16.529514Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOBSTORAGE has been changed from WARN to DEBUG 2025-04-07T14:06:16.529538Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATESTORAGE has been changed from WARN to NOTICE 2025-04-07T14:06:16.529559Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATESTORAGE has been changed from WARN to DEBUG 2025-04-07T14:06:16.529579Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component RESOURCE_BROKER has been changed from WARN to NOTICE 2025-04-07T14:06:16.529603Z node 10 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component RESOURCE_BROKER has been changed from WARN to DEBUG 2025-04-07T14:06:16.529627Z node 10 :CMS_CONFIGS ... 2025-04-07T14:06:18.668085Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOB_DEPOT_EVENTS has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668113Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOB_DEPOT_EVENTS has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668139Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BLOB_DEPOT_EVENTS has been changed from 0 to 10 2025-04-07T14:06:18.668166Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DS_LOAD_TEST has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668192Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DS_LOAD_TEST has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668234Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DS_LOAD_TEST has been changed from 0 to 10 2025-04-07T14:06:18.668262Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_PROVIDER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668291Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_PROVIDER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668316Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_PROVIDER has been changed from 0 to 10 2025-04-07T14:06:18.668343Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_INITIALIZER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668369Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_INITIALIZER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668406Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_INITIALIZER has been changed from 0 to 10 2025-04-07T14:06:18.668436Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668462Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been 
changed from DEBUG to ALERT 2025-04-07T14:06:18.668488Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-04-07T14:06:18.668517Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668548Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668574Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-04-07T14:06:18.668602Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668630Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668655Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-04-07T14:06:18.668700Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668726Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668750Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2025-04-07T14:06:18.668779Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668807Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668831Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-04-07T14:06:18.668872Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668903Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-04-07T14:06:18.668929Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-04-07T14:06:18.668957Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-04-07T14:06:18.668987Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669013Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-04-07T14:06:18.669044Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669078Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669110Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR 
has been changed from 0 to 10 2025-04-07T14:06:18.669139Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669165Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669191Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-04-07T14:06:18.669222Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669249Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669276Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-04-07T14:06:18.669303Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669344Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669372Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-04-07T14:06:18.669401Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669428Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669454Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-04-07T14:06:18.669484Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669511Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669536Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-04-07T14:06:18.669572Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669605Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669642Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-04-07T14:06:18.669672Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669701Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669729Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-04-07T14:06:18.669757Z node 13 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669784Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669821Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-04-07T14:06:18.669851Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669880Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669907Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-04-07T14:06:18.669938Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-04-07T14:06:18.669966Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-04-07T14:06:18.669990Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-04-07T14:06:18.670018Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-04-07T14:06:18.670043Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2025-04-07T14:06:18.670119Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2025-04-07T14:06:18.670154Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2025-04-07T14:06:18.670181Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2025-04-07T14:06:18.670205Z node 13 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-04-07T14:06:18.670325Z node 13 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/u6pr/0009b4/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 15882, MsgBus: 3335 2025-04-07T14:06:14.458642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572321309731154:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:14.458722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009b4/r3tmp/tmpNNp28g/pdisk_1.dat 2025-04-07T14:06:14.745265Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 15882, node 1 2025-04-07T14:06:14.811038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:14.811056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:14.811063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:14.811131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:14.825247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:14.825438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:14.826876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3335 TClient is connected to server localhost:3335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:15.229067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.251749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.363840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.487378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.541381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:16.892490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572329899667465:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:16.892619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:17.171864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.198344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.235091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.256032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.278575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.310311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.339488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572334194635253:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:17.339547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:17.339587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572334194635258:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:17.343020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:17.352129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572334194635260:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:18.143870Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572338489602867:2463], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-04-07T14:06:18.144076Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzk5YjVkMjEtYjgyMDNiOTQtZmM0ODAzYWEtMmYwOWU0MGY=, ActorId: [1:7490572338489602860:2459], ActorState: ExecuteState, TraceId: 01jr895jzq30bbbs6twkj1jdv8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSourceLiteralRange [GOOD] Test command err: Trying to start YDB, gRPC: 9593, MsgBus: 29120 2025-04-07T14:05:47.419759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572202883077779:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:47.420509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ae/r3tmp/tmpu15rmi/pdisk_1.dat 2025-04-07T14:05:47.821048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9593, node 1 2025-04-07T14:05:47.846209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:47.846284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:47.851348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:47.933207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:47.933238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:47.933266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:47.933363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29120 TClient is connected to server localhost:29120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:48.483062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
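Editor's note on the SpillingPragmaParseError failure above: the compile error lists the accepted values of NYql::NDq::EEnabledSpillingNodes ('None', 'GraceJoin', 'Aggregation', 'All') and rejects the deliberately bad input 'GraceJoin1'. The snippet below is a minimal standalone sketch of that kind of name-to-enum validation, written only to illustrate the check; it is not the YDB implementation, and everything except the value names quoted from the log is assumed.

```cpp
// Minimal sketch (assumption: a standalone re-creation of the validation the
// error above reports, NOT the YDB/NYql code). The enum member names and the
// rejected input "GraceJoin1" are taken verbatim from the log message.
#include <iostream>
#include <map>
#include <optional>
#include <string>

enum class EEnabledSpillingNodes { None, GraceJoin, Aggregation, All };

std::optional<EEnabledSpillingNodes> ParseSpillingNodes(const std::string& name) {
    static const std::map<std::string, EEnabledSpillingNodes> known = {
        {"None", EEnabledSpillingNodes::None},
        {"GraceJoin", EEnabledSpillingNodes::GraceJoin},
        {"Aggregation", EEnabledSpillingNodes::Aggregation},
        {"All", EEnabledSpillingNodes::All},
    };
    auto it = known.find(name);
    if (it == known.end()) {
        // Mirrors the "Key '...' not found in enum" failure path seen in the log.
        return std::nullopt;
    }
    return it->second;
}

int main() {
    for (const std::string value : {"GraceJoin", "GraceJoin1"}) {
        if (ParseSpillingNodes(value)) {
            std::cout << value << ": accepted\n";
        } else {
            std::cout << value << ": rejected (valid: None, GraceJoin, Aggregation, All)\n";
        }
    }
    return 0;
}
```

With this sketch, "GraceJoin" parses while "GraceJoin1" is rejected, which is the behavior the [GOOD] test verdict above asserts for the parse-error case.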
2025-04-07T14:05:48.494009Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:05:48.505803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.654186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.799687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.863054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:50.710502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215767981258:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.721901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.747462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.773493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.800420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.825433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.855024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.881985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:50.945511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215767981749:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.945567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.945610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572215767981754:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:50.948476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:50.959699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572215767981756:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:05:52.420953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572202883077779:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:52.433771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:02.820782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:02.820808Z node 1 :IMPORT WARN: Table profiles were not loaded [[[0];[0u];["Value1"]];[[2];[200u];["Value3"]]] 2025-04-07T14:06:05.719527Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034765753, txId: 281474976710771] shutting down Trying to start YDB, gRPC: 19834, MsgBus: 13365 2025-04-07T14:06:06.334612Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572283334602418:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:06.334677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ae/r3tmp/tmpFgYtjY/pdisk_1.dat 2025-04-07T14:06:06.416620Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19834, node 2 2025-04-07T14:06:06.459905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:06.460036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:06.461579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:06.469705Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:06.469733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:06.469743Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:06.469883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13365 TClient is connected to server localhost:13365 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:06:06.811195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:06.828696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:06.900412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:07.035758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:07.091833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subo ... le, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:08.770859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572291924538725:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:08.770952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:08.841054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:08.866686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:08.893012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:08.918393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:08.945625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:08.973897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:09.008688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572296219506512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:09.008764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:09.008818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572296219506517:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:09.012054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:09.020070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572296219506519:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:11.335303Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572283334602418:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.335370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17737, MsgBus: 23275 2025-04-07T14:06:12.229601Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572309280510571:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.229642Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ae/r3tmp/tmp5BYaIF/pdisk_1.dat 2025-04-07T14:06:12.403063Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.420153Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.420258Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17737, node 3 2025-04-07T14:06:12.440323Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:12.557194Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.557216Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.557227Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.557396Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23275 TClient is connected to server localhost:23275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.106759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.121668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:06:13.179220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.340692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.412646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:15.963644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572322165414162:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.963714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.985200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.010502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.034519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.061314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.086242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.116866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:16.188480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572326460381956:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:16.188549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:16.188621Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490572326460381961:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:16.192046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:16.201750Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490572326460381963:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2025-04-07T14:06:17.229489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:06:17.229984Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490572309280510571:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.230061Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:17.701936Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034777709, txId: 281474976715673] shutting down 2025-04-07T14:06:17.897533Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034777940, txId: 281474976715675] shutting down >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-04-07T14:06:17.798617Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:17.862470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.862531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.864631Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:17.865123Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:17.865399Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:17.875740Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:17.914629Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:17.914996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:17.916706Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:17.916803Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:17.916872Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:17.917247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:17.942221Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:17.942410Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:17.942531Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:17.942579Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:17.942624Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:17.942660Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-07T14:06:17.942832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.942881Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.943227Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:17.943320Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:17.943414Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.943478Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:17.943550Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:17.943592Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.943628Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.943662Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:17.943701Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:17.985607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.985674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.985742Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:17.988121Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:17.988186Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:17.988287Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:17.988454Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:17.988499Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:17.988547Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:17.988599Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.988637Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:17.988712Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:17.988742Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.989000Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:17.989053Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:17.989094Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:17.989131Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.989173Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:17.989200Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:17.989240Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:17.989274Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.989312Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:18.013456Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:18.013533Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:18.013566Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:18.013601Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:18.013660Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:18.014062Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:18.014102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:18.014136Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:18.014236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:18.014257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:18.014364Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:18.014399Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:18.014438Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:18.014466Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:18.016784Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:18.016849Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:18.017051Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:18.017084Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:18.017132Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 
2025-04-07T14:06:18.017173Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:18.017205Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:18.017245Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:18.017281Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:18.017336Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:18.017381Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:18.017417Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:18.017450Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:18.017623Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:18.017674Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:18.017713Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:18.017735Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:18.017758Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:18.017815Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:18.017841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:18.017875Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:18.017902Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:18.017949Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:18.017980Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:18.018026Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:18.018065Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:18.018099Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:0 ... 
: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:19.509582Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-04-07T14:06:19.509621Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:19.513042Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2025-04-07T14:06:19.513091Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T14:06:19.513377Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:293:2279], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:19.513410Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:19.513457Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:292:2278], serverId# [3:293:2279], sessionId# [0:0:0] 2025-04-07T14:06:19.513647Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-04-07T14:06:19.519978Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:19.520054Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:19.520724Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-04-07T14:06:19.520807Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-07T14:06:19.520846Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-04-07T14:06:19.520882Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:19.520913Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:19.520957Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T14:06:19.521005Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-04-07T14:06:19.521041Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-07T14:06:19.521077Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:19.521102Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:19.521123Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:19.521634Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:19.521699Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:19.521749Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-07T14:06:19.521773Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:19.521794Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:19.521817Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-04-07T14:06:19.521851Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:06:19.521912Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-04-07T14:06:19.521941Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:19.521990Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:19.522023Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-04-07T14:06:19.522061Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-07T14:06:19.522081Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 
2025-04-07T14:06:19.522103Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-04-07T14:06:19.522152Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:19.522187Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-04-07T14:06:19.522229Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-04-07T14:06:11.194011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:11.194320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:11.194564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a5e/r3tmp/tmptTvgC4/pdisk_1.dat 2025-04-07T14:06:11.639552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:11.687668Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.739936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.740117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.751631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.878112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:12.267490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:703:2587], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.267620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2592], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.268063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.272801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:06:12.497308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:717:2595], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:06:18.130974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr895d99fdkeymxgvnj9canr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3OTE2Ni01NDY5NDQ4YS1kYmFmYjRiYy1jYjVkN2VkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:18.589156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr895k2r904sg6qcq2pvn86j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU2ZjYyNGYtNDI5M2NiZGMtNmFjMWZmZjctYTFhM2JjNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-04-07T14:06:11.181348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:11.181789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:11.182114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a6a/r3tmp/tmpSJZnT0/pdisk_1.dat 2025-04-07T14:06:11.637915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:11.691225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.740708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.740902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.752401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.878072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:12.264001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.264138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:762:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.264272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:12.271199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:06:12.491882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:765:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:06:19.589611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr895d959s6p27780b4qf0h5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzYmIxZjMtMWM0NGIwYzgtZDM4MjMxMTEtMzU2YjMwYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:19.639265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr895d959s6p27780b4qf0h5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzYmIxZjMtMWM0NGIwYzgtZDM4MjMxMTEtMzU2YjMwYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:19.705368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr895d959s6p27780b4qf0h5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzYmIxZjMtMWM0NGIwYzgtZDM4MjMxMTEtMzU2YjMwYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:19.931075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr895mmf4ez0tr82k14mj93h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY3ZjA3NTktZWI4MzNkYzgtZDkyYjUwNWItNjdhODFjYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-04-07T14:06:17.659432Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:17.728138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.728201Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.731166Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:17.732485Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:17.732774Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:17.746322Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:17.780969Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:17.781396Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:17.784550Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:17.784636Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:17.784709Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:17.785678Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:17.809913Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:17.811115Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:17.811271Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:17.811311Z node 1 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 9437184 2025-04-07T14:06:17.811346Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:17.811381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.811605Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.811661Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.812629Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:17.812853Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:17.812929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.812969Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:17.813118Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:17.813156Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.813188Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.813220Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:17.813263Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:17.870911Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.870990Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.871719Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:17.875349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:17.875407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:17.875500Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:17.875653Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:17.875725Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:17.875778Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:17.875822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.875857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:17.875894Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:17.875925Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 
2025-04-07T14:06:17.876194Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:17.876258Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:17.876293Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:17.876340Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.876392Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:17.876417Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:17.876450Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:17.876493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.876522Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:17.906737Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:17.906817Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.906864Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.906924Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:17.906997Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:17.907467Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.907514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.907561Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:17.907699Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:17.907728Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:17.907843Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.907884Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.907929Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:17.907963Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:17.911484Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:17.911557Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.911734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.911771Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.911818Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.911857Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:17.911888Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:17.911924Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-07T14:06:17.911959Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:17.911996Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.912035Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:17.912073Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:17.912117Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:17.912245Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-04-07T14:06:17.912279Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:17.912321Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:17.912352Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.912384Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.912633Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.912685Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:17.912829Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-07T14:06:17.912861Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.912898Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:17.912932Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:17.912965Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:17.913018Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.913045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:17.913086Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:17.913133Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:17.913181Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is ... 
990534} resubmit=1) 2025-04-07T14:06:20.252891Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224]) to queue queue_transaction 2025-04-07T14:06:20.252916Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224]) from queue queue_transaction 2025-04-07T14:06:20.252934Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224]) to queue queue_transaction 2025-04-07T14:06:20.252953Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 1.881864 to 18.818640 (insert task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224])) 2025-04-07T14:06:20.253004Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-07T14:06:20.253066Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-04-07T14:06:20.253092Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306]) to queue queue_transaction 2025-04-07T14:06:20.253119Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306]) from queue queue_transaction 2025-04-07T14:06:20.253142Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306]) to queue queue_transaction 2025-04-07T14:06:20.253167Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306])) 2025-04-07T14:06:20.253216Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:20.253254Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:20.253880Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-04-07T14:06:20.479402Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:20.479469Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:20.479517Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:20.479542Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:20.479569Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:20.479589Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 
2025-04-07T14:06:20.479778Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-04-07T14:06:20.479804Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:20.479832Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:20.479852Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-04-07T14:06:20.479881Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-04-07T14:06:20.479896Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:20.479916Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-04-07T14:06:20.479935Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:20.479952Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:20.479972Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:20.479989Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:20.480076Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-07T14:06:20.480117Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-07T14:06:20.480291Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224]) (release resources {0, 96990534}) 2025-04-07T14:06:20.480342Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:251:2224])) 2025-04-07T14:06:20.480439Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-07T14:06:20.480485Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-07T14:06:20.480505Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-04-07T14:06:20.481303Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-04-07T14:06:20.715604Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-04-07T14:06:20.715689Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:20.715747Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-04-07T14:06:20.715778Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-04-07T14:06:20.715807Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-04-07T14:06:20.715836Z node 3 :TX_DATASHARD 
TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-04-07T14:06:20.716044Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-04-07T14:06:20.716071Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-04-07T14:06:20.716095Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-04-07T14:06:20.716119Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-04-07T14:06:20.716144Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-04-07T14:06:20.716165Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-04-07T14:06:20.716189Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-04-07T14:06:20.716229Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:20.716251Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:06:20.716273Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-07T14:06:20.716296Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-07T14:06:20.716410Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-07T14:06:20.716466Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-07T14:06:20.716646Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306]) (release resources {0, 96990534}) 2025-04-07T14:06:20.716713Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:355:2306])) 2025-04-07T14:06:20.733414Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} commited cookie 1 for step 8 2025-04-07T14:06:20.733496Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:06:20.733533Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-04-07T14:06:20.733574Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:20.733633Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-07T14:06:20.733682Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-07T14:06:20.733902Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} commited cookie 1 for step 8 2025-04-07T14:06:20.733936Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:20.733961Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-04-07T14:06:20.733995Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:97:2132], exec 
latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:20.734044Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:06:20.734067Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:20.734296Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:333:2306], Recipient [3:438:2388]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-07T14:06:20.734340Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:20.734397Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-04-07T14:06:20.734489Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:229:2224], Recipient [3:438:2388]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:06:20.734512Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:20.734534Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope |92.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> TPQTest::TestPQReadAhead [GOOD] >> BuildStatsHistogram::Ten_Serial >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> TIterator::Single >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TChargeBTreeIndex::NoNodes_History >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> 
TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::OneNode >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> TPartBtreeIndexIteration::OneNode_Groups >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> Bloom::Conf [GOOD] >> Bloom::Hashes >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> Bloom::Hashes [GOOD] >> Bloom::Rater ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQReadAhead [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.798292Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.798350Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.823302Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.844142Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-07T14:04:51.845140Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.847659Z node 1 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.867511Z node 1 :PERSQUEUE INFO: new Cookie default|60f49696-a62e1113-1a7f5dc7-c91bba12_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:173:2188] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:173:2188] Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup ... 
:TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [34:173:2188] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [34:241:2242] sender: [34:352:2057] recipient: [34:14:2061] 2025-04-07T14:06:19.999422Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8365317 2025-04-07T14:06:19.999490Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 size 7877895 Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] 2025-04-07T14:06:20.720431Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:20.720509Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] Leader for TabletID 72057594037927938 is [35:151:2172] sender: [35:152:2057] recipient: [35:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:177:2057] recipient: [35:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:20.746714Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:20.747518Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 35 actor [35:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 35 ReadRuleGenerations: 35 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 35 Important: false } Consumers { Name: "aaa" Generation: 35 Important: true } 2025-04-07T14:06:20.748272Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:184:2197] 2025-04-07T14:06:20.750888Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [35:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:20.754022Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:185:2198] 2025-04-07T14:06:20.756017Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [35:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:20.812383Z node 35 :PERSQUEUE INFO: new Cookie default|d36404bf-ce3e4cd8-edee3a55-6b31ed22_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >>>>> 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:175:2190] >>>>> 2 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:175:2190] >>>>> 3 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:175:2190] >>>>> 4 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:175:2190] >>>>> 5 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [35:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] 2025-04-07T14:06:21.715258Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:21.715329Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] Leader for TabletID 72057594037927938 is [36:151:2172] sender: [36:152:2057] recipient: [36:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:177:2057] recipient: [36:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:21.733275Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:21.733974Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [36:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2025-04-07T14:06:21.734420Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [36:184:2197] 2025-04-07T14:06:21.736318Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, 
Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:21.739081Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [36:185:2198] 2025-04-07T14:06:21.741080Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [36:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:21.775250Z node 36 :PERSQUEUE INFO: new Cookie default|4c6c00ca-4d5f656f-3738e610-9f58a3b5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >>>>> 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] >>>>> 2 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] >>>>> 3 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] >>>>> 4 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] >>>>> 5 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> TScreen::Sequential [GOOD] >> TScreen::Random >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> 
TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardOutOfOrder::TestImmediateQueueThenSplit >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> TPQTest::TestWritePQ [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage >> PersQueueSdkReadSessionTest::SettingsValidation >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> TIterator::SingleReverse [GOOD] >> TIterator::MixedReverse >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> 
TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> BuildStatsBTreeIndex::Single_Groups_History >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> 
TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TSharedPageCache::ClockPro [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSharedPageCache::Compaction_BTreeIndex >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> TSharedPageCache::Compaction_BTreeIndex [GOOD] >> TSharedPageCache::Compaction_FlatIndex >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TConsoleTests::TestTenantConfigConsistency [GOOD] 
>> TConsoleTests::TestSetConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-04-07T14:06:08.699464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.699508Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.765156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.533632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:09.533686Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.579883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:10.372965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:10.373043Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:10.412152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.225210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.225284Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.264197Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:12.532162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:12.532260Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.578300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:19.507510Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:19.507588Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:19.553237Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:20.658177Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:20.658237Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:20.709116Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:24.631300Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.631387Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.676284Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:25.753903Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:25.753965Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:25.795781Z 
node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:26.670413Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:26.670491Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.712294Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TSharedPageCache::Compaction_FlatIndex [GOOD] >> TSharedPageCache::Compaction_ZeroCache_BTreeIndex >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> TSharedPageCache::Compaction_ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::Compaction_ZeroCache_FlatIndex >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TSharedPageCache::Compaction_ZeroCache_FlatIndex [GOOD] >> TSwitchableCache::Touch [GOOD] >> TSwitchableCache::Erase [GOOD] >> TSwitchableCache::EvictNext [GOOD] >> TSwitchableCache::UpdateLimit [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD] >> TSwitchableCache::Switch_RotatePages_Force [GOOD] >> TSwitchableCache::Switch_RotatePages_Evicts [GOOD] >> TSwitchableCache::Switch_Touch [GOOD] >> TSwitchableCache::Switch_Erase [GOOD] >> TSwitchableCache::Switch_EvictNext [GOOD] >> TSwitchableCache::Switch_UpdateLimit [GOOD] >> TVersions::WreckHead >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] Test command err: 2025-04-07T14:06:08.703320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.703365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.762653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.749399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:09.749444Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.790849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:10.574462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:10.574525Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:10.613046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.499131Z node 4 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.499198Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.546150Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:12.602547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:12.602621Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.644683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:13.675271Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:13.675342Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:13.716830Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:14.507412Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:14.507471Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.548489Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:15.547603Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:15.547677Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:15.586676Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:16.389418Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:16.389478Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:16.424161Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:17.300167Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.300265Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.339146Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:18.223482Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:18.223553Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:18.267744Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:19.043025Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:19.043094Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:19.086072Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:20.204227Z node 13 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:20.204313Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:20.252727Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:21.303276Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:21.303352Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:21.363462Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:22.403441Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:22.403522Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:22.448884Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:23.310735Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:23.310827Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:23.366686Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:24.670266Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.670353Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.721080Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:26.122441Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:26.122510Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.191163Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:27.485046Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:27.485126Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:27.533367Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> DataShardOutOfOrder::TestImmediateQueueThenSplit [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> 
DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> Memtable::Wreck [GOOD] >> Memtable::Erased >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardVolatile::DistributedWriteShardRestartBeforePlan [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-04-07T14:06:24.122481Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:24.202615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.202666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.204775Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:24.205219Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:24.205464Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:24.216207Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:24.260576Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:24.262096Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:24.263697Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:24.263792Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:24.263850Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:24.264235Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:24.293445Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:24.293654Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:24.293781Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:24.293821Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:24.293853Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:24.293891Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.294099Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.294172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.294555Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:24.294698Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:24.294763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.294833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:24.294882Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:24.294920Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:24.294953Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:24.294985Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:24.295019Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:24.337483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.337558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.337624Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:24.339857Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:24.339924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:24.340013Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:24.340156Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:24.340229Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:24.340269Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:24.340312Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.340359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:24.340395Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:24.340425Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.340857Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:24.340936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:24.340977Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:24.341016Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.341087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:24.341114Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:24.341159Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:24.341189Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.341213Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:24.363750Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:24.363809Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.363857Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.363905Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 
9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:24.363969Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:24.364488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.364544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.364589Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:24.364765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:24.364807Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:24.364919Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.364965Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:24.365006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:24.365040Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:24.368776Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:24.368832Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.369003Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.369041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.369083Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.369130Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:24.369165Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:24.369203Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-07T14:06:24.369235Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:24.369296Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:24.369334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:24.369372Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:24.369407Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:24.369535Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-07T14:06:24.369567Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:24.369604Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:24.369630Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit 
ProtectSchemeEchoes 2025-04-07T14:06:24.369653Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369705Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.369734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369787Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:24.369818Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:24.369863Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-07T14:06:24.369896Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:24.369928Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-07T14:06:24.369983Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:24.370024Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:24.370049Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit MakeSnapshot 2025-04-07T14:06:24.370077Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit MakeSnapshot 2025-04-07T14:06:24.370113Z node 1 :TX_DATASHARD ... CompleteOperation 2025-04-07T14:06:28.702404Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 134] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:28.702436Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:28.702461Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.702537Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.702555Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.702603Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:28.702648Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:28.702668Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.702763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.702784Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.702813Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:28.702849Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.702930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.702951Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.702994Z node 1 :TX_DATASHARD DEBUG: Complete 
[6 : 143] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:28.703018Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.703093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.703117Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.703143Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:28.703164Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.703237Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.703255Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.703340Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:28.703372Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.703455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.703481Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.703508Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:28.703530Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.703769Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:332:2304]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:28.703805Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.703851Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-07T14:06:28.703944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-07T14:06:28.703969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.703997Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-07T14:06:28.704056Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-07T14:06:28.704122Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.704147Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-07T14:06:28.704217Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:332:2304]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 
9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:28.704247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.704266Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-07T14:06:28.704342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-07T14:06:28.704365Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.704384Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-07T14:06:28.704442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:332:2304]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:28.704466Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.704485Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-07T14:06:28.704557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:28.704586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.704624Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:28.704965Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:28.704999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705023Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:28.705108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-07T14:06:28.705142Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705178Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-07T14:06:28.705251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:28.705276Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705295Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:28.705341Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 
SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:28.705372Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705398Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:28.705476Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:28.705522Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705557Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-07T14:06:28.705643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-07T14:06:28.705670Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705692Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-07T14:06:28.705764Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-07T14:06:28.705792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705811Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-07T14:06:28.705875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:28.705896Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.705928Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:28.706020Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:28.706043Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.706062Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-04-07T14:06:24.122151Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:24.192993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.193045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.195075Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:24.195494Z node 1 
:TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:24.195728Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:24.205759Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:24.241144Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:24.241560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:24.242923Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:24.242995Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:24.243052Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:24.243296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:24.270056Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:24.270248Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:24.270353Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:24.270382Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:24.270423Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:24.270460Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.270642Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.270687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.270979Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:24.271046Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:24.271080Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.271110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:24.271182Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:24.271211Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:24.271236Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:24.271257Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:24.271284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:24.335879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.335953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.335999Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:24.338212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction 
TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:24.338264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:24.338339Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:24.338502Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:24.338556Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:24.338597Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:24.338638Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.338674Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:24.338717Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:24.338778Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.339062Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:24.339129Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:24.339163Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:24.339203Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.339265Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:24.339288Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:24.339331Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:24.339362Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.339386Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:24.362009Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:24.362073Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.362111Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.362167Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:24.362225Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:24.365264Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.365313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.365350Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 
2025-04-07T14:06:24.365481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:24.365510Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:24.365603Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.365647Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.365683Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:24.365713Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:24.368761Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:24.368814Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.368976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.369008Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.369049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.369079Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:24.369106Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:24.369137Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:24.369169Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:24.369225Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.369267Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:24.369295Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:24.369320Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:24.369457Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:24.369493Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.369527Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:24.369548Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:24.369568Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369613Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.369635Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369667Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:24.369700Z node 1 
:TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:24.369744Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:24.369773Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:24.369819Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:24.369875Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.369896Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:24.369918Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... dy candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-07T14:06:28.797614Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-07T14:06:28.797638Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:28.797666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-07T14:06:28.797689Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-07T14:06:28.797718Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-07T14:06:28.798101Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-07T14:06:28.798137Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:28.798168Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-07T14:06:28.798185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-07T14:06:28.798203Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-07T14:06:28.798220Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.798343Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-07T14:06:28.798369Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-07T14:06:28.798397Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-07T14:06:28.798416Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-07T14:06:28.798435Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:28.798451Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-07T14:06:28.798466Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-07T14:06:28.798484Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:28.798500Z node 1 
:TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-07T14:06:28.798530Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-07T14:06:28.798548Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-07T14:06:28.798697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:332:2304]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:28.798750Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.798768Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:28.798844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-07T14:06:28.798861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.798875Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-07T14:06:28.798916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-07T14:06:28.798938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.798955Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-07T14:06:28.799054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-07T14:06:28.799078Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799099Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-07T14:06:28.799152Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-07T14:06:28.799172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799197Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-07T14:06:28.799257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-07T14:06:28.799274Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799291Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-07T14:06:28.799342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 
TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-07T14:06:28.799355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799367Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-07T14:06:28.799416Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-07T14:06:28.799440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799455Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-07T14:06:28.799513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-07T14:06:28.799527Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799543Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-07T14:06:28.799591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:28.799608Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799622Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:28.799661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:28.799677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799690Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-07T14:06:28.799729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:28.799743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799758Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:28.799810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:28.799832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799847Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:28.799912Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient 
[1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:28.799930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.799946Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:28.799996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:28.800010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.800022Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:28.813649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:28.813695Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:28.813744Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2025-04-07T14:06:28.813798Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:28.813833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:28.814063Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:28.814097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:28.814138Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] Test command err: 2025-04-07T14:05:59.727255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572255991391148:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:05:59.727876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dd9/r3tmp/tmpTx3HuY/pdisk_1.dat 2025-04-07T14:06:00.045819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:00.118666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:00.118830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:00.120991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8675, 
node 1 2025-04-07T14:06:00.306781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:00.306813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:00.306822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:00.306943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:00.730879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:00.776931Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T14:06:02.091711Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:06:02.100899Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:06:02.101049Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-07T14:06:02.101081Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:06:02.101113Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490572268876293650:2301], Start check tables existence, number paths: 2 2025-04-07T14:06:02.102216Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490572268876293650:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T14:06:02.102284Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490572268876293650:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T14:06:02.102314Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490572268876293650:2301], Successfully finished 2025-04-07T14:06:02.103069Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T14:06:02.109296Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM= 2025-04-07T14:06:02.109565Z node 1 :KQP_SESSION 
DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [1:7490572268876293666:2302], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:06:02.123476Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T14:06:02.128737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:06:02.131284Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-07T14:06:02.133597Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-07T14:06:02.139133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:06:02.211015Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-07T14:06:02.216879Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572268876293668:2294], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-07T14:06:02.226107Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T14:06:02.226146Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-07T14:06:02.226270Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572268876293728:2304], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T14:06:02.226322Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [1:7490572268876293666:2302], ActorState: ReadyState, TraceId: 01jr8953ff9abc9hy73k2g1e0d, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL default WITH ( CONCURRENT_QUERY_LIMIT=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-07T14:06:02.227815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572268876293728:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:02.227935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:02.626629Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [1:7490572268876293666:2302], ActorState: ExecuteState, TraceId: 01jr8953ff9abc9hy73k2g1e0d, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7490572268876293737:2302] WorkloadServiceCleanup: 0 2025-04-07T14:06:02.632851Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [1:7490572268876293666:2302], ActorState: CleanupState, TraceId: 01jr8953ff9abc9hy73k2g1e0d, EndCleanup, isFinal: 0 2025-04-07T14:06:02.632994Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkNmMxYmUtOGM5Mjk3ZDAtOTk0N2FiM2UtNThkNzk5YTM=, ActorId: [1:7490572268876293666:2302], ActorState: CleanupState, TraceId: 01jr8953ff9abc9hy73k2g1e0d, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7490572255991391380:2256] 2025-04-07T14:06:02.636865Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ= 2025-04-07T14:06:02.636987Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ=, ActorId: [1:7490572268876293739:2305], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:06:02.637113Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-07T14:06:02.637162Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572268876293741:2306], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-07T14:06:02.637184Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ=, ActorId: [1:7490572268876293739:2305], ActorState: ReadyState, TraceId: 01jr8953wdca8j6kjxre11f0nw, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7490572268876293738:2334] database: Root databaseId: /Root pool id: default 2025-04-07T14:06:02.637235Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7490572268876293739:2305], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=1&id=ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ= 2025-04-07T14:06:02.637287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490572268876293742:2307], Database: /Root, Start database fetching 2025-04-07T14:06:02.637387Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7490572268876293742:2307], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-07T14:06:02.637437Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-07T14:06:02.637474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572268876293741:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:02.637494Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7490572268876293745:2308], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=1&id=ZDljYjM4ZWQtMTEzMDlhZTktY2VkMWE5YTAtMWYzZmNiMmQ=, Start pool fetching 2025-04-07T14:06:02.637521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ... State: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, Sending to Executer TraceId: 0 8 2025-04-07T14:06:28.604740Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, Created new KQP executer: [5:7490572380091924304:2418] isRollback: 0 2025-04-07T14:06:28.607248Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=N2ExNDJjMjUtNTJiYmU4NGEtY2MzZGM1NDItYmFkZjNkMTA=, ActorId: [5:7490572367207021794:2303], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:06:28.607287Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=N2ExNDJjMjUtNTJiYmU4NGEtY2MzZGM1NDItYmFkZjNkMTA=, ActorId: [5:7490572367207021794:2303], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:06:28.607427Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2ExNDJjMjUtNTJiYmU4NGEtY2MzZGM1NDItYmFkZjNkMTA=, ActorId: [5:7490572367207021794:2303], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:06:28.607458Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2ExNDJjMjUtNTJiYmU4NGEtY2MzZGM1NDItYmFkZjNkMTA=, ActorId: [5:7490572367207021794:2303], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:06:28.607518Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2ExNDJjMjUtNTJiYmU4NGEtY2MzZGM1NDItYmFkZjNkMTA=, ActorId: [5:7490572367207021794:2303], ActorState: unknown state, Session actor destroyed 2025-04-07T14:06:28.609260Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T14:06:28.609403Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, txInfo Status: Committed Kind: ReadWrite TotalDuration: 11.372 ServerDuration: 11.24 QueriesCount: 2 2025-04-07T14:06:28.609494Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T14:06:28.609550Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:06:28.609599Z node 5 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, EndCleanup, isFinal: 0 2025-04-07T14:06:28.609652Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x7n4sh6nk742mbjcaf7, Sent query response back to proxy, proxyRequestId: 18, proxyId: [5:7490572354322119504:2256] 2025-04-07T14:06:28.610204Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, TxId: 2025-04-07T14:06:28.610302Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-07T14:06:28.610687Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ReadyState, TraceId: 01jr895x820yah4ydaf11ehvdj, received request, proxyRequestId: 19 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [5:7490572380091924316:2426] database: /Root databaseId: /Root pool id: default 2025-04-07T14:06:28.610719Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ReadyState, TraceId: 01jr895x820yah4ydaf11ehvdj, request placed into pool from cache: default 2025-04-07T14:06:28.611195Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, ExecutePhyTx, tx: 0x000050C0002DA6D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-07T14:06:28.611250Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, Sending to Executer TraceId: 0 8 2025-04-07T14:06:28.611322Z node 5 
:KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, Created new KQP executer: [5:7490572380091924319:2418] isRollback: 0 2025-04-07T14:06:28.614965Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-07T14:06:28.615037Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, ExecutePhyTx, tx: 0x000050C00009C7D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-07T14:06:28.615738Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T14:06:28.615833Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, txInfo Status: Committed Kind: ReadOnly TotalDuration: 4.737 ServerDuration: 4.654 QueriesCount: 2 2025-04-07T14:06:28.615918Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T14:06:28.615984Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:06:28.616010Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, EndCleanup, isFinal: 0 2025-04-07T14:06:28.616059Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ExecuteState, TraceId: 01jr895x820yah4ydaf11ehvdj, Sent query response back to proxy, proxyRequestId: 19, proxyId: [5:7490572354322119504:2256] 2025-04-07T14:06:28.616407Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, TxId: 2025-04-07T14:06:28.616500Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, TxId: 2025-04-07T14:06:28.616585Z node 5 :KQP_WORKLOAD_SERVICE TRACE: 
[WorkloadService] [TPoolHandlerActorBase] ActorId: [5:7490572367207021928:2312], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-07T14:06:28.616631Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:06:28.616690Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:06:28.616718Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:06:28.616747Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:06:28.616816Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWQxMjdhMzctYTJjNjIyNzUtNTY1MDkwYjItMzg2MzQ1YWM=, ActorId: [5:7490572380091924284:2418], ActorState: unknown state, Session actor destroyed >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-04-07T14:06:24.121828Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:24.185574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.185639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.188886Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:24.189247Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:24.189542Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:24.201263Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:24.231913Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:24.232400Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:24.234222Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:24.235183Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 
2025-04-07T14:06:24.235255Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:24.236340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:24.260475Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:24.261678Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:24.261798Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:24.261832Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:24.261860Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:24.261888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.262075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.262117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.263115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:24.263212Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:24.263263Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.263298Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:24.263353Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:24.263380Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:24.263405Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:24.263428Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:24.263455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:24.331283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.331363Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.331420Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:24.334893Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:24.334938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:24.335010Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:24.336498Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:24.336550Z node 1 :TX_DATASHARD DEBUG: Propose scheme 
transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:24.336603Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:24.336654Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.336704Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:24.336737Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:24.336769Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.337022Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:24.337069Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:24.337103Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:24.337134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.337195Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:24.337219Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:24.337251Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:24.337278Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.337299Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:24.361510Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:24.361585Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.361632Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.361695Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:24.361760Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:24.363931Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.363989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.364035Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:24.364176Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:24.364237Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:24.364388Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.364444Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.364482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:24.364513Z node 1 :TX_DATASHARD 
TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:24.372694Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:24.372781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.373012Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.373050Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.373099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.373138Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:24.373175Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:24.373217Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:24.373263Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:24.373314Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.373359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:24.373395Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:24.373429Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:24.373593Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:24.373634Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.373670Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:24.373694Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:24.373716Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:24.373777Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.373803Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:24.373846Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:24.373886Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:24.373936Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:24.373970Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:24.374015Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:24.374055Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.374077Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 
2025-04-07T14:06:24.374106Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... [1000005:154] at 9437184 on unit CompleteOperation 2025-04-07T14:06:29.638684Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.638713Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:29.638733Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:29.638896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:29.638940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.638994Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:29.639096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.639122Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.639156Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.639193Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:29.639216Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.639238Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639256Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639272Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.639393Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.639427Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.639474Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:29.639500Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.639519Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.639610Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.639656Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.639703Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 
{TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:29.639722Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.639739Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639753Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:29.639822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.639840Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.639876Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.639916Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:29.639950Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.640044Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.640064Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.640105Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.640142Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:29.640161Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.640255Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.640275Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.640302Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.640346Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:29.640370Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.640428Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:29.640446Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:29.640476Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:06:29.640508Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:29.640524Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:29.640701Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: 
{TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:29.640726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.640746Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:29.640782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:29.640794Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.640811Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-07T14:06:29.640847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:29.640860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.640877Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-07T14:06:29.640914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:29.640930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.640944Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-07T14:06:29.641006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:29.641064Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641078Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-07T14:06:29.641122Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:29.641134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641146Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:29.641180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:29.641191Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641202Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:29.641242Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 
269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:29.641260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641275Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:29.641334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:29.641350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641364Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:29.641410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:434:2384], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:29.641425Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:29.641439Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TFlatIndexCache::End [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 
DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} 
Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 
: 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 
Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups |92.5%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> TChargeBTreeIndex::FewNodes_Sticky >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> TPartBtreeIndexIteration::OneNode_History_Slices >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> TIterator::Serial [GOOD] >> TIterator::SerialReverse |92.5%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> TxOrderInternals::OperationOrder [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> 
DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> TIterator::SerialReverse [GOOD] >> TLegacy::IndexIter >> DataShardOutOfOrder::UncommittedReadSetAck >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::DropModifiedTable [GOOD] Test command err: 10 parts: 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 
6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) key = (470221, 156748) value = 147190 (actual 141318 - 2% error) 10% (actual 11%) key = (564922, 188315) value = 172572 (actual 169665 - 1% error) 10% (actual 11%) key = (654781, 218268) value = 198052 (actual 196636 - 0% error) 10% (actual 11%) key = (744745, 248256) value = 223572 (actual 223623 - 0% error) 6% (actual 6%) DataSizeHistogram: 10% (actual 4%) key = (34876, 11633) value = 2051181 (actual 936371 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 4097308 (actual 3106844 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 6142376 (actual 5275531 - 4% error) 10% (actual 11%) key = (295510, 98511) value = 8302483 (actual 7560005 - 3% error) 10% (actual 11%) key = (385543, 128522) value = 10466012 (actual 9847986 - 3% error) 11% (actual 12%) key = (485089, 161704) value = 12745808 (actual 12376381 - 1% error) 10% (actual 11%) key = (574921, 191648) value = 14910864 (actual 14665877 - 1% error) 10% (actual 10%) key = 
(659821, 219948) value = 16952139 (actual 16831893 - 0% error) 10% (actual 11%) key = (749764, 249929) value = 19112817 (actual 19116870 - 0% error) 6% (actual 6%) 10 parts: 120000 rows, 5095 pages, 6 levels: (79984, 26669) (159964, 53329) (240169, 80064) (319996, 106673) (400177, 133400) 60000 rows, 2548 pages, 6 levels: (440041, 146688) (479950, 159991) (519868, 173297) (559849, 186624) (599833, 199952) 30000 rows, 1273 pages, 5 levels: (619663, 206562) (639565, 213196) (659563, 219862) (679648, 226557) (699550, 233191) 15000 rows, 634 pages, 5 levels: (709513, 236512) (719443, 239822) (729415, 243146) (739393, 246472) (749443, 249822) 7500 rows, 317 pages, 4 levels: (754330, 251451) (759397, 253140) (764335, 254786) (769423, 256482) (774439, 258154) 3750 rows, 159 pages, 4 levels: (776926, 258983) (779407, 259810) (781876, 260633) (784414, 261479) (786847, 262290) 1875 rows, 80 pages, 3 levels: (788179, 262734) (789382, 263135) (790639, 263554) (791884, 263969) (793144, 264389) 937 rows, 40 pages, 3 levels: (793732, 264585) (794365, 264796) (794989, 265004) (795610, 265211) (796246, 265423) 468 rows, 21 pages, 2 levels: (796516, 265513) (796828, 265617) (797182, 265735) (797425, 265816) (797809, 265944) 470 rows, 21 pages, 2 levels: (798118, 266047) (798412, 266145) (798733, 266252) (799042, 266355) (799339, 266454) Checking BTree: Touched 0% bytes, 5 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24511 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48568 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72759 - 0% error) 5% (actual 5%) key = (281245, 93756) value = 84339 (actual 84818 - 0% error) 10% (actual 10%) key = (361831, 120618) value = 108515 (actual 108996 - 0% error) 14% (actual 14%) key = (480337, 160120) value = 144115 (actual 144540 - 0% error) 5% (actual 5%) key = (520480, 173501) value = 156166 (actual 156594 - 0% error) 12% (actual 12%) key = (619723, 206582) value = 186013 (actual 186392 - 0% error) 9% (actual 9%) key = (699586, 233203) value = 210000 (actual 210331 - 0% error) 12% (actual 12%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2088769 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4138455 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6186028 - 0% error) 5% (actual 5%) key = (281245, 93756) value = 7170146 (actual 7210231 - 0% error) 10% (actual 10%) key = (361831, 120618) value = 9219080 (actual 9259232 - 0% error) 14% (actual 14%) key = (480337, 160120) value = 12241592 (actual 12277578 - 0% error) 5% (actual 5%) key = (520480, 173501) value = 13267167 (actual 13303212 - 0% error) 9% (actual 9%) key = (599860, 199961) value = 15290784 (actual 15322806 - 0% error) 12% (actual 12%) key = (699586, 233203) value = 17836473 (actual 17864572 - 0% error) 12% (actual 12%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24488 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48493 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72493 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96517 - 0% error) 10% (actual 9%) key = (400369, 133464) value = 120052 (actual 120477 - 0% error) 10% (actual 10%) key = (480187, 160070) value = 144069 (actual 144495 - 0% error) 10% (actual 10%) key = (560152, 186725) value = 168082 (actual 168505 - 0% error) 10% 
(actual 9%) key = (639976, 213333) value = 192097 (actual 192468 - 0% error) 10% (actual 9%) key = (719863, 239962) value = 216115 (actual 216440 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078707 - 0% error) 10% (actual 10%) key = (159505, 53176) value = 4078259 (actual 4118206 - 0% error) 10% (actual 10%) key = (239938, 79987) value = 6115953 (actual 6156029 - 0% error) 10% (actual 10%) key = (319918, 106647) value = 8154975 (actual 8195015 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192596 (actual 10228980 - 0% error) 10% (actual 10%) key = (479950, 159991) value = 12231489 (actual 12267556 - 0% error) 10% (actual 10%) key = (559930, 186651) value = 14270004 (actual 14305948 - 0% error) 10% (actual 9%) key = (639565, 213196) value = 16308955 (actual 16341020 - 0% error) 10% (actual 9%) key = (719611, 239878) value = 18346862 (actual 18374842 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 45 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24002 (actual 10940 - 5% error) 10% (actual 10%) key = (115162, 38395) value = 48090 (actual 35028 - 5% error) 10% (actual 10%) key = (195502, 65175) value = 72183 (actual 59117 - 5% error) 10% (actual 10%) key = (276202, 92075) value = 96373 (actual 83308 - 5% error) 10% (actual 10%) key = (356899, 118974) value = 120561 (actual 107496 - 5% error) 10% (actual 11%) key = (445249, 148424) value = 145590 (actual 133979 - 4% error) 10% (actual 10%) key = (525553, 175192) value = 169735 (actual 158120 - 4% error) 10% (actual 11%) key = (614770, 204931) value = 195036 (actual 184889 - 4% error) 10% (actual 11%) key = (704617, 234880) value = 220528 (actual 211850 - 3% error) 8 ... (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (553, 192) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 
244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 
(actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: 167 rows, 1 pages, 0 levels: () () () () () 166 rows, 1 pages, 0 levels: () () () () () 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), 
[23701,23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-04-07T14:06:32.915979Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:32.976368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:32.976417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:32.978528Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 
2025-04-07T14:06:32.978938Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:32.979202Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:32.987336Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:33.022591Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:33.023019Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:33.024396Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:33.024473Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:33.024523Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:33.024883Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:33.052799Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:33.053001Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:33.053146Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:33.053182Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:33.053213Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:33.053244Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.053447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.053497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.053914Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:33.054006Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:33.054056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:33.054094Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:33.054136Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:33.054168Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:33.054198Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:33.054224Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:33.054254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.092832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.092907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.092965Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:33.098821Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: 
NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:33.098887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:33.098958Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:33.099126Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:33.099177Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:33.099225Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:33.099262Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:33.099293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:33.099320Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:33.099347Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:33.099679Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:33.099721Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:33.099753Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:33.099788Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:33.099838Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:33.099860Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:33.099903Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:33.099929Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:33.099961Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:33.122380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:33.122444Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:33.122491Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:33.122537Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:33.122600Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:33.123083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.123132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.123168Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# 
[1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:33.123290Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:33.123319Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:33.123444Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:33.123502Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.123539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:33.123573Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:33.126743Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:33.126790Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.126917Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.126941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.126967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:33.126993Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:33.127014Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:33.127039Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:33.127062Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:33.127096Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.127122Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:33.127143Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:33.127167Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:33.127288Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:33.127314Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.127338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:33.127352Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:33.127363Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:33.127397Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:33.127413Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:33.127434Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 
2025-04-07T14:06:33.127455Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:33.127488Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:33.127509Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:33.127530Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:33.127578Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.127592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:33.127607Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 2025-04-07T14:06:33.973175Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.973229Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.973287Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.973359Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:06:33.973409Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.973441Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973497Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.973541Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973556Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.973707Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.973743Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.973781Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:06:33.973820Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.973843Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973867Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.973966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.973991Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.974031Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.974075Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 
SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:06:33.974100Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.974187Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.974210Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.974236Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.974280Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:06:33.974301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.974402Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.974422Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.974446Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.974476Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:06:33.974495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.974531Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.974566Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-04-07T14:06:33.974606Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-07T14:06:33.974689Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.974897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.974921Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.974952Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.974988Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:06:33.975009Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.975136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.975168Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.975216Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.975253Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 
2025-04-07T14:06:33.975279Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.975376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.975395Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.975420Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:33.975445Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.975481Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:33.975563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.975586Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-04-07T14:06:33.975612Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:33.975643Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:06:33.975663Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.975866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:06:33.975914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.975971Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-04-07T14:06:33.976043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:06:33.976067Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976084Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-07T14:06:33.976189Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:06:33.976211Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976229Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-07T14:06:33.976273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:06:33.976293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976311Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-07T14:06:33.976365Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:06:33.976395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976413Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-07T14:06:33.976484Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:06:33.976506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976526Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-07T14:06:33.976566Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:06:33.976585Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976603Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-07T14:06:33.976924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:06:33.976963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:33.976987Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> TPart::MassCheck [GOOD] >> TPart::WreckPart >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] Test command err: 2025-04-07T14:06:26.131303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:26.131574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:26.131801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001994/r3tmp/tmpDwCBDg/pdisk_1.dat 2025-04-07T14:06:26.501492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.554454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.607048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:26.607203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:26.618677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:26.739358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.772395Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:26.773529Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:26.774094Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:26.774330Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:26.804609Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:26.805333Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:26.805467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:26.807056Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:26.807123Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:26.807171Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:26.807471Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:26.840040Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:26.840239Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:26.840347Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:26.840393Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:26.840430Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:26.840462Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:26.840788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:26.840842Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:26.840996Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:26.841076Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:26.841416Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:26.841453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:26.841491Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:26.841555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:26.841587Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:26.841628Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:26.841661Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:26.841697Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:26.841723Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:26.841766Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:26.841859Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:26.841887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:26.841967Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:26.842147Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:26.842193Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:26.842263Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:26.842312Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:26.842356Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:26.842388Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:26.842417Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:26.842625Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:26.842660Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:26.842698Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:26.842727Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:26.842765Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:26.842809Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:26.842834Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:26.842858Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:26.842877Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:26.844007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:26.844042Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:26.854674Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:26.854732Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:26.854768Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:26.854818Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:26.854873Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:27.036742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.036800Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.036826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:27.037153Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:27.037179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:27.037275Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:27.037313Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:27.037352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:27.037384Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:27.041265Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:27.041351Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:27.043193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.043237Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.043283Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:27.043349Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:27.043386Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:27.043433Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 2025-04-07T14:06:34.087584Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-04-07T14:06:34.087792Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2658] TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:851:2684], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 345957 Tasks { TaskId: 1 CpuTimeUs: 343493 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 11 BuildCpuTimeUs: 343482 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034793209 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.087858Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:851:2684] 2025-04-07T14:06:34.087991Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2658] TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-07T14:06:34.088067Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2658] TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:852:2685], task: 2 2025-04-07T14:06:34.093579Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2658] TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 354480 DurationUs: 1744034792141374 Tables { TablePath: "/Root/table-1" } ExecuterCpuTimeUs: 8523 StartTimeMs: 1950 FinishTimeMs: 1744034794091 Stages { StageGuid: "29c32a52-7edc18a3-e8efad0e-bec11aba" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/table-1\" \'\"72057594046644480:2\" \'\"\" \'1))\n (let $2 (KqpWideReadTableRanges $1 (Void) \'(\'\"value2\") \'() \'()))\n (let $3 (DataType \'Uint64))\n (let $4 (lambda \'($9) (Just (SafeCast $9 $3))))\n (let $5 (Nothing (OptionalType $3)))\n (let $6 (Condense1 (NarrowMap $2 (lambda \'($7) (AsStruct \'(\'\"value2\" $7)))) (lambda \'($8) (IfPresent (Member $8 \'\"value2\") $4 $5)) (lambda \'($10 $11) (Bool \'false)) (lambda \'($12 $13) (block \'(\n (let $14 (IfPresent (Member $12 \'\"value2\") $4 $5))\n (return (AggrAdd $14 $13))\n )))))\n (return (FromFlow (ExpandMap $6 (lambda \'($15) $15))))\n))))\n)\n" ComputeActors { CpuTimeUs: 345957 Tasks { TaskId: 1 CpuTimeUs: 343493 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 11 BuildCpuTimeUs: 343482 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034793209 } MaxMemoryUsage: 1048576 } } Stages { StageId: 1 StageGuid: "40d31f3f-f099537b-977d64c3-a35290be" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'false))\n (let $3 (WideCondense1 (ToFlow $1) (lambda \'($5) $5) (lambda \'($6 $7) $2) (lambda \'($8 $9) (AggrAdd $8 $9))))\n (let $4 (Condense (NarrowMap (Take $3 (Uint64 \'1)) (lambda \'($10) (AsStruct \'(\'Sum0 $10)))) (Nothing (OptionalType (StructType \'(\'Sum0 (OptionalType (DataType \'Uint64)))))) (lambda \'($11 $12) $2) (lambda \'($13 $14) (Just $13))))\n (return (FromFlow (Map $4 (lambda \'($15) (AsList (AsStruct \'(\'\"column0\" (Member $15 \'Sum0))))))))\n))))\n)\n" } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeId\":4,\"PlanNodeType\":\"Materialize\",\"Plans\":[{\"Node Type\":\"Aggregate-Limit\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Aggregate-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"ReadColumns\":[\"value2\"],\"ReadRanges\":[\"key (-∞, 
+∞)\"],\"Scan\":\"Parallel\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"29c32a52-7edc18a3-e8efad0e-bec11aba\",\"Stats\":{\"ComputeNodes\":[{\"CpuTimeUs\":345957,\"Tasks\":[{\"ComputeTimeUs\":11,\"Host\":\"ghrun-lqtfcktism\",\"NodeId\":2,\"StartTimeMs\":1744034793209,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"40d31f3f-f099537b-977d64c3-a35290be\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\",\"Subplan Name\":\"CTE precompute_0_0\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\016\010\345\216\025\020\345\216\025\030\345\216\025 \001" } } 2025-04-07T14:06:34.093676Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2658] TxId: 281474976715662. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:34.093778Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:852:2685], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=. TraceId : 01jr89613n6m5xxs7ng3ja44t3. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-04-07T14:06:34.093868Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:852:2685], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=. TraceId : 01jr89613n6m5xxs7ng3ja44t3. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [2:846:2658], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-04-07T14:06:34.093989Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-07T14:06:34.094094Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-07T14:06:34.096704Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:06:34.096974Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, ActorId: [2:820:2658], ActorState: ExecuteState, TraceId: 01jr89613n6m5xxs7ng3ja44t3, Create QueryResponse for error on request, msg: 2025-04-07T14:06:34.097390Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:06:34.097443Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-07T14:06:34.097747Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-07T14:06:34.097817Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2025-04-07T14:06:34.097870Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 0 ProcessProposeTransaction 2025-04-07T14:06:34.097955Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 0 reqId# [2:888:2717] SnapshotReq marker# P0 2025-04-07T14:06:34.098324Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2717] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-07T14:06:34.098441Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-04-07T14:06:34.098591Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:34.098646Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2025-04-07T14:06:34.098700Z node 2 :KQP_EXECUTER INFO: ActorId: [2:887:2658] TxId: 281474976715664. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:06:34.098798Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2658] TxId: 281474976715664. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:34.098855Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2658] TxId: 281474976715664. Ctx: { TraceId: 01jr89613n6m5xxs7ng3ja44t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T14:06:34.098902Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2717] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-07T14:06:34.100055Z node 2 :TX_PROXY DEBUG: Actor# [2:888:2717] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-07T14:06:34.100230Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:559:2486], selfId: [2:50:2097], source: [2:820:2658] 2025-04-07T14:06:34.100531Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:888:2717], Recipient [2:631:2537]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-07T14:06:34.101407Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OGQ3MjlkNDEtZTFkYTdkZWMtY2ZjZTRlZWItMWQwMDgzYzA=, workerId: [2:820:2658], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 544 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:26.537781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:26.537987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:26.538158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019b5/r3tmp/tmp74x34X/pdisk_1.dat 2025-04-07T14:06:26.828413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.864288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.911625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:26.911727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:26.922918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:27.033459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.068625Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:27.069505Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:27.069876Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:27.070090Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:27.109431Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:27.110197Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:27.110285Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:27.111609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:27.111669Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:27.111718Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:27.111980Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:27.145279Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:27.145441Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:27.145557Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:27.145595Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:27.145640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:27.145674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:27.145958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.145996Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.146136Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:27.146206Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:27.146522Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.146554Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.146598Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:27.146658Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:27.146689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:27.146721Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:27.146769Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:27.146815Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:27.146846Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:27.146890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:27.146989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:27.147027Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:27.147102Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:27.147303Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:27.147354Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:27.147431Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:27.147467Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:27.147506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:27.147538Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:27.147567Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:27.147803Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:27.147836Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:27.147879Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:27.147907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:27.147949Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:27.147988Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:27.148018Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:27.148047Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:27.148067Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:27.149278Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:27.149318Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:27.159825Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:27.159885Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:27.159915Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:27.159950Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:27.160000Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:27.338142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.338187Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.338214Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:27.338522Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:27.338552Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:27.338646Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:27.338686Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:27.338724Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:27.338757Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:27.342530Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:27.342587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:27.343224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.343255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.343289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:27.343338Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:27.343369Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:27.343411Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... onId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 918 DurationUs: 7000 Tasks { TaskId: 1 CpuTimeUs: 202 FinishTimeMs: 1744034794366 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 100 BuildCpuTimeUs: 102 WaitInputTimeUs: 4328 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794359 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.367529Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2025-04-07T14:06:34.367619Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2025-04-07T14:06:34.367663Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2025-04-07T14:06:34.368523Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 376 DurationUs: 8000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 83 FinishTimeMs: 1744034794367 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 33 WaitInputTimeUs: 4723 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794359 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.368571Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2025-04-07T14:06:34.368641Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], 2025-04-07T14:06:34.368708Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], 2025-04-07T14:06:34.369308Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 394 DurationUs: 9000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 200 FinishTimeMs: 1744034794368 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 175 BuildCpuTimeUs: 25 WaitInputTimeUs: 4754 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794359 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.369362Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2025-04-07T14:06:34.369403Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1012:2811], 2025-04-07T14:06:34.369464Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. 
Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1012:2811], 2025-04-07T14:06:34.370075Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 678 DurationUs: 10000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 396 FinishTimeMs: 1744034794369 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 350 BuildCpuTimeUs: 46 WaitInputTimeUs: 6914 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794359 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.370121Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2025-04-07T14:06:34.370159Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], 2025-04-07T14:06:34.370190Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], 2025-04-07T14:06:34.370424Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 375 DurationUs: 10000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 172 FinishTimeMs: 1744034794369 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 42 WaitInputTimeUs: 7964 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794359 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.370472Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2025-04-07T14:06:34.370507Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. 
Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1014:2813], 2025-04-07T14:06:34.370537Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1014:2813], 2025-04-07T14:06:34.370783Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1014:2813], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 376 DurationUs: 10000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 166 FinishTimeMs: 1744034794370 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 118 BuildCpuTimeUs: 48 WaitInputTimeUs: 8641 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034794360 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:34.370815Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1014:2813] 2025-04-07T14:06:34.370973Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:34.371057Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr8962gk52mpw5r8634ek19c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ2ZmE1Yy1hYTBjYWM1ZC04OGQ0M2M4MC1jMzM1NzZkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.003715s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-04-07T14:06:26.444358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:26.444631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:26.444849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a07/r3tmp/tmpW7puQJ/pdisk_1.dat 2025-04-07T14:06:26.776557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.816804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.865090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:26.865202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:26.876530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:26.989638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.339337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.625300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:885:2700], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.625400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2705], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.625476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.630354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:27.825777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:899:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:28.129325Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr895w973xj4d6gwjfksw82m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODgyOGM3YjgtNGFiMWU3YjAtYjYyY2VkOWQtNGQzOTIxODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:28.200985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr895wspagmfbh26ncp8cmhs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2QwYjA3NWQtODllM2E2MDItMmY2M2RjYjUtZWM1MzI0Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:28.594928Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr895ww50ta0mxdy7fc62eey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1MzU1M2ItNDg4NTJiNTgtNmI1N2NhYWItMmVmMTYwODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-07T14:06:28.732948Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr895x918mzecsr8tdcx2tkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1MzU1M2ItNDg4NTJiNTgtNmI1N2NhYWItMmVmMTYwODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-07T14:06:28.807219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr895xcd98npr65f1v28qj29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk1NzE5Yi1lYzhlNmQxMy03YzllYWIwNC1iYjgxMTY5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-07T14:06:28.809427Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1135:2806] TxId: 281474976715665. Ctx: { TraceId: 01jr895xcd98npr65f1v28qj29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk1NzE5Yi1lYzhlNmQxMy03YzllYWIwNC1iYjgxMTY5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2025-04-07T14:06:28.824034Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTk1NzE5Yi1lYzhlNmQxMy03YzllYWIwNC1iYjgxMTY5OA==, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jr895xcd98npr65f1v28qj29, Create QueryResponse for error on request, msg: 2025-04-07T14:06:28.826055Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc1MzU1M2ItNDg4NTJiNTgtNmI1N2NhYWItMmVmMTYwODk=, ActorId: [1:1032:2808], ActorState: ExecuteState, TraceId: 01jr895x918mzecsr8tdcx2tkr, Create QueryResponse for error on request, msg: 2025-04-07T14:06:28.826764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr895xcd98npr65f1v28qj29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk1NzE5Yi1lYzhlNmQxMy03YzllYWIwNC1iYjgxMTY5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:28.838668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jr895x918mzecsr8tdcx2tkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1MzU1M2ItNDg4NTJiNTgtNmI1N2NhYWItMmVmMTYwODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:29.105960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr895xm8casgptjjdt6h7zsp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg4N2JiODMtYjE3MWMzMTktMmVjOTlkZGUtZDkwNTAwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-07T14:06:32.594000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:32.594375Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:32.594449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a07/r3tmp/tmpjiUoyk/pdisk_1.dat 2025-04-07T14:06:32.876466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:32.901520Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:32.947417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:32.947518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:32.959086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:33.072643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.329254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.615231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:794:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.615321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:804:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.615383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.619286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:33.814155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:808:2657], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:33.919497Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89624d5kwdr8h26ay2n2pn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMDc0ZTItNjAwOWRkZWMtYzZlMTNjN2MtMjBiZTJmM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:34.001463Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8962ef9pxkbcve20jh2szx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkMGU5YjctZDFmM2IwYjYtNmQ0ZTBkZjQtZGZjZjZiNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-04-07T14:06:34.603194Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:965:2777]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-04-07T14:06:34.604006Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjNkNmU2NTEtNmRjYjY5MC0yYjM0NWEzNC1jN2ZjOTY0Yg==, ActorId: [2:922:2739], ActorState: ExecuteState, TraceId: 01jr8962h27z06ea31c5rertf8, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: 2025-04-07T14:06:26.298039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:26.298281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:26.298495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019d6/r3tmp/tmpUnnAhO/pdisk_1.dat 2025-04-07T14:06:26.607157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.643703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.692557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:26.692701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:26.703790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:26.813977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.154599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.458014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.458125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.458202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:27.462598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:27.666431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:27.980913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr895w40ddgqzcat1j3tx6e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJlYzEwYTMtZTQ0OTFmYTgtM2RkY2UzNGUtYTFmZTFhOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 890 RawX2: 4294969941 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\tz\003\000\000\000\000\000\000\021U\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZGJlYzEwYTMtZTQ0OTFmYTgtM2RkY2UzNGUtYTFmZTFhOTk=\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jr895w40ddgqzcat1j3tx6e9\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\"\n\010\212\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 239 } } 2025-04-07T14:06:28.056836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr895wn2ckcnf2kx73s1asg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFlMzFjNmEtYTA4ODg4NTEtYWZlZTg5MjgtZjM4OGFmMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 912 RawX2: 4294970013 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\220\003\000\000\000\000\000\000\021\235\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\004(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZjFlMzFjNmEtYTA4ODg4NTEtYWZlZTg5MjgtZjM4OGFmMTI=\222\001%\n\007TraceId\022\03201jr895wn2ckcnf2kx73s1asg1\222\001\014\n\010Database\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 719 RawX2: 4294969896 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 175 } } 2025-04-07T14:06:28.636998Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr895wzq12392xd9xwfctjwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-07T14:06:29.056159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr895xmaa30b1pmw2xhsn7z3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY3OWZiZjYtZTYzZDBiNTMtNTg0MDUyZmYtOTU1Nzc5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1002 RawX2: 4294970092 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\352\003\000\000\000\000\000\000\021\354\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\006\006(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZjY3OWZiZjYtZTYzZDBiNTMtNTg0MDUyZmYtOTU1Nzc5Mzk=\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jr895xmaa30b1pmw2xhsn7z3\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 OrderId: 281474976715664 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 21 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 178 } } 2025-04-07T14:06:29.133975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr895xpdbbxq0b6xjbjajsq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:29.208932Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr895xrtaa9w5dct69y8s832, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:06:29.264894Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, ActorId: [1:929:2746], ActorState: ExecuteState, TraceId: 01jr895xtva9ne6t5qgj95nrkw, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T14:06:29.276292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr895xtva9ne6t5qgj95nrkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1051 RawX2: 4294970042 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } 2025-04-07T14:06:29.276979Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888;tx_id=281474976715667; 2025-04-07T14:06:29.277021Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1051 RawX2: 4294970042 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } EvWriteResult 281474976715667 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_BAD_REQUEST Issues { message: "MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888" } Origin: 72075186224037888 TxId: 281474976715667 2025-04-07T14:06:29.282988Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037889;tx_id=281474976715667; 2025-04-07T14:06:29.283037Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented 2025-04-07T14:06:29.283199Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=MjAyYWJmNjYtYzlhNjI0OWItOTM0YTI5MjUtYWIzYmQxMGQ=, ActorId: [1:929:2746], ActorState: CleanupState, TraceId: 01jr895xtva9ne6t5qgj95nrkw, Failed to cleanup:
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005 EvWriteResult 281474976715667 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_BAD_REQUEST Issues { message: "MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037889" } Origin: 72075186224037889 TxId: 281474976715667 2025-04-07T14:06:32.471855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:32.472082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:32.472137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019d6/r3tmp/tmprjUeOi/pdisk_1.dat 2025-04-07T14:06:32.699857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:32.727183Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:32.775284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:32.775411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:32.786821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:32.898682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.196362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.486727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.486822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:802:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.486897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:33.491695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:33.688245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:33.785912Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89620d98n3tpk29cd6ym57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDUwNzI3NC05OWY0N2JjNi1jOTdhYTcyYy02YzljNDNkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:33.859293Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8962aae3148n7mcg4arp00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTkyZGFmNmYtODA4MTE2NzMtYjI1MTEwNmUtNDE3MzZkYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:34.452696Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr8962mt9ekng5d1trgyexnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzA5M2E3MDEtYTIzYmUzYWYtNDcwYzc4Y2ItYmRjNWFjMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-07T14:06:34.900044Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr8963afdp2vv8c4h1aqw07m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWQ0YjNjODYtMWJiYjdiZjYtNGMwZTVjMDktZWNkMTg0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:34.984604Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8963d2d4ggn9qva7a5wpan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzA5M2E3MDEtYTIzYmUzYWYtNDcwYzc4Y2ItYmRjNWFjMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:35.055709Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr8963fb12zqcb7mes8defde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzA5M2E3MDEtYTIzYmUzYWYtNDcwYzc4Y2ItYmRjNWFjMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:35.126893Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzA5M2E3MDEtYTIzYmUzYWYtNDcwYzc4Y2ItYmRjNWFjMzQ=, ActorId: [2:929:2746], ActorState: ExecuteState, TraceId: 01jr8963hj4qrrxkt7v0c8znzg, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T14:06:35.138576Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr8963hj4qrrxkt7v0c8znzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzA5M2E3MDEtYTIzYmUzYWYtNDcwYzc4Y2ItYmRjNWFjMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> DataShardScan::ScanFollowedByUpdate >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> DataShardTxOrder::RandomPoints_DelayRS >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-04-07T14:06:30.257330Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:30.329668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:30.329731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:30.331523Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:30.331838Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:30.332061Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:30.341325Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:30.374467Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:30.374883Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:30.376417Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:30.376500Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:30.376559Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:30.376909Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:30.397085Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:30.397298Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:30.397431Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:30.397489Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:30.397535Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:30.397569Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2025-04-07T14:06:30.397787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:30.397838Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:30.398236Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:30.398331Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:30.398428Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:30.398483Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:30.398529Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:30.398563Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:30.398600Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:30.398633Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:30.398674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:30.437485Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:30.437556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:30.437603Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:30.439966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:30.440040Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:30.440136Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:30.440314Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:30.440385Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:30.440434Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:30.440476Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:30.440513Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:30.440570Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:30.440620Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:30.440937Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:30.440990Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit 
StoreSchemeTx 2025-04-07T14:06:30.441024Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:30.441065Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:30.441123Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:30.441152Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:30.441197Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:30.441227Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:30.441250Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:30.464093Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:30.464185Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:30.464223Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:30.464276Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:30.464351Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:30.464890Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:30.464946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:30.464992Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:30.465120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:30.465154Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:30.465324Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:30.465385Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:30.465431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:30.465470Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:30.468347Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:30.468410Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:30.468618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:30.468647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:30.468713Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:30.468744Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:30.468767Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:30.468801Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:30.468827Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:30.468887Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:30.468924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:30.468960Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:30.468984Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:30.469142Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:30.469180Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:30.469226Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:30.469251Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:30.469273Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:30.469324Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:30.469343Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:30.469383Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:30.469410Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:30.469471Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:30.469506Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:30.469617Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:30.469683Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:30.469706Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:30.469732Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
om 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:35.971109Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:35.971138Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.971258Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:35.971293Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 99 Flags# 0} 2025-04-07T14:06:35.971328Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:06:35.971364Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 100 Flags# 0} 2025-04-07T14:06:35.971408Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.971441Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2025-04-07T14:06:35.971487Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:35.971514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.971612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.971639Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-07T14:06:35.971669Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:35.971693Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.971778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.971805Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2025-04-07T14:06:35.971849Z node 1 :TX_DATASHARD DEBUG: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2025-04-07T14:06:35.971909Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.971936Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-07T14:06:35.971984Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:35.972011Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.972096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.972128Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-04-07T14:06:35.972189Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:35.972217Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.972295Z 
node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:35.972317Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-07T14:06:35.972349Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:35.972384Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:35.972551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:35.972583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:35.972611Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-07T14:06:35.972723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:35.972748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:35.972772Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:35.972908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:35.972932Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:35.972959Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-07T14:06:35.973058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:432:2382]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-07T14:06:35.973087Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T14:06:35.973120Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-04-07T14:06:35.973192Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-07T14:06:35.973234Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-04-07T14:06:35.973293Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:06:35.973406Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:35.973446Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:35.973471Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 
2025-04-07T14:06:35.973533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:432:2382], Recipient [1:432:2382]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.973558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.973598Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-07T14:06:35.973631Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:06:35.973669Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-07T14:06:35.973698Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-07T14:06:35.973739Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:35.973772Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-07T14:06:35.973811Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-07T14:06:35.973837Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-07T14:06:35.974295Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-07T14:06:35.974342Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:35.974416Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-07T14:06:35.974463Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-07T14:06:35.974489Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-07T14:06:35.974515Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:35.974700Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-07T14:06:35.974727Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-07T14:06:35.974751Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-07T14:06:35.974776Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-07T14:06:35.974805Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:35.974824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-07T14:06:35.974864Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-07T14:06:35.974896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:35.974920Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-07T14:06:35.974946Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 
2025-04-07T14:06:35.974971Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-07T14:06:35.992774Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:35.992839Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:35.992899Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:06:35.992981Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:35.993028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:35.993334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:35.993399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:35.993430Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] Test command err: 2025-04-07T14:06:27.436859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:27.437132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:27.437350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001980/r3tmp/tmpdMYrtb/pdisk_1.dat 2025-04-07T14:06:27.756023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.790224Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:27.842772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:27.842944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:27.854423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:27.969127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:28.000729Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:28.001727Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:28.002123Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:28.002349Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:28.041915Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:28.042597Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:28.042693Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:28.044209Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:28.044286Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:28.044342Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:28.044631Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:28.079223Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:28.079527Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:28.079668Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:28.079705Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:28.079751Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:28.079791Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:28.080153Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:28.080217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:28.080369Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:28.080446Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:28.080845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:28.080888Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:28.080930Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:28.080983Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:28.081022Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:28.081056Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:28.081089Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:28.081152Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:28.081194Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:28.081245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:28.081377Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:28.081415Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:28.081508Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:28.081717Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:28.081772Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:28.081864Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:28.081910Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:28.081955Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:28.081992Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:28.082023Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:28.082251Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:28.082279Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:28.082326Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:28.082360Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:28.082408Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:28.082447Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:28.082482Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:28.082509Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:28.082532Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:28.083775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:28.083820Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:28.094574Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:28.094640Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:28.094677Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:28.094722Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:28.094782Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:28.274459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:28.274512Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:28.274553Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:28.274934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:28.274966Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:28.275062Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:28.275125Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:28.275168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:28.275200Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:28.279234Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:28.279296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:28.280148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:28.280204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:28.280252Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:28.280315Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:28.280360Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:28.280425Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... : 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2845], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 552 DurationUs: 7000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 344 FinishTimeMs: 1744034796098 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 295 BuildCpuTimeUs: 49 WaitInputTimeUs: 4729 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.099567Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2845] 2025-04-07T14:06:36.099609Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1047:2844], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.099644Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1047:2844], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.099971Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2844], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 393 DurationUs: 7000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 93 FinishTimeMs: 1744034796098 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 65 BuildCpuTimeUs: 28 WaitInputTimeUs: 4828 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.100008Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2844] 2025-04-07T14:06:36.100053Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.100097Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.100536Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2846], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 393 DurationUs: 7000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 214 FinishTimeMs: 1744034796098 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 194 BuildCpuTimeUs: 20 WaitInputTimeUs: 4884 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.100572Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2846] 2025-04-07T14:06:36.100612Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2847], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.100645Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. 
Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2847], CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.100881Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2847], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 557 DurationUs: 9000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 329 FinishTimeMs: 1744034796100 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 287 BuildCpuTimeUs: 42 WaitInputTimeUs: 6456 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.100913Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2847] 2025-04-07T14:06:36.100946Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.100976Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1051:2848], CA [2:1052:2849], 2025-04-07T14:06:36.101175Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2848], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 327 DurationUs: 9000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 147 FinishTimeMs: 1744034796100 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 97 BuildCpuTimeUs: 50 WaitInputTimeUs: 7083 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.101213Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2848] 2025-04-07T14:06:36.101246Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. 
Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1052:2849], 2025-04-07T14:06:36.101277Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1052:2849], 2025-04-07T14:06:36.101496Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2849], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 321 DurationUs: 10000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 141 FinishTimeMs: 1744034796101 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 118 BuildCpuTimeUs: 23 WaitInputTimeUs: 7649 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034796091 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:36.101528Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1052:2849] 2025-04-07T14:06:36.101687Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:36.101749Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2826] TxId: 281474976715666. Ctx: { TraceId: 01jr89647qe0xgfmtbxzhb8ns4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyNGM1ZS1hZjRlOTBhNy05Y2I1Zjg4YS0xMjdkNzJlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003435s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::DelayData >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b (0, 1) | 1 2 50b (0, 4) | 2 4 50b (0, 7) | 3 6 50b (0, 10) | 4 8 50b (1, 3) | 5 10 50b (1, 6) | 6 12 50b (1, 8) | 7 14 50b (2, NULL) | 8 16 50b (2, 4) | 10 18 50b (2, 7) | 11 20 50b (2, 10) | 12 22 50b (3, 3) | 13 24 50b (3, 6) | 15 26 50b (3, 8) | 16 28 50b (4, NULL) | 17 30 50b (4, 4) | 18 32 50b (4, 7) | 19 34 50b (4, 10) | 21 36 50b (5, 3) | 22 38 50b (5, 6) | 22 39 50b (5, 7) + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 GroupDataSize: 3910 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 50 GroupDataSize: 76 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 100 GroupDataSize: 290 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 
DataSize: 150 GroupDataSize: 442 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 200 GroupDataSize: 656 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 250 GroupDataSize: 870 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 300 GroupDataSize: 1023 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 350 GroupDataSize: 1239 ErasedRowCount: 0 | > (2, NULL) | PageId: 7 RowCount: 16 DataSize: 400 GroupDataSize: 1455 ErasedRowCount: 0 | > (2, 4) | PageId: 8 RowCount: 18 DataSize: 450 GroupDataSize: 1609 ErasedRowCount: 0 | > (2, 7) | PageId: 10 RowCount: 20 DataSize: 500 GroupDataSize: 1825 ErasedRowCount: 0 | > (2, 10) | PageId: 11 RowCount: 22 DataSize: 550 GroupDataSize: 2041 ErasedRowCount: 0 | > (3, 3) | PageId: 12 RowCount: 24 DataSize: 600 GroupDataSize: 2195 ErasedRowCount: 0 | > (3, 6) | PageId: 13 RowCount: 26 DataSize: 650 GroupDataSize: 2411 ErasedRowCount: 0 | > (3, 8) | PageId: 15 RowCount: 28 DataSize: 700 GroupDataSize: 2627 ErasedRowCount: 0 | > (4, NULL) | PageId: 16 RowCount: 30 DataSize: 750 GroupDataSize: 2781 ErasedRowCount: 0 | > (4, 4) | PageId: 17 RowCount: 32 DataSize: 800 GroupDataSize: 2997 ErasedRowCount: 0 | > (4, 7) | PageId: 18 RowCount: 34 DataSize: 850 GroupDataSize: 3213 ErasedRowCount: 0 | > (4, 10) | PageId: 19 RowCount: 36 DataSize: 900 GroupDataSize: 3367 ErasedRowCount: 0 | > (5, 3) | PageId: 21 RowCount: 38 DataSize: 950 GroupDataSize: 3583 ErasedRowCount: 0 | > (5, 6) | PageId: 22 RowCount: 40 DataSize: 1000 GroupDataSize: 3910 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{10} Label{104 rev 1, 50b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{11} Label{114 rev 1, 50b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{12} Label{124 rev 1, 50b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{13} Label{134 rev 1, 50b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{15} Label{154 rev 1, 50b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{16} Label{164 rev 1, 50b}, [28, +2)row | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) + Rows{17} Label{174 rev 1, 50b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{18} Label{184 rev 1, 50b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{19} Label{194 rev 1, 50b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{21} Label{214 rev 1, 50b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{22} Label{224 rev 1, 50b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5889b 40r} data 9293b + FlatIndex{41} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 136b (0, 1) | 2 2 136b (0, 4) | 4 4 152b (0, 7) | 6 6 136b (0, 10) | 8 8 136b (1, 3) | 10 10 154b (1, 6) | 12 12 138b (1, 8) | 14 14 138b (2, NULL) | 16 16 154b (2, 4) | 18 18 
138b (2, 7) | 20 20 138b (2, 10) | 22 22 154b (3, 3) | 24 24 138b (3, 6) | 26 26 138b (3, 8) | 28 28 154b (4, NULL) | 30 30 138b (4, 4) | 32 32 138b (4, 7) | 34 34 154b (4, 10) | 36 36 138b (5, 3) | 39 38 138b (5, 6) | 39 39 138b (5, 7) + BTreeIndex{PageId: 42 RowCount: 40 DataSize: 2846 GroupDataSize: 3043 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 136 ErasedRowCount: 0 | > (0, 4) | PageId: 2 RowCount: 4 DataSize: 272 GroupDataSize: 154 ErasedRowCount: 0 | > (0, 7) | PageId: 4 RowCount: 6 DataSize: 424 GroupDataSize: 308 ErasedRowCount: 0 | > (0, 10) | PageId: 6 RowCount: 8 DataSize: 560 GroupDataSize: 462 ErasedRowCount: 0 | > (1, 3) | PageId: 8 RowCount: 10 DataSize: 696 GroupDataSize: 616 ErasedRowCount: 0 | > (1, 6) | PageId: 10 RowCount: 12 DataSize: 850 GroupDataSize: 771 ErasedRowCount: 0 | > (1, 8) | PageId: 12 RowCount: 14 DataSize: 988 GroupDataSize: 927 ErasedRowCount: 0 | > (2, NULL) | PageId: 14 RowCount: 16 DataSize: 1126 GroupDataSize: 1083 ErasedRowCount: 0 | > (2, 4) | PageId: 16 RowCount: 18 DataSize: 1280 GroupDataSize: 1239 ErasedRowCount: 0 | > (2, 7) | PageId: 18 RowCount: 20 DataSize: 1418 GroupDataSize: 1395 ErasedRowCount: 0 | > (2, 10) | PageId: 20 RowCount: 22 DataSize: 1556 GroupDataSize: 1551 ErasedRowCount: 0 | > (3, 3) | PageId: 22 RowCount: 24 DataSize: 1710 GroupDataSize: 1707 ErasedRowCount: 0 | > (3, 6) | PageId: 24 RowCount: 26 DataSize: 1848 GroupDataSize: 1863 ErasedRowCount: 0 | > (3, 8) | PageId: 26 RowCount: 28 DataSize: 1986 GroupDataSize: 2019 ErasedRowCount: 0 | > (4, NULL) | PageId: 28 RowCount: 30 DataSize: 2140 GroupDataSize: 2175 ErasedRowCount: 0 | > (4, 4) | PageId: 30 RowCount: 32 DataSize: 2278 GroupDataSize: 2331 ErasedRowCount: 0 | > (4, 7) | PageId: 32 RowCount: 34 DataSize: 2416 GroupDataSize: 2487 ErasedRowCount: 0 | > (4, 10) | PageId: 34 RowCount: 36 DataSize: 2570 GroupDataSize: 2643 ErasedRowCount: 0 | > (5, 3) | PageId: 36 RowCount: 38 DataSize: 2708 GroupDataSize: 2799 ErasedRowCount: 0 | > (5, 6) | PageId: 39 RowCount: 40 DataSize: 2846 GroupDataSize: 3043 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 136b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{2} Label{24 rev 1, 136b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{4} Label{44 rev 1, 152b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{6} Label{64 rev 1, 136b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{8} Label{84 rev 1, 136b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{10} Label{104 rev 1, 154b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{12} Label{124 rev 1, 138b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 
12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{14} Label{144 rev 1, 138b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{16} Label{164 rev 1, 154b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{18} Label{184 rev 1, 138b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{20} Label{204 rev 1, 138b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{22} Label{224 rev 1, 154b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{24} Label{244 rev 1, 138b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{26} Label{264 rev 1, 138b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{28} Label{284 rev 1, 154b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{30} Label{304 rev 1, 138b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{32} Label{324 rev 1, 138b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{34} Label{344 rev 1, 154b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{36} Label{364 rev 1, 138b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{39} Label{394 rev 1, 138b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [3 ... 
xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + 
Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 7 12 122b (1, 8) | 8 14 122b (2, NULL) | 9 16 122b (2, 4) | 11 18 122b (2, 7) | 12 20 122b (2, 10) | 13 22 122b (3, 3) | 15 24 122b (3, 6) | 16 26 122b (3, 8) | 17 28 122b (4, NULL) | 19 30 122b (4, 4) | 20 32 122b (4, 7) | 21 34 122b (4, 10) | 24 36 122b (5, 3) | 25 38 122b (5, 6) | 25 39 122b (5, 7) + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) 
{Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: 
(4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> DataShardOutOfOrder::TestSnapshotReadPriority >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> DataShardTxOrder::RandomPointsAndRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] Test command err: 2025-04-07T14:06:26.554062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:26.554257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:26.554407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019c4/r3tmp/tmp6b7tU5/pdisk_1.dat 2025-04-07T14:06:26.834600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:26.864296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:26.911486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:26.911588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:26.922911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:27.033756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:27.063026Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:27.063882Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:27.064235Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:27.064449Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:27.100642Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:27.101496Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:27.101611Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:27.103280Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:27.103354Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:27.103411Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:27.103747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:27.140446Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:27.140889Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:27.141311Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:27.141369Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:27.141421Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:27.141457Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:27.141837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.141886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.142136Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:27.142234Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:27.142591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.142629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.142665Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:27.142784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:27.142820Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:27.142959Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:27.143001Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:27.143035Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:27.143062Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:27.143111Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:27.143244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:27.143280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:27.143364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:27.143568Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:27.143626Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:27.143720Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:27.143765Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:27.143805Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:27.143831Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:27.143857Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:27.144068Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:27.144103Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:27.144134Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:27.144166Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:27.144240Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:27.144287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:27.144321Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:27.144353Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:27.144395Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:27.145561Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:27.145600Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:27.156273Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:27.156333Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:27.156375Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:27.156539Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:27.156599Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:27.336270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.336323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:27.336358Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:27.336723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:27.336760Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:27.336855Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:27.336893Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:27.336929Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:27.336969Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:27.341195Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:27.341260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:27.342008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.342045Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:27.342103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:27.342159Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:27.342196Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:27.342234Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... ed key sets: 1 2025-04-07T14:06:37.969011Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:06:37.969090Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-07T14:06:37.969236Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:06:37.969377Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2025-04-07T14:06:37.969426Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-07T14:06:37.969476Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-04-07T14:06:37.969523Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:06:37.969565Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2025-04-07T14:06:37.969789Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:06:37.969840Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1207:2962], 2025-04-07T14:06:37.969889Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1207:2962], 2025-04-07T14:06:37.969938Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T14:06:37.970377Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1207:2962], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T14:06:37.970427Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1207:2962], 2025-04-07T14:06:37.970483Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1207:2962], 2025-04-07T14:06:37.970788Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1209:2962], Recipient [2:1131:2914]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-07T14:06:37.970851Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:06:37.970900Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v4001/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2025-04-07T14:06:37.970934Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-04-07T14:06:37.970985Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-07T14:06:37.971052Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:06:37.971081Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:06:37.971112Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:37.971140Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:37.971176Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-07T14:06:37.971211Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:06:37.971239Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:37.971260Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:06:37.971283Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:37.971386Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-07T14:06:37.971549Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1209:2962], 0} after executionsCount# 1 2025-04-07T14:06:37.971597Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1209:2962], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:37.971660Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1209:2962], 0} finished in read 2025-04-07T14:06:37.971709Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:06:37.971726Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:37.971743Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:37.971761Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-04-07T14:06:37.971794Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:06:37.971808Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:37.971828Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-07T14:06:37.971857Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:06:37.972364Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1209:2962], Recipient [2:1131:2914]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:37.972439Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T14:06:37.973067Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1207:2962], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 689 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 136 FinishTimeMs: 1744034797972 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 53 BuildCpuTimeUs: 83 WaitInputTimeUs: 1452 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034797970 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:37.973128Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1207:2962] 2025-04-07T14:06:37.973278Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:37.973343Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1203:2962] TxId: 281474976715671. Ctx: { TraceId: 01jr8966a73zbh2j2sjwfafbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmIzNzMyM2EtMTZlNGI0OWItMzVlYTNmOC1kZjViZWNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000689s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> DataShardOutOfOrder::TestReadTableWriteConflict >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-04-07T14:06:34.585222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:34.585473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:34.585701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001918/r3tmp/tmpQ1dD8f/pdisk_1.dat 2025-04-07T14:06:34.899931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:34.930875Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:34.977597Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:06:34.978415Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:06:34.978612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:34.978728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:34.989975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:35.099751Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:06:35.099807Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:06:35.099919Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:06:35.196883Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:06:35.197472Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:06:35.197556Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:06:35.197845Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:06:35.198009Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:06:35.198097Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:06:35.198354Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:06:35.199660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:35.200535Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:06:35.200599Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:06:35.231601Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:35.232500Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:35.233810Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:35.234214Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2025-04-07T14:06:35.234427Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:35.271772Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:35.271861Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:35.272300Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:640:2542] 2025-04-07T14:06:35.272548Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:35.280854Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:35.281607Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:35.281711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:35.283169Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:35.283235Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:35.283285Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:35.283634Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:35.322260Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:35.322440Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:35.322537Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2025-04-07T14:06:35.322571Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:35.322604Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:06:35.322649Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:06:35.322914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.322951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.323266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:35.323442Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:35.323524Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:35.323569Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:35.324819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:06:35.324884Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:06:35.324934Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:06:35.325161Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:35.325209Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:06:35.325275Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:35.325333Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2025-04-07T14:06:35.325355Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:06:35.325376Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:06:35.325411Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:06:35.325535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:35.325574Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:35.325607Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:35.325647Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:35.325676Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:35.325705Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:35.325745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:35.325850Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:640:2542], Recipient [1:640:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.325881Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.326316Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2549], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.326352Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.326387Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2549], sessionId# [0:0:0] 
2025-04-07T14:06:35.326594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2549] 2025-04-07T14:06:35.326646Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:35.326745Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:35.326942Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:35.326995Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:35.327071Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:35.327108Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:35.327137Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:35.327162Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 720751 ... 2025-04-07T14:06:40.575078Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:40.575112Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:40.575161Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-04-07T14:06:40.575195Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-07T14:06:40.575221Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:40.575240Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:06:40.575264Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:06:40.575301Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:40.575354Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2025-04-07T14:06:40.575541Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:06:40.575592Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:40.575621Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:06:40.575659Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:40.575695Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.575772Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:40.575804Z node 2 :TX_DATASHARD TRACE: Advance execution plan 
for [0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:40.575840Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:40.575873Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:40.575911Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-07T14:06:40.575935Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:40.575963Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-04-07T14:06:40.586680Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:40.586750Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.586797Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:06:40.586871Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:40.588370Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2025-04-07T14:06:40.588416Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715661 ProcessProposeTransaction 2025-04-07T14:06:40.588483Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:821:2658] DataReq marker# P0 2025-04-07T14:06:40.588595Z node 2 :TX_PROXY DEBUG: Actor# [2:821:2658] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-04-07T14:06:40.588839Z node 2 :TX_PROXY DEBUG: Actor# [2:821:2658] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-07T14:06:40.589005Z node 2 :TX_PROXY DEBUG: Actor# [2:821:2658] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-07T14:06:40.589087Z node 2 :TX_PROXY DEBUG: Actor# [2:821:2658] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-04-07T14:06:40.589392Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:821:2658], Recipient [2:631:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 821 RawX2: 8589937250 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t5\003\000\000\000\000\000\000\021b\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-04-07T14:06:40.589431Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:40.589502Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:40.589643Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-07T14:06:40.589711Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:06:40.589754Z node 2 :TX_DATASHARD TRACE: 
Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:06:40.589795Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:06:40.589825Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:40.589852Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:40.589884Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-07T14:06:40.589923Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-07T14:06:40.589953Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:06:40.589971Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:40.589986Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-07T14:06:40.590002Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-04-07T14:06:40.590020Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:06:40.590041Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-07T14:06:40.590068Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-07T14:06:40.590084Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:06:40.590137Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:821:2658] for [0:281474976715661] at 72075186224037888 2025-04-07T14:06:40.590167Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-07T14:06:40.590204Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:40.590269Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-04-07T14:06:40.590325Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-04-07T14:06:40.590362Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-04-07T14:06:40.590457Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:821:2658], Recipient [2:631:2537]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-04-07T14:06:40.590482Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-07T14:06:40.590569Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:821:2658], Recipient [2:631:2537]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-07T14:06:40.590593Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-07T14:06:40.590660Z node 2 :TX_DATASHARD TRACE: StateWork, received 
event# 2146435072, Sender [2:631:2537], Recipient [2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.590681Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.590723Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:40.590761Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:06:40.590804Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-07T14:06:40.590833Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:06:40.590870Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-04-07T14:06:40.590908Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:06:40.590942Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-07T14:06:40.590978Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-04-07T14:06:40.591013Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-04-07T14:06:40.591220Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-07T14:06:40.591244Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:06:40.591270Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:40.591307Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:40.591341Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:40.591397Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:40.591763Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:827:2663], Recipient [2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:06:40.591800Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit >> ReadSessionImplTest::DataReceivedCallback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:33.232793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:33.233009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:33.233171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001947/r3tmp/tmpQZBi34/pdisk_1.dat 2025-04-07T14:06:33.509764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.545848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:33.593278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:33.593401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:33.604741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:33.716059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:33.747488Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:33.748463Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:33.748878Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:33.749097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:33.790575Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:33.791293Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:33.791399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:33.792969Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:33.793036Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:33.793089Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:33.793367Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:33.827591Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:33.827771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:33.827884Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:33.827924Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:33.827966Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:33.828002Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:33.828340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.828386Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.828536Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:33.828628Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:33.829022Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.829069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.829108Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:33.829161Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:33.829195Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:33.829230Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:33.829282Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:33.829332Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:33.829362Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:33.829413Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:33.829530Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:33.829569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:33.829656Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:33.829863Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:33.829918Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:33.829998Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:33.830041Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:33.830094Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:33.830130Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:33.830158Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:33.830390Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:33.830421Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:33.830472Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:33.830504Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:33.830552Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:33.830594Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:33.830625Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:33.830653Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:33.830676Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:33.831940Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:33.831986Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:33.842649Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:33.842712Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:33.842743Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:33.842790Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:33.842843Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:34.024261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:34.024315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:34.024349Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:34.024787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:34.024825Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:34.024946Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:34.024998Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:34.025040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:34.025076Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
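
The GetMvccTxVersion traces in this output print version edges as v<step>/<txId> pairs (for example CompleteEdge# v1000/281474976715657, ImmediateWriteEdge# v1500/18446744073709551615, and v{min} for an unset edge). A hedged sketch of how such pairs can be ordered, assuming plain lexicographic comparison on (step, txId); the type below is illustrative and not the actual YDB row-version type.

    // Illustrative (step, txId) version ordering; not the real YDB types.
    #include <cstdint>
    #include <iostream>
    #include <tuple>

    struct TRowVersion {
        uint64_t Step;  // global plan step; v{min} would correspond to 0/0
        uint64_t TxId;
        bool operator<(const TRowVersion& rhs) const {
            return std::tie(Step, TxId) < std::tie(rhs.Step, rhs.TxId);
        }
    };

    int main() {
        TRowVersion completeEdge{1000, 281474976715657};              // v1000/281474976715657
        TRowVersion readVersion{1500, 18446744073709551615ull};       // v1500/max
        // A read at v1500/max is above the complete edge, so it can observe those writes.
        std::cout << (completeEdge < readVersion ? "edge below read version\n"
                                                 : "edge above read version\n");
    }
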
2025-04-07T14:06:34.029327Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:34.029395Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:34.030259Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:34.030297Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:34.030338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:34.030392Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:34.030429Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:34.030488Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... ssionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 892 DurationUs: 7000 Tasks { TaskId: 1 CpuTimeUs: 201 FinishTimeMs: 1744034801197 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 94 BuildCpuTimeUs: 107 WaitInputTimeUs: 4175 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.197884Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2025-04-07T14:06:41.197942Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2025-04-07T14:06:41.197974Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2025-04-07T14:06:41.198571Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 397 DurationUs: 8000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 86 FinishTimeMs: 1744034801198 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 55 BuildCpuTimeUs: 31 WaitInputTimeUs: 4583 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.198609Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2025-04-07T14:06:41.198650Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], 2025-04-07T14:06:41.198704Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1011:2810], CA [2:1012:2811], 2025-04-07T14:06:41.199192Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 371 DurationUs: 8000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 197 FinishTimeMs: 1744034801198 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 172 BuildCpuTimeUs: 25 WaitInputTimeUs: 4664 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.199242Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2025-04-07T14:06:41.199278Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], CA [2:1012:2811], 2025-04-07T14:06:41.199311Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. 
Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], CA [2:1012:2811], 2025-04-07T14:06:41.199848Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 522 DurationUs: 9000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 299 FinishTimeMs: 1744034801199 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 257 BuildCpuTimeUs: 42 WaitInputTimeUs: 6261 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.199886Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2025-04-07T14:06:41.199920Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1014:2813], 2025-04-07T14:06:41.199953Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1014:2813], 2025-04-07T14:06:41.200166Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 324 DurationUs: 9000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 147 FinishTimeMs: 1744034801199 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 104 BuildCpuTimeUs: 43 WaitInputTimeUs: 7011 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.200215Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2025-04-07T14:06:41.200248Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. 
Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1014:2813], 2025-04-07T14:06:41.200291Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1014:2813], 2025-04-07T14:06:41.200518Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1014:2813], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 368 DurationUs: 10000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 169 FinishTimeMs: 1744034801200 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 136 BuildCpuTimeUs: 33 WaitInputTimeUs: 7561 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034801190 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:41.200551Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1014:2813] 2025-04-07T14:06:41.200706Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:41.200761Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1002:2790] TxId: 281474976715665. Ctx: { TraceId: 01jr89696w6q6tezc6dvd467n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5MmEyYzQtY2QwYjFiYmMtMWM0NjdjYjgtNWU4NjMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003473s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite >> DataShardTxOrder::ZigZag ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-04-07T14:06:10.736225Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.736273Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.736287Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.736759Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.745043Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.745194Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.745615Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.746068Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.746180Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.746257Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.746291Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:06:10.746999Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.747024Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.747043Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.747340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.747822Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.747909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.748079Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.748463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.748559Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.748639Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T14:06:10.748691Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:06:10.749541Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.749561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.749587Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.749938Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.750357Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.750461Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.750658Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.751356Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.751537Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.751669Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.751702Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:06:10.752677Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.752701Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.752718Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.752977Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.753600Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.753695Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.753883Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.759447Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.762162Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.762306Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.762373Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:06:10.763138Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.763155Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.763174Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.763448Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:06:10.764167Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.764315Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.764530Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.764965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765075Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.765160Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.765205Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:06:10.765838Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765881Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.765928Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.766200Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.766777Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.766858Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.767126Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.767516Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.767613Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.767690Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.767745Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:06:10.768624Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768646Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.768698Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.768964Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.769357Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.769446Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.769605Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.770295Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.770469Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.770561Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T14:06:10.770603Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:06:10.771458Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.771499Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.771522Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:10.771765Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:10.772149Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:10.772255Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.772408Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:10.773767Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.774156Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:10.774231Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:10.774268Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:06:10.778212Z :ReadSession INFO: Random seed for debugging is 1744034770778193 2025-04-07T14:06:11.034295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572304984481835:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.034820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.205375Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.213750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572305922716262:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.213858Z node 2 :PQ_READ_PRO ... [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-07T14:06:29.054800Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-07T14:06:29.047000Z WriteTime: 2025-04-07T14:06:29.048000Z Ip: "ipv6:[::1]:59884" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:59884" } } } 2025-04-07T14:06:29.054970Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T14:06:29.055047Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 got read request: guid# a6d32a7-16907060-63499f8-7d2454a8 2025-04-07T14:06:29.055056Z :DEBUG: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] [dc1] Commit offsets [2, 3). 
Partition stream id: 1 2025-04-07T14:06:29.055315Z :DEBUG: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:06:29.055458Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-04-07T14:06:29.055659Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:06:29.055700Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:06:29.055731Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-04-07T14:06:29.055876Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:06:29.055913Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:06:29.056014Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_17903046697354361383_v1 2025-04-07T14:06:29.056164Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T14:06:29.058303Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:06:29.058353Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-07T14:06:29.058368Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:06:29.058442Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-07T14:06:29.058485Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-04-07T14:06:29.058499Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-04-07T14:06:29.058894Z :DEBUG: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-04-07T14:06:29.148723Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0] Write session will now close 2025-04-07T14:06:29.148788Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0] Write session: aborting 2025-04-07T14:06:29.149940Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0 grpc read done: success: 0 data: 2025-04-07T14:06:29.149984Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0 grpc read failed 2025-04-07T14:06:29.150008Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0 grpc closed 2025-04-07T14:06:29.150024Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0 is DEAD 2025-04-07T14:06:29.150855Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:06:29.150995Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:06:29.151036Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490572382293896410:2625] destroyed 2025-04-07T14:06:29.151093Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
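
The read-session traces around this point show the client committing the half-open offset range [2, 3) and the partition acknowledging it as "committing to position 3 prev 2 end 3". Below is a minimal sketch of that offset bookkeeping, assuming a simple read-offset/committed-offset pair per partition stream; it is not the actual ydb_persqueue_public SDK API.

    // Illustrative per-partition offset bookkeeping; not the SDK's real types.
    #include <cstdint>
    #include <iostream>

    struct TPartitionStream {
        uint64_t ReadOffset = 0;       // next offset the session will read
        uint64_t CommittedOffset = 0;  // durably acknowledged by the server
    };

    // Commit a half-open range [begin, end), as the read session does per cookie.
    bool Commit(TPartitionStream& ps, uint64_t begin, uint64_t end) {
        if (begin != ps.CommittedOffset || end > ps.ReadOffset) {
            return false;  // reject gaps and commits past the read position
        }
        ps.CommittedOffset = end;
        return true;
    }

    int main() {
        TPartitionStream ps{/*ReadOffset=*/3, /*CommittedOffset=*/2};
        if (Commit(ps, 2, 3)) {
            std::cout << "committed to position " << ps.CommittedOffset << "\n";  // prints 3
        }
    }
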
2025-04-07T14:06:29.152746Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:06:29.154273Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0] Write session is aborting and will not restart 2025-04-07T14:06:29.156918Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f03d34da-56c388ef-ccbb4ba-3d773439_0] Write session: destroy 2025-04-07T14:06:31.306096Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:06:31.753709Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-07T14:06:36.306329Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:06:39.054339Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-07T14:06:39.164737Z :INFO: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] Closing read session. Close timeout: 0.000000s 2025-04-07T14:06:39.164815Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-07T14:06:39.164852Z :INFO: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16441 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:06:39.164972Z :NOTICE: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:06:39.165035Z :DEBUG: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] [dc1] Abort session to cluster 2025-04-07T14:06:39.166166Z :NOTICE: [/Root] [/Root] [f9e775f1-13817707-34642e8d-53f3fbf3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:06:39.171265Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 grpc read done: success# 0, data# { } 2025-04-07T14:06:39.171303Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 grpc read failed 2025-04-07T14:06:39.171334Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 grpc closed 2025-04-07T14:06:39.171384Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_17903046697354361383_v1 is DEAD 2025-04-07T14:06:39.173447Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:06:39.173485Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_17903046697354361383_v1 2025-04-07T14:06:39.173506Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490572352229124640:2508] disconnected; active server actors: 1 2025-04-07T14:06:39.173523Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490572352229124644:2511] destroyed 2025-04-07T14:06:39.173586Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_17903046697354361383_v1 2025-04-07T14:06:39.173563Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490572352229124640:2508] client user disconnected session shared/user_1_1_17903046697354361383_v1 2025-04-07T14:06:40.485706Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:40.485734Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:40.485769Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:06:40.486160Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:06:40.486662Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:06:40.486855Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:40.487078Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:06:40.487711Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:40.488115Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:06:40.488322Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T14:06:40.488399Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:06:40.488464Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:06:40.488497Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T14:06:40.488675Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:06:40.488726Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:128:2151] sender: [1:130:2057] recipient: [1:106:2138] 2025-04-07T14:06:17.654040Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:17.738414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.738465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.740505Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:17.740919Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:17.741195Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:17.750057Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:17.790831Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:17.790947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:17.792358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:17.792449Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:17.792498Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:17.792859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:17.814665Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:17.814858Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:17.814948Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:193:2195] 2025-04-07T14:06:17.814981Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:17.815019Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:17.815102Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.816006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.816060Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.816321Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:17.816435Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:17.816492Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.816529Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:17.816601Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:17.816632Z node 1 
:TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.816658Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.816709Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:17.816746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:128:2151] sender: [1:206:2057] recipient: [1:14:2061] 2025-04-07T14:06:17.870907Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.870982Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.871703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:17.875158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:17.875216Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:17.875316Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:17.875575Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:17.875620Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:17.875673Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:17.875722Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.875766Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:17.875807Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:17.875837Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.876085Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:17.876130Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:17.876162Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:17.876198Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.876298Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:17.876323Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:17.876355Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:17.876405Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.876438Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 
2025-04-07T14:06:17.899825Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:17.899979Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.900028Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.900076Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:17.900146Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:17.901441Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.901497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.901552Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:17.901692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:17.901725Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:17.901864Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.901907Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.901940Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:17.901975Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:17.906852Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:17.906921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.907150Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.907188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.907237Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.907276Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:17.907308Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:17.907344Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-07T14:06:17.907375Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:17.907413Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.907445Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:17.907480Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:17.907516Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 
2025-04-07T14:06:17.907681Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-07T14:06:17.907731Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.907768Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:17.907802Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:17.907825Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:17.907879Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.907900Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:17.907925Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:17.907951Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:17.908005Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-07T14:06:17.908040Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:17.908068Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-07T14:06:17.908103Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.908126Z node 1 :TX_DATASHARD TRACE: Adv ... 25-04-07T14:06:42.466461Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [20:271:2259], Recipient [20:229:2224]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:42.466497Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:06:42.466544Z node 20 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 9437184 promoting UnprotectedReadEdge to v0/18446744073709551615 2025-04-07T14:06:42.466630Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [20:121:2147], Recipient [20:229:2224]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-07T14:06:42.466664Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T14:06:42.466702Z node 20 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-07T14:06:42.466769Z node 20 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-07T14:06:42.509628Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [20:271:2259], Recipient [20:229:2224]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:06:42.509694Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:06:42.542371Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [20:282:2268], Recipient [20:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.542453Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.542510Z node 20 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [20:280:2267], serverId# [20:282:2268], sessionId# [0:0:0] 2025-04-07T14:06:42.542700Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [20:279:2266], Recipient [20:229:2224]: NKikimrTabletBase.TEvGetCounters 
2025-04-07T14:06:42.554594Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [20:97:2132], Recipient [20:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 85899348052 } 2025-04-07T14:06:42.554671Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T14:06:42.555019Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [20:284:2270], Recipient [20:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.555062Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.555115Z node 20 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [20:283:2269], serverId# [20:284:2270], sessionId# [0:0:0] 2025-04-07T14:06:42.555354Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [20:97:2132], Recipient [20:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 85899348052 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-07T14:06:42.555389Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:42.555479Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:42.556407Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-07T14:06:42.556495Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T14:06:42.556541Z node 20 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-07T14:06:42.556585Z node 20 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:42.556626Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:42.556685Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:42.556753Z node 20 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-07T14:06:42.556798Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T14:06:42.556824Z node 20 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:42.556848Z node 20 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:42.556874Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 
2025-04-07T14:06:42.556919Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:42.556970Z node 20 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-04-07T14:06:42.557011Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-07T14:06:42.557274Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:42.557323Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:42.557374Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:42.558425Z node 20 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-04-07T14:06:42.558580Z node 20 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-07T14:06:42.558627Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-07T14:06:42.558805Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:42.558835Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:42.559552Z node 20 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-07T14:06:42.559602Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:42.560072Z node 20 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-04-07T14:06:42.560176Z node 20 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-07T14:06:42.560204Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-07T14:06:42.560348Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:42.560377Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:42.560862Z node 20 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-07T14:06:42.560902Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:06:42.561333Z node 20 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-04-07T14:06:42.561411Z node 20 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-07T14:06:42.561439Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-07T14:06:42.561576Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:42.561604Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:42.562041Z node 20 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-07T14:06:42.562077Z node 20 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 
2025-04-07T14:06:42.863471Z node 20 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:42.863582Z node 20 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:42.863657Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:42.863691Z node 20 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:42.863739Z node 20 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:42.863781Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-07T14:06:42.863881Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:42.863913Z node 20 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:42.863953Z node 20 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:42.863993Z node 20 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-07T14:06:42.864048Z node 20 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-07T14:06:42.864077Z node 20 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:42.864117Z node 20 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-07T14:06:42.876565Z node 20 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:42.876647Z node 20 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-07T14:06:42.876721Z node 20 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-07T14:06:42.876832Z node 20 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:42.877881Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [20:289:2275], Recipient [20:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.877942Z node 20 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.877990Z node 20 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [20:288:2274], serverId# [20:289:2275], sessionId# [0:0:0] 2025-04-07T14:06:42.878148Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [20:287:2273], Recipient [20:229:2224]: NKikimrTabletBase.TEvGetCounters >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-04-07T14:06:38.450240Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:38.516780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:38.516834Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-07T14:06:38.518744Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:38.519119Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:38.519359Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:38.528971Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:38.570464Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:38.570795Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:38.572210Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:38.572292Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:38.572347Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:38.572697Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:38.597677Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:38.597819Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:38.597899Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:38.597923Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:38.597948Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:38.597970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:38.598110Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:38.598138Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:38.598413Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:38.598465Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:38.598498Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:38.598525Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:38.598556Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:38.598575Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:38.598596Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:38.598615Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:38.598635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:38.637908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:38.637989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:38.638050Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# 
[1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:38.645743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:38.645833Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:38.645932Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:38.646119Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:38.646171Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:38.646237Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:38.646285Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:38.646321Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:38.646355Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:38.646387Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:38.646685Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:38.646730Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:38.646757Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:38.646793Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:38.646839Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:38.646861Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:38.646895Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:38.646923Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:38.646946Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:38.674682Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:38.674741Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:38.674787Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:38.674836Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:38.674902Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:38.675442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:38.675491Z node 1 :TX_DATASHARD TRACE: 
StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:38.675530Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:38.675659Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:38.675696Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:38.675830Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:38.675882Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:38.675922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:38.675951Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:38.678437Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:38.678482Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:38.678622Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:38.678644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:38.678673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:38.678696Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:38.678721Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:38.678804Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:38.678829Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:38.678879Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:38.678908Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:38.678931Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:38.678951Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:38.679085Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:38.679108Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:38.679130Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:38.679142Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:38.679156Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:38.679188Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:38.679207Z node 1 :TX_DATASHARD TRACE: Advance execution plan 
for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:38.679227Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:38.679249Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:38.679285Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:38.679305Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:38.679324Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:38.679367Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:38.679383Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:38.679396Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:42.967926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:42.968149Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-07T14:06:42.968180Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968205Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-07T14:06:42.968276Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-07T14:06:42.968296Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968314Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-07T14:06:42.968387Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-07T14:06:42.968405Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968418Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-07T14:06:42.968465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:42.968490Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968519Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:42.968564Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:42.968587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968608Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:42.968655Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:42.968684Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968698Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:42.968753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-07T14:06:42.968782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968800Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-07T14:06:42.968860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:42.968880Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968897Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-07T14:06:42.968942Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-07T14:06:42.968954Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.968966Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-07T14:06:42.969029Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-07T14:06:42.969048Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.969065Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-07T14:06:42.969126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:42.969145Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.969160Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:42.969241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: 
{TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:42.969270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.969289Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-07T14:06:42.969334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:42.969347Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.969359Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:42.969405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:42.969427Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-04-07T14:06:42.969458Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:42.969498Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:42.969525Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:42.969618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:42.969642Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-07T14:06:42.969678Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:42.969706Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:42.969724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:42.969783Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:42.969799Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-07T14:06:42.969829Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:42.969853Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:42.969867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:42.969927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:42.969942Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-07T14:06:42.969967Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 
2025-04-07T14:06:42.970002Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:42.970021Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:42.970146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:42.970176Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.970200Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:42.970290Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:42.970309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.970326Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-07T14:06:42.970361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:42.970373Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.970386Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-07T14:06:42.970455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:42.970472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:42.970487Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardTxOrder::ReadWriteReorder >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardOutOfOrder::UncommittedReads >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 
>> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Mixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] Test command err: 2025-04-07T14:06:34.706858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:34.707124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:34.707330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001902/r3tmp/tmpmto5FY/pdisk_1.dat 2025-04-07T14:06:35.036062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:35.071326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:35.120787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:35.120916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:35.134426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:35.246590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:35.277069Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:35.278066Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:35.278529Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:35.278812Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:35.319553Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:35.320266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:35.320375Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:35.322039Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:35.322112Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:35.322250Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:35.322566Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:35.355444Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:35.355650Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:35.355953Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:35.355996Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:35.356052Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:35.356086Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:35.356450Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.356501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.356725Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:35.356804Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:35.357191Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.357242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.357289Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:35.357371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:35.357446Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:35.357495Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:35.357533Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:35.357588Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:35.357620Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:35.357684Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:35.357827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:35.357866Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:35.357950Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:35.358180Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:35.358241Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:35.358340Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:35.358378Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:35.358430Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:35.358464Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:35.358501Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:35.358743Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:35.358789Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:35.358825Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:35.358855Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:35.358906Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:35.358957Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:35.358991Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:35.359031Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:35.359062Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:35.360473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:35.360521Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:35.371199Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:35.371264Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:35.371301Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:35.371378Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:35.371455Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:35.553392Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.553455Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:35.553489Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:35.553924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:35.553964Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:35.554068Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:35.554111Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:35.554150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:35.554185Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:35.561457Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:35.561536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:35.562398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.562434Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:35.562469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:35.562519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:35.562550Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:35.562585Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... ed key sets: 1 2025-04-07T14:06:44.715058Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:06:44.715157Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-07T14:06:44.715315Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-07T14:06:44.715468Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715672. Shard resolve complete, resolved shards: 1 2025-04-07T14:06:44.715538Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-07T14:06:44.715599Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-04-07T14:06:44.715657Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:06:44.715705Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2025-04-07T14:06:44.715994Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:06:44.716056Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2025-04-07T14:06:44.716135Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2025-04-07T14:06:44.716186Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T14:06:44.716692Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T14:06:44.716782Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2025-04-07T14:06:44.716858Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2025-04-07T14:06:44.717240Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-07T14:06:44.717318Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:06:44.717368Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2025-04-07T14:06:44.717410Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-07T14:06:44.717465Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-07T14:06:44.717547Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:06:44.717589Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:06:44.717631Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:44.717666Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:44.717712Z node 2 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-07T14:06:44.717751Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:06:44.717775Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:44.717797Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:06:44.717821Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:44.717906Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-07T14:06:44.718096Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1164:2911], 0} after executionsCount# 1 2025-04-07T14:06:44.718151Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:44.718231Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} finished in read 2025-04-07T14:06:44.718284Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:06:44.718308Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:44.718331Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:44.718353Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit 
CompletedOperations 2025-04-07T14:06:44.718393Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:06:44.718414Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:44.718439Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-07T14:06:44.718473Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:06:44.719065Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:44.719122Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T14:06:44.719648Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 759 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 172 FinishTimeMs: 1744034804719 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 87 WaitInputTimeUs: 1690 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034804716 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:44.719702Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1162:2911] 2025-04-07T14:06:44.719835Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:44.719913Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jr896cwv981zpxmd93jcyvqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk2OTUwNC1jODUyMDA3ZS1hNGZkMzQyYi0yZDZhNDU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000759s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-04-07T14:06:44.632276Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:44.690736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:44.690790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:44.692685Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:44.693035Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:44.693293Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:44.701422Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:44.731431Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:44.731792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:44.733057Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:44.733133Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:44.733164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:44.733412Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:44.753389Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:44.753522Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:44.753610Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:44.753637Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:44.753657Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:44.753679Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:44.753799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.753829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.754110Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:44.754170Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:44.754199Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:44.754220Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:44.754251Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:44.754273Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:44.754295Z node 1 :TX_DATASHARD TRACE: 
Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:44.754319Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:44.754348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:44.793857Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.793918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.793964Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:44.795775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:44.795819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:44.795887Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:44.796048Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:44.796088Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:44.796148Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:44.796186Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:44.796225Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:44.796253Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:44.796286Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:44.796506Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:44.796544Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:44.796570Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:44.796601Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:44.796641Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:44.796677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:44.796702Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:44.796732Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:44.796761Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:44.819544Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:44.819622Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 
2025-04-07T14:06:44.819685Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:44.819734Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:44.819808Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:44.820358Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.820418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.820460Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:44.820586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:44.820616Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:44.820778Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:44.820827Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.820863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:44.820911Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:44.824058Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:44.824129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:44.824322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.824360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.824403Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:44.824436Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:44.824465Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:44.824497Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:44.824524Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:44.824553Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.824591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:44.824624Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:44.824653Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:44.824843Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:44.824886Z 
node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.824922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:44.824948Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:44.824973Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:44.825029Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:44.825070Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:44.825106Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:44.825142Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:44.825193Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:44.825225Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:44.825261Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:44.825300Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.825331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:44.825367Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeSna ... WaitInRS 2025-04-07T14:06:45.529806Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-07T14:06:45.529827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:06:45.529844Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:45.529862Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:45.530296Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:45.530348Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:45.530385Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-07T14:06:45.530426Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:45.530457Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:45.530479Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-04-07T14:06:45.530636Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-04-07T14:06:45.530660Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:45.530687Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit 
CompletedOperations 2025-04-07T14:06:45.530715Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-04-07T14:06:45.530754Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-07T14:06:45.530774Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:45.530796Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-04-07T14:06:45.530840Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:45.530868Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:45.530897Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:45.530926Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:45.531084Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:331:2303], Recipient [1:331:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.531111Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.531162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-07T14:06:45.531190Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:45.531209Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:06:45.531232Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in PlanQueue unit at 9437185 2025-04-07T14:06:45.531252Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit PlanQueue 2025-04-07T14:06:45.531273Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.531292Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit PlanQueue 2025-04-07T14:06:45.531310Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit LoadTxDetails 2025-04-07T14:06:45.531329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit LoadTxDetails 2025-04-07T14:06:45.531841Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000005:12 keys extracted: 1 2025-04-07T14:06:45.531880Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.531905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadTxDetails 2025-04-07T14:06:45.531937Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-07T14:06:45.531957Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit FinalizeDataTxPlan 2025-04-07T14:06:45.531986Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-07T14:06:45.532024Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:45.532041Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit BuildAndWaitDependencies 2025-04-07T14:06:45.532083Z node 1 :TX_DATASHARD TRACE: Operation 
[1000005:12] is the new logically complete end at 9437185 2025-04-07T14:06:45.532104Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437185 2025-04-07T14:06:45.532147Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437185 2025-04-07T14:06:45.532180Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:45.532234Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit BuildDataTxOutRS 2025-04-07T14:06:45.532257Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit BuildDataTxOutRS 2025-04-07T14:06:45.532297Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532324Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit BuildDataTxOutRS 2025-04-07T14:06:45.532343Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit StoreAndSendOutRS 2025-04-07T14:06:45.532359Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit StoreAndSendOutRS 2025-04-07T14:06:45.532378Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532401Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit StoreAndSendOutRS 2025-04-07T14:06:45.532423Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit PrepareDataTxInRS 2025-04-07T14:06:45.532441Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit PrepareDataTxInRS 2025-04-07T14:06:45.532462Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit PrepareDataTxInRS 2025-04-07T14:06:45.532500Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit LoadAndWaitInRS 2025-04-07T14:06:45.532517Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit LoadAndWaitInRS 2025-04-07T14:06:45.532536Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532551Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-04-07T14:06:45.532588Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-04-07T14:06:45.532616Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-04-07T14:06:45.532874Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-04-07T14:06:45.532911Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:45.532942Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.532961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-04-07T14:06:45.532981Z node 1 :TX_DATASHARD TRACE: 
Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-04-07T14:06:45.532999Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-04-07T14:06:45.533136Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-04-07T14:06:45.533163Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-04-07T14:06:45.533182Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-04-07T14:06:45.533205Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-04-07T14:06:45.533227Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-07T14:06:45.533268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-04-07T14:06:45.533288Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2025-04-07T14:06:45.533310Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:45.533338Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:06:45.533363Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-07T14:06:45.533381Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-07T14:06:45.548256Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-04-07T14:06:45.548324Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-04-07T14:06:45.548375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:06:45.548411Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-04-07T14:06:45.548477Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:45.548524Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-07T14:06:45.548757Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-04-07T14:06:45.548788Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-07T14:06:45.548819Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:45.548856Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-04-07T14:06:45.548888Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:45.548914Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > 0, a, false, 0 | PageId: 10001 
RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > 1, b, true, 10 | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > 2, c, false, 20 | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > 3, d, true, 30 | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > 4, e, false, 40 | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > 5, f, true, 50 | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > 6, g, false, 60 | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > 7, h, true, 70 | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > 8, i, false, 80 | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > 9, j, true, 90 | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, a, false, 0 | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, b, true, 10 | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > 2, c, false, 20 | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, d, true, 30 | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, e, false, 40 | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > 5, f, true, 50 | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, g, false, 60 | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, h, true, 70 | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > 8, i, false, 80 | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, j, true, 90 | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, k, false, 100 | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, l, true, 110 | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, m, false, 120 | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, n, true, 130 | | | PageId: 10014 RowCount: 1605 
DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > 14, o, false, 140 | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, p, true, 150 | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, q, false, 160 | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > 17, r, true, 170 | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, s, false, 180 | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, t, true, 190 | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, x, NULL, NULL | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, xx, NULL, NULL | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > 2, xxx, NULL, NULL | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, xxxx, NULL, NULL | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, xxxxx, NULL, NULL | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > 5, xxxxxx, NULL, NULL | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, xxxxxxx, NULL, NULL | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, xxxxxxxx, NULL, NULL | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > 8, xxxxxxxxx, NULL, NULL | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, xxxxxxxxxx, NULL, NULL | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, xxxxxxxxxx.., NULL, NULL | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, xxxxxxxxxx.., NULL, NULL | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, xxxxxxxxxx.., NULL, NULL | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > 14, xxxxxxxxxx.., NULL, NULL | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, xxxxxxxxxx.., NULL, NULL | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, xxxxxxxxxx.., NULL, NULL | | | 
PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > 17, xxxxxxxxxx.., NULL, NULL | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, xxxxxxxxxx.., NULL, NULL | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, xxxxxxxxxx.., NULL, NULL | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > 20, xxxxxxxxxx.., NULL, NULL | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > 21, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > 22, xxxxxxxxxx.., NULL, NULL | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > 23, xxxxxxxxxx.., NULL, NULL | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > 24, xxxxxxxxxx.., NULL, NULL | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > 25, xxxxxxxxxx.., NULL, NULL | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > 26, xxxxxxxxxx.., NULL, NULL | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > 27, xxxxxxxxxx.., NULL, NULL | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > 28, xxxxxxxxxx.., NULL, NULL | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > 29, xxxxxxxxxx.., NULL, NULL | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > 30, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > 31, xxxxxxxxxx.., NULL, NULL | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > 32, xxxxxxxxxx.., NULL, NULL | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > 33, xxxxxxxxxx.., NULL, NULL | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > 34, xxxxxxxxxx.., NULL, NULL | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > 35, xxxxxxxxxx.., NULL, NULL | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > 36, xxxxxxxxxx.., NULL, NULL | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > 37, xxxxxxxxxx.., NULL, NULL | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > 38, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > 39, xxxxxxxxxx.., NULL, NULL | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > 40, xxxxxxxxxx.., NULL, NULL | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > 
41, xxxxxxxxxx.., NULL, NULL | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > 42, xxxxxxxxxx.., NULL, NULL | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > 43, xxxxxxxxxx.., NULL, NULL | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > 44, xxxxxxxxxx.., NULL, NULL | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > 45, xxxxxxxxxx.., NULL, NULL | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > 46, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 rev 1, 674b} | | | PageId: 10047 RowCount: 5928 DataSize: 49128 GroupDataSize: 97128 ErasedRowCount: 2568 | | | > 47, xxxxxxxxxx.., NULL, NULL | | | Pa ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | 
ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, 
+2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | 
ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::FewNodes >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init >> DataShardTxOrder::ZigZag_oo >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Crossed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: 2025-04-07T14:06:38.489634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:38.489930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:38.490153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d8/r3tmp/tmpEH3zEZ/pdisk_1.dat 2025-04-07T14:06:38.816585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:38.859115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:38.908250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:38.908385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:38.920012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:39.033120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:39.352045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-07T14:06:39.649232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:39.649342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:39.649425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:39.654115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:39.854087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:40.115656Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89680za2rr5scmr4w9g3fm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZiZTZhNGYtY2JmOWRjNDMtYTBhMDdlMDItZjc4YTlhZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:40.188380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8968g7bwsem8m6wa5rs65n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMxYWZlOWQtYTY4OTdjOWYtODJiZWQxOTUtNzc2MGFjNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-07T14:06:40.502245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr8968k74h0m2aaaazz0e4g9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJlZDZlNDQtM2RkYTQzYzMtY2MwNDY4MTMtY2QwYjI5N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-07T14:06:40.601625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr8968w357vv44de2ggwqx0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJlZDZlNDQtM2RkYTQzYzMtY2MwNDY4MTMtY2QwYjI5N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-04-07T14:06:41.042598Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89698rdjevk6gxp7dxtwt1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBiNjYwYTMtODc3MzQ5OWYtNjVhODI4NmUtMTMzOTEzZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-04-07T14:06:44.217789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:44.218199Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:44.218284Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d8/r3tmp/tmp1Uh9eo/pdisk_1.dat 2025-04-07T14:06:44.474553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:44.499990Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:44.546732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:44.546868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:44.558286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:44.669289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:44.927721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:45.232418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:45.232508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:802:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:45.232578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:45.237032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:45.435018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:45.528409Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr896dfe7tx510sps0q5pzqs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWNlZDVmM2QtOWU3MjBjYjktY2RiYThkOTUtNjBkODFiZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:45.608069Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr896ds854r239bfk1czqrvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YThiNWY5YzQtMjQ3YmM3NjQtMzU5NGMzZS02NjAyNzc1Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-07T14:06:46.424649Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr896ejdb0c3jb0y224ea763, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2QwMTEyOTgtOWVkMGY2MjAtNzA4NjEzNDItNDlmNmNkMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:46.501710Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr896emyesmpj2061pta9v5j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGNhNmVhNDQtZTNhMjM2ZmMtOTgzYWY2MGUtYjI4MmYyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:40.126153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:40.126428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:40.126644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018c5/r3tmp/tmpBQ0lzX/pdisk_1.dat 2025-04-07T14:06:40.442903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:40.479407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:40.527689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:40.527804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:40.539112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:40.650446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:40.684260Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:40.685214Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:40.685615Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:40.685839Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:40.727401Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:40.728068Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:40.728189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:40.729701Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:40.729764Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:40.729828Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:40.730138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:40.759196Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:40.759397Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:40.759537Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:40.759583Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:40.759616Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:40.759653Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:40.759972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.760021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.760207Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:40.760289Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:40.760644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.760700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.760742Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:40.760810Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:40.760848Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:40.760903Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:40.760971Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:40.761015Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:40.761046Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:40.761100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:40.761224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:40.761263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:40.761345Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:40.761547Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:40.761612Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:40.761702Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:40.761741Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:40.761786Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:40.761821Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:40.761856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.762090Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:40.762121Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:40.762166Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:40.762197Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.762249Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:40.762286Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:40.762327Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:40.762354Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:40.762378Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:40.763604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:40.763653Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:40.774333Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:40.774403Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.774435Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.774498Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:40.774555Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:40.953850Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.953892Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.953925Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:40.954257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:40.954285Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:40.954386Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:40.954431Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:40.954471Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:40.954505Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:40.958092Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:40.958169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:40.958898Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.958926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.958972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:40.959014Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:40.959040Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:40.959091Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... : 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 553 DurationUs: 7000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 350 FinishTimeMs: 1744034807812 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 307 BuildCpuTimeUs: 43 WaitInputTimeUs: 4455 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807805 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.813606Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2025-04-07T14:06:47.813649Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:47.813686Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:47.814208Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 396 DurationUs: 8000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 93 FinishTimeMs: 1744034807813 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 61 BuildCpuTimeUs: 32 WaitInputTimeUs: 4583 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807805 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.814248Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2025-04-07T14:06:47.814303Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:47.814340Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:47.814894Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 371 DurationUs: 8000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 199 FinishTimeMs: 1744034807813 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 175 BuildCpuTimeUs: 24 WaitInputTimeUs: 4539 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807805 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.814935Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2025-04-07T14:06:47.814976Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:47.815031Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. 
Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:47.815145Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 522 DurationUs: 9000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 311 FinishTimeMs: 1744034807814 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 263 BuildCpuTimeUs: 48 WaitInputTimeUs: 5975 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807805 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.815179Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2025-04-07T14:06:47.815216Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:47.815252Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:47.815407Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 327 DurationUs: 8000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 149 FinishTimeMs: 1744034807814 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 100 BuildCpuTimeUs: 49 WaitInputTimeUs: 6722 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807806 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.815438Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2025-04-07T14:06:47.815468Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. 
Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1052:2847], 2025-04-07T14:06:47.815497Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1052:2847], 2025-04-07T14:06:47.815757Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2847], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 352 DurationUs: 9000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 158 FinishTimeMs: 1744034807815 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 127 BuildCpuTimeUs: 31 WaitInputTimeUs: 8220 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034807806 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:47.815792Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1052:2847] 2025-04-07T14:06:47.815951Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:47.816012Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896fnmcatf89s56r64584g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFmMjY4NS0zNTM1MDI0OS02YmU2NmU1Mi01NGRiNmVjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.003356s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> DataShardTxOrder::DelayData [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:40.213123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:40.213428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:40.213666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018bf/r3tmp/tmpQkDLnX/pdisk_1.dat 2025-04-07T14:06:40.532320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:40.569782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:40.621365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:40.621488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:40.632959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:40.744393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:40.775618Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:40.776730Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:40.777195Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:40.777442Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:40.813453Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:40.814071Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:40.814155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:40.815598Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:40.815671Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:40.815726Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:40.816044Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:40.846025Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:40.846206Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:40.846316Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:40.846353Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:40.846394Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:40.846428Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:40.846743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.846780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.846925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:40.847004Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:40.847326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.847353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.847387Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:40.847434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:40.847464Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:40.847510Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:40.847577Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:40.847612Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:40.847636Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:40.847679Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:40.847815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:40.847854Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:40.847944Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:40.848139Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:40.848194Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:40.848292Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:40.848340Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:40.848391Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:40.848429Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:40.848463Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.850564Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:40.850647Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:40.850700Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:40.850739Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.850805Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:40.850844Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:40.850881Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:40.850915Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:40.850941Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:40.852386Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:40.852439Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:40.863448Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:40.863516Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.863553Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.863614Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:40.863676Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:41.045685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.045741Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.045788Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:41.046219Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:41.046259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:41.046386Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:41.046432Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:41.046495Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:41.046534Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:41.050577Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:41.050644Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:41.051524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.051561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.051623Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:41.051666Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:41.051723Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:41.051777Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... Id: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 405 DurationUs: 5000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 247 FinishTimeMs: 1744034808517 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 218 BuildCpuTimeUs: 29 WaitInputTimeUs: 2984 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.517977Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2025-04-07T14:06:48.518004Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:48.518038Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:48.518413Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 261 DurationUs: 5000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 64 FinishTimeMs: 1744034808517 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 44 BuildCpuTimeUs: 20 WaitInputTimeUs: 3069 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.518441Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2025-04-07T14:06:48.518477Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:48.518498Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], CA [2:1049:2844], 2025-04-07T14:06:48.518872Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 267 DurationUs: 6000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 149 FinishTimeMs: 1744034808518 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 132 BuildCpuTimeUs: 17 WaitInputTimeUs: 3069 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.518898Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2025-04-07T14:06:48.518924Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:48.518957Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. 
Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:48.519021Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 409 DurationUs: 6000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 251 FinishTimeMs: 1744034808518 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 194 BuildCpuTimeUs: 57 WaitInputTimeUs: 4125 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.519039Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2025-04-07T14:06:48.519059Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:48.519080Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1051:2846], CA [2:1052:2847], 2025-04-07T14:06:48.519197Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 223 DurationUs: 6000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 107 FinishTimeMs: 1744034808518 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 72 BuildCpuTimeUs: 35 WaitInputTimeUs: 4664 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.519216Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2025-04-07T14:06:48.519235Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. 
Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1052:2847], 2025-04-07T14:06:48.519255Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1052:2847], 2025-04-07T14:06:48.519426Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2847], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 248 DurationUs: 7000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 104 FinishTimeMs: 1744034808519 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 83 BuildCpuTimeUs: 21 WaitInputTimeUs: 5727 HostName: "ghrun-lqtfcktism" NodeId: 2 StartTimeMs: 1744034808512 } MaxMemoryUsage: 1048576 } 2025-04-07T14:06:48.519454Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1052:2847] 2025-04-07T14:06:48.519574Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:48.519621Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2824] TxId: 281474976715667. Ctx: { TraceId: 01jr896geaezy0wm78xmzfweqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZkODBlMzUtZWMzMTIyMzItYWYzYjMwOTUtN2ViY2U2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.002443s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardOutOfOrder::UncommittedReads [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> DataShardTxOrder::ZigZag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-04-07T14:06:36.855170Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:36.926020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:36.926073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:36.928024Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:36.928435Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:36.928683Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:36.938611Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:36.979958Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:36.980433Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:36.981869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:36.981946Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:36.981994Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:36.982318Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:37.013291Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:37.013471Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:37.013580Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:37.013617Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:37.013654Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:37.013691Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:37.013856Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.013895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.014233Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:37.014317Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:37.014361Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:37.014464Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 
active planned 0 immediate 0 planned 0 2025-04-07T14:06:37.014503Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:37.014532Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:37.014563Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:37.014592Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:37.014625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:37.067159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.067235Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.067284Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:37.069849Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:37.069905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:37.069994Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:37.070161Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:37.070221Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:37.070276Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:37.070319Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:37.070355Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:37.070383Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:37.070413Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:37.070680Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:37.070712Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:37.070741Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:37.070776Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:37.070826Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:37.070855Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:37.070883Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:37.070920Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:37.070949Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 
9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:37.093776Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:37.093843Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:37.093895Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:37.093946Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:37.094010Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:37.094471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.094525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.094560Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:37.094697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:37.094730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:37.094840Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:37.094882Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.094915Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:37.094959Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:37.098408Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:37.098464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:37.098641Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.098674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.098721Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:37.098759Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:37.098787Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:37.098825Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:37.098856Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:37.098893Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.098943Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:37.098982Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 
2025-04-07T14:06:37.099010Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:37.099158Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:37.099207Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.099241Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:37.099266Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:37.099289Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:37.099341Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:37.099372Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:37.099399Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:37.099427Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:37.099481Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:37.099513Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:37.099538Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:37.099579Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.099610Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:37.099669Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
e 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-04-07T14:06:49.111450Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-04-07T14:06:49.111462Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:06:49.111473Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-07T14:06:49.111482Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:06:49.111494Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:49.111503Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:49.111740Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:49.111793Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:49.111836Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-07T14:06:49.111852Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:49.111875Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:49.111889Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.112002Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2025-04-07T14:06:49.112024Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:49.112061Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:49.112086Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-04-07T14:06:49.112118Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-07T14:06:49.112134Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:49.112151Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-04-07T14:06:49.112179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:49.112204Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:49.112228Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-04-07T14:06:49.112450Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:227:2222], Recipient [2:227:2222]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:49.112482Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:49.112523Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:49.112545Z node 2 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:49.112565Z node 2 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-04-07T14:06:49.112590Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-04-07T14:06:49.112609Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.112625Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:49.112639Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:49.112651Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:49.112996Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-04-07T14:06:49.113021Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113036Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:49.113068Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-07T14:06:49.113089Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-04-07T14:06:49.113113Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113125Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-07T14:06:49.113137Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:49.113147Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:49.113168Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-04-07T14:06:49.113185Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-04-07T14:06:49.113237Z node 2 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-04-07T14:06:49.113255Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113266Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:49.113287Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-04-07T14:06:49.113299Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-04-07T14:06:49.113319Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113329Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-04-07T14:06:49.113349Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-04-07T14:06:49.113360Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-04-07T14:06:49.113372Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113382Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2025-04-07T14:06:49.113399Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-04-07T14:06:49.113414Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-04-07T14:06:49.113432Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113453Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-04-07T14:06:49.113468Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-04-07T14:06:49.113479Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:06:49.113490Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.113499Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:06:49.113526Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:49.113538Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:49.113745Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:49.113783Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:49.113808Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:49.113829Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:49.113851Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:49.113875Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.113966Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-04-07T14:06:49.113990Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:49.114005Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:49.114026Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-04-07T14:06:49.114046Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-07T14:06:49.114057Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:49.114080Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-04-07T14:06:49.114104Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:49.114120Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:49.114135Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:49.114170Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2025-04-07T14:06:49.127673Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-04-07T14:06:49.127720Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-07T14:06:49.127764Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:49.127823Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.127919Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:49.127990Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:49.128172Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:49.128198Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.128232Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:49.128258Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> BasicUsage::BrokenCredentialsProvider [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-04-07T14:06:43.866299Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:43.940894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:43.940956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:43.942952Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:43.943361Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:43.943605Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:43.954133Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:43.989961Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:43.990402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:43.991912Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:43.991993Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:43.992048Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:43.992413Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:44.024965Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:44.025147Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:44.025274Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: 
[1:191:2193] 2025-04-07T14:06:44.025313Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:44.025348Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:44.025381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:44.025558Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.025604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.025966Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:44.026043Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:44.026091Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:44.026127Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:44.026173Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:44.026214Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:44.026247Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:44.026318Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:44.026366Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:44.069933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.070019Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.070081Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:44.072934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:44.073003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:44.073087Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:44.073302Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:44.073357Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:44.073410Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:44.073459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:44.073492Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:44.073526Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:44.073565Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:44.073835Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:44.073890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:44.073930Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:44.073986Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:44.074048Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:44.074078Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:44.074108Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:44.074151Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:44.074182Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:44.096596Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:44.096683Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:44.096737Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:44.096800Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:44.096865Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:44.097361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.097410Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.097458Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:44.097579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:44.097607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:44.097762Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:44.097804Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.097838Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:44.097884Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:44.101390Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:44.101445Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:44.101618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.101652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.101686Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:44.101722Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:44.101748Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:44.101780Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:44.101808Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:44.101838Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.101879Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:44.101913Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:44.101937Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:44.102055Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:44.102083Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.102115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:44.102139Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:44.102156Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:44.102193Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:44.102214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:44.102235Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:44.102260Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:44.102304Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:44.102329Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:44.102353Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:44.102388Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:44.102410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:44.102430Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2025-04-07T14:06:49.744352Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:49.744389Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-07T14:06:49.744412Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-07T14:06:49.744431Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-07T14:06:49.744855Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-07T14:06:49.744916Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:49.744960Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:49.744981Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-07T14:06:49.745001Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-07T14:06:49.745022Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:06:49.745193Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-07T14:06:49.745224Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-07T14:06:49.745256Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-07T14:06:49.745285Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-07T14:06:49.745321Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:49.745353Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-07T14:06:49.745378Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-07T14:06:49.745418Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:49.745446Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:06:49.745480Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-07T14:06:49.745512Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-07T14:06:49.745716Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:228:2223], Recipient [2:228:2223]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:49.745751Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:49.745790Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:49.745818Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:49.745840Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:49.745869Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2025-04-07T14:06:49.745892Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-07T14:06:49.745923Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.745941Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:49.745963Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:49.745998Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:49.746542Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-07T14:06:49.746578Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.746599Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:49.746619Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-07T14:06:49.746640Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-07T14:06:49.746670Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.746699Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-07T14:06:49.746719Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:49.746736Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:49.746778Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-07T14:06:49.746800Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-07T14:06:49.746823Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-07T14:06:49.746851Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.746869Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:49.746888Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-07T14:06:49.746908Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-07T14:06:49.746945Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.746962Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-07T14:06:49.746993Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-07T14:06:49.747063Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-07T14:06:49.747091Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.747108Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-07T14:06:49.747128Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-07T14:06:49.747148Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-07T14:06:49.747171Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.747188Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-07T14:06:49.747204Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-07T14:06:49.747221Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:06:49.747240Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.747270Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:06:49.747292Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:49.747312Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:49.747602Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:49.747650Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:49.747686Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.747707Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:49.747726Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:49.747745Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.747901Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-07T14:06:49.747924Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:49.747947Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:49.747969Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-07T14:06:49.747993Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:49.748010Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:49.748031Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-07T14:06:49.748056Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:49.748109Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:49.748136Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:49.748159Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:49.761467Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-07T14:06:49.761535Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-07T14:06:49.761593Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:49.761649Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:06:49.761713Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:49.761764Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:49.762197Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-07T14:06:49.762234Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-07T14:06:49.762267Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:06:49.762293Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:06:49.762328Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:06:49.762356Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> TIterator::Mixed [GOOD] >> TIterator::GetKey >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2025-04-07T14:06:39.238777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:06:39.255692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:39.256230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:06:39.256395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:39.257157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:39.257189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d9/r3tmp/tmpD8ktxx/pdisk_1.dat 2025-04-07T14:06:39.614645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:39.772768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:39.882166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:39.882311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:39.886062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:39.886159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:39.902016Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:39.902382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:39.902622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:40.256461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:40.329988Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1182:2345], Recipient [2:1210:2356]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:40.334683Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1182:2345], Recipient [2:1210:2356]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:40.335110Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1210:2356] 2025-04-07T14:06:40.335299Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:40.377068Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1182:2345], Recipient [2:1210:2356]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:40.380921Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:40.381187Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:40.382371Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at 
tablet: 72075186224037888 2025-04-07T14:06:40.382426Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:40.382478Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:40.382724Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:40.414380Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:40.414547Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:40.414635Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1234:2371] 2025-04-07T14:06:40.414666Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:40.414705Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:06:40.414739Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:40.415606Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1210:2356], Recipient [2:1210:2356]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.415649Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:40.415904Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:40.415977Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:40.416108Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:40.416151Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:40.416184Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:40.416211Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:40.416257Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:40.416293Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:40.416340Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:40.472250Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1238:2372], Recipient [2:1210:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.472304Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.472352Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1192:2734], serverId# [2:1238:2372], sessionId# [0:0:0] 2025-04-07T14:06:40.472618Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:777:2436], Recipient [2:1238:2372] 2025-04-07T14:06:40.472657Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:40.472766Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:40.472981Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:40.473052Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at 
tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:40.473158Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:40.473204Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:40.473264Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:40.473302Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:40.473352Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.473690Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:40.473728Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:40.473761Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:40.473795Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.473853Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:40.473902Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:40.473962Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:40.473998Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:40.474027Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:40.477505Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:40.477566Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:40.477614Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:40.477691Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:40.477751Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:40.478168Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1239:2373], Recipient [2:1210:2356]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:40.478242Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:40.826442Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1271:2382], Recipient [2:1210:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.826491Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:40.826522Z node 2 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:1269:2757], serverId# [2:1271:2382], sessionId# [0:0:0] 2025-04-07T14:06:40.828065Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:990:2585], Recipient [2:1271:2382] 2025-04-07T14:06:40.828121Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:40.828266Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:40.828304Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:40.828340Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:40.828371Z node 2 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-07T14:06:40.832253Z node 2 :TX_DATASHARD DEBUG: Plann ... \004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\00 2025-04-07T14:06:49.163923Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:49.164035Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [3:632:2537], Recipient [3:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:06:49.164063Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:06:49.164162Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:49.164525Z node 3 :TX_DATASHARD TRACE: TxId: 281474976715664, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-04-07T14:06:49.164585Z node 3 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-04-07T14:06:49.164640Z node 3 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-07T14:06:49.164901Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:06:49.164954Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:06:49.164995Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:06:49.165028Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:49.165057Z node 3 :TX_DATASHARD TRACE: Trying to 
execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:49.165096Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-07T14:06:49.165153Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-04-07T14:06:49.165185Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:06:49.165223Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:49.165262Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:06:49.165284Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:06:49.165326Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-07T14:06:49.165379Z node 3 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2025-04-07T14:06:49.165549Z node 3 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:06:49.165598Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:49.165628Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:06:49.165663Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:49.165689Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:49.165713Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-04-07T14:06:49.165739Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:49.165784Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:49.165816Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:49.165848Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:06:49.165868Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:49.165890Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2025-04-07T14:06:49.272767Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr896hae2bhajjn5cqxr08f2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTY4ODllOGYtZmI1ZTIzZi02ODRiYmRhZC04MWFkMDQ0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:06:49.274268Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:927:2743], Recipient [3:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:06:49.274367Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:06:49.274420Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-07T14:06:49.274459Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-04-07T14:06:49.274516Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-07T14:06:49.274611Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:49.274660Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:06:49.274697Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:49.274730Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:49.274771Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-07T14:06:49.274808Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:49.274829Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:49.274847Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:06:49.274865Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:49.274956Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:06:49.275141Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-04-07T14:06:49.275175Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:49.275207Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:49.275239Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:49.275281Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:49.275299Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:49.275329Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T14:06:49.275363Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2025-04-07T14:06:49.390884Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-04-07T14:06:49.391211Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-04-07T14:06:49.391305Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-04-07T14:06:49.571261Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:49.571341Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:49.571405Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1000 ms, status: COMPLETE 2025-04-07T14:06:49.571517Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:49.571580Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:06:49.571642Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:49.571688Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:927:2743], 0} after executionsCount# 1 2025-04-07T14:06:49.571744Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:927:2743], 0} sends rowCount# 4, bytes# 96, quota rows left# 997, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:49.571831Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:927:2743], 0} finished in read 2025-04-07T14:06:49.573896Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:927:2743], Recipient [3:632:2537]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:49.573966Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-07T14:06:10.717005Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744034770716972 2025-04-07T14:06:11.042195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572305147164185:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.042247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001ddf/r3tmp/tmpIWG2ig/pdisk_1.dat 2025-04-07T14:06:11.234988Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.236330Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.284583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:11.437889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.437999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.438725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.438764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.445316Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:11.445803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.446432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2254, node 1 2025-04-07T14:06:11.504788Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.508205Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.508640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.609197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001ddf/r3tmp/yandexKcLmg4.tmp 2025-04-07T14:06:11.609418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001ddf/r3tmp/yandexKcLmg4.tmp 2025-04-07T14:06:11.609528Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001ddf/r3tmp/yandexKcLmg4.tmp 2025-04-07T14:06:11.609618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:11.786357Z INFO: TTestServer started on Port 12297 GrpcPort 2254 TClient is connected to server localhost:12297 PQClient connected to localhost:2254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:06:12.027371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:06:14.084370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572319226774138:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.084537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572319226774146:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.084636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.089775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:06:14.102739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572319226774152:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:06:14.420539Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572318032067134:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420515Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572319226774195:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420802Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTUyZGQ3OGMtYTNmMTA2M2YtOGY2ZGUwYjMtNGFmM2Q4MDI=, ActorId: [2:7490572319226774136:2280], ActorState: ExecuteState, TraceId: 01jr895f22dphafhrs8betswzp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.424156Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.427826Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzIyZGE2MDgtYzUwNGVmMDgtNGJkYjNkOTAtMmMxOTA0Mjc=, ActorId: [1:7490572318032067108:2303], ActorState: ExecuteState, TraceId: 01jr895f5f83pd1rkzjehqmc8r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.428148Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.465032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.597901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.720980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2254", true, true, 1000); 2025-04-07T14:06:15.114294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr895fr8138q70rm5ba4zjt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZkNzRjNWItN2Q1MjFlZjQtNmY2YmIzOGMtYzA0NzVhNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490572322327034886:3009] 2025-04-07T14:06:16.042441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572305147164185:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:16.042552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:06:21.043360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:2254 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T14:06:21.137023Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:2254 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710679 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cl ... 
userAgent="pqv1 server" ip=ipv6:[::1]:55550 proto=v1 topic=test-topic durationSec=0 2025-04-07T14:06:48.976824Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:06:48.977908Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T14:06:48.977986Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:06:48.977993Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:06:48.978002Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:06:48.978018Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:06:48.979686Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:06:49.101646Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T14:06:49.101879Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490572469614247268:2479] connected; active server actors: 1 2025-04-07T14:06:49.101911Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T14:06:49.101923Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T14:06:49.102151Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490572469614247268:2479] disconnected; active server actors: 1 2025-04-07T14:06:49.102169Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490572469614247268:2479] disconnected no session 2025-04-07T14:06:49.220863Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:06:49.220905Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:06:49.220927Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490572465319279924:2479] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T14:06:49.220961Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:06:49.221775Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:49.221809Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7490572469614247286:2479], now have 1 active actors on pipe 2025-04-07T14:06:49.221932Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-07T14:06:49.222083Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:06:49.222115Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:06:49.222217Z node 6 :PERSQUEUE INFO: new Cookie src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T14:06:49.222351Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:06:49.222439Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:06:49.222876Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:06:49.222903Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:06:49.222970Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:06:49.223201Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 2025-04-07T14:06:49.223952Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034809223 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:06:49.224062Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|f10a5b7-48ddd146-547360cb-5cfe3b5_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T14:06:49.224258Z :INFO: [] MessageGroupId [src] SessionId [src|f10a5b7-48ddd146-547360cb-5cfe3b5_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:06:49.224293Z :INFO: [] MessageGroupId [src] SessionId [src|f10a5b7-48ddd146-547360cb-5cfe3b5_0] Write session will now close 2025-04-07T14:06:49.224348Z :DEBUG: [] MessageGroupId [src] SessionId [src|f10a5b7-48ddd146-547360cb-5cfe3b5_0] Write session: aborting 2025-04-07T14:06:49.224851Z :INFO: [] MessageGroupId [src] SessionId [src|f10a5b7-48ddd146-547360cb-5cfe3b5_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:06:49.224889Z :DEBUG: [] MessageGroupId [src] SessionId [src|f10a5b7-48ddd146-547360cb-5cfe3b5_0] Write session: destroy 2025-04-07T14:06:49.225924Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 grpc read done: success: 0 data: 2025-04-07T14:06:49.225966Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 grpc read failed 2025-04-07T14:06:49.226004Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 grpc closed 2025-04-07T14:06:49.226025Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f10a5b7-48ddd146-547360cb-5cfe3b5_0 is DEAD 2025-04-07T14:06:49.226525Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:06:49.226831Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:06:49.226883Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490572469614247286:2479] destroyed 2025-04-07T14:06:49.226911Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:06:49.234659Z :INFO: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Starting read session 2025-04-07T14:06:49.234718Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Starting session to cluster null (localhost:1975) 2025-04-07T14:06:49.236718Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:49.236769Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:49.236816Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] Reconnecting session to cluster null in 0.000000s 2025-04-07T14:06:49.238244Z :ERROR: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-07T14:06:49.238303Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:49.238335Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:49.238459Z :INFO: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-07T14:06:49.238634Z :NOTICE: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:06:49.238667Z :DEBUG: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-07T14:06:49.238755Z :INFO: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Closing read session. Close timeout: 0.000000s 2025-04-07T14:06:49.238794Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T14:06:49.238832Z :INFO: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:06:49.238957Z :NOTICE: [/Root] [/Root] [876e42af-a950d211-cb1e79c5-f6308276] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:06:49.587162Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490572469614247324:2497], TxId: 281474976710687, task: 1, CA Id [5:7490572469614247320:2497]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T14:06:49.620767Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490572469614247324:2497], TxId: 281474976710687, task: 1, CA Id [5:7490572469614247320:2497]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:06:49.673112Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490572469614247324:2497], TxId: 281474976710687, task: 1, CA Id [5:7490572469614247320:2497]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:06:49.730496Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490572469614247324:2497], TxId: 281474976710687, task: 1, CA Id [5:7490572469614247320:2497]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-04-07T14:06:45.646320Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:45.710041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:45.710098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:45.712351Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:45.712839Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:45.713067Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:45.724130Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:45.760437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:45.760912Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:45.762041Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:45.762094Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:45.762127Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:45.762586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:45.790233Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:45.790375Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:45.790475Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:45.790511Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:45.790541Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:45.790580Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:45.790759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.790804Z node 1 :TX_DATASHARD TRACE: StateWork, processing 
event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.791198Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:45.791296Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:45.791390Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:45.791441Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:45.791493Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:45.791530Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:45.791570Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:45.791598Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:45.791633Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:45.832987Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.833066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.833124Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:45.835810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:45.835872Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:45.835963Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:45.836173Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:45.836228Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:45.836280Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:45.836328Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:45.836365Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:45.836397Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:45.836442Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:45.836751Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:45.836824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:45.836860Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:45.836904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:45.836960Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 
9437184 is DelayComplete 2025-04-07T14:06:45.836989Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:45.837020Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:45.837060Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:45.837090Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:45.860021Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:45.860096Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:45.860168Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:45.860223Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:45.860289Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:45.860824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.860878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.860944Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:45.861067Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:45.861094Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:45.861214Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:45.861257Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.861292Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:45.861340Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:45.865138Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:45.865207Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:45.865395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.865431Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.865481Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:45.865520Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:45.865558Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:45.865595Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:45.865645Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:45.865683Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.865734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:45.865781Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:45.865813Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:45.865973Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:45.866005Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.866042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:45.866067Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:45.866089Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:45.866144Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:45.866172Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:45.866205Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:45.866243Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:45.866294Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:45.866328Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:45.866373Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:45.866424Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.866457Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:45.866488Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2025-04-07T14:06:50.690371Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:50.690389Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-07T14:06:50.690406Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-07T14:06:50.690425Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-07T14:06:50.690848Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-07T14:06:50.690913Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:50.690961Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:50.690983Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-07T14:06:50.691004Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-07T14:06:50.691027Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:06:50.691200Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-07T14:06:50.691228Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-07T14:06:50.691260Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-07T14:06:50.691289Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-07T14:06:50.691323Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:06:50.691343Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-07T14:06:50.691372Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-07T14:06:50.691408Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:50.691455Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:06:50.691493Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-07T14:06:50.691524Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-07T14:06:50.691698Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:228:2223], Recipient [2:228:2223]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:50.691732Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:50.691770Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:50.691799Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:50.691821Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:50.691848Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2025-04-07T14:06:50.691870Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-07T14:06:50.691913Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.691943Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:50.691966Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:50.691990Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:50.692435Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-07T14:06:50.692468Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.692482Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:50.692497Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-07T14:06:50.692511Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-07T14:06:50.692532Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.692551Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-07T14:06:50.692570Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:50.692606Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:50.692654Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-07T14:06:50.692695Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-07T14:06:50.692726Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-07T14:06:50.692758Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.692778Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:50.692799Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-07T14:06:50.692817Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-07T14:06:50.692858Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.692876Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-07T14:06:50.692894Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-07T14:06:50.692914Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-07T14:06:50.692935Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.692958Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-07T14:06:50.692998Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-07T14:06:50.693020Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-07T14:06:50.693044Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.693063Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-07T14:06:50.693081Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-07T14:06:50.693098Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:06:50.693116Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.693134Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:06:50.693152Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:06:50.693177Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-07T14:06:50.693460Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-07T14:06:50.693499Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:50.693527Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.693552Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:06:50.693567Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-07T14:06:50.693582Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.693728Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-07T14:06:50.693745Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-07T14:06:50.693765Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-07T14:06:50.693780Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-07T14:06:50.693796Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:06:50.693808Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-07T14:06:50.693822Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-07T14:06:50.693842Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:50.693861Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:50.693883Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:50.693923Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:50.706432Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-07T14:06:50.706494Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-07T14:06:50.706554Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:50.706592Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.706647Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:50.706694Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:50.706926Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-07T14:06:50.706960Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-07T14:06:50.707002Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:06:50.707049Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:06:50.707086Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:50.707117Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-04-07T14:06:43.313269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:43.313571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:43.313794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018ab/r3tmp/tmpAdSSKq/pdisk_1.dat 2025-04-07T14:06:43.639015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:43.666611Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:43.714278Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:06:43.714893Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:06:43.715074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:43.715173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:43.726543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:43.834293Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:06:43.834337Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:06:43.834460Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:06:43.932674Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:06:43.933181Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:06:43.933252Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:06:43.933458Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:06:43.933576Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:06:43.933647Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:06:43.933910Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:06:43.935155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:43.935890Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:06:43.935949Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:06:43.963453Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:43.964259Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:43.964886Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:43.965231Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2025-04-07T14:06:43.965447Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:43.993532Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:43.993602Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:43.993953Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:640:2542] 2025-04-07T14:06:43.994111Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:44.000460Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:625:2533], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:44.001140Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:44.001245Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:44.002434Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:44.002484Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:44.002522Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:44.002749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:44.042357Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:44.042519Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:44.042623Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2025-04-07T14:06:44.042668Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:44.042692Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:06:44.042728Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:06:44.042966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.043000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.043210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:44.043322Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:44.043376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:44.043406Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:44.044287Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:06:44.044343Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:06:44.044374Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:06:44.044562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:44.044598Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:06:44.044643Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:44.044705Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2025-04-07T14:06:44.044725Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:06:44.044740Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:06:44.044766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:06:44.044868Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:44.044896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:44.044933Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:44.044980Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:44.045010Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:44.045039Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:44.045074Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:44.045148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:640:2542], Recipient [1:640:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.045174Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:44.045496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2549], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.045521Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:44.045551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2549], sessionId# [0:0:0] 
2025-04-07T14:06:44.045674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2549] 2025-04-07T14:06:44.045712Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:44.045797Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:44.045957Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:44.046004Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:44.046062Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:44.046112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:44.046144Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:44.046167Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 720751 ... Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-07T14:06:50.357194Z node 2 :KQP_EXECUTER INFO: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-07T14:06:50.357232Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-07T14:06:50.357281Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-07T14:06:50.357334Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-07T14:06:50.357384Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. 
Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-07T14:06:50.357782Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:932:2746], Recipient [2:889:2713]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:50.357839Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:50.357882Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:931:2745], serverId# [2:932:2746], sessionId# [0:0:0] 2025-04-07T14:06:50.358123Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:928:2729], Recipient [2:889:2713]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 928 RawX2: 8589937321 } TxBody: " \0018\000`\200\200\200\005j\240\006\010\001\022\217\006\010\001\022\024\n\022\t\240\003\000\000\000\000\000\000\021\251\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000 2025-04-07T14:06:50.358164Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:50.358313Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:889:2713], Recipient [2:889:2713]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:06:50.358346Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:06:50.358408Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:50.358784Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-04-07T14:06:50.358856Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-04-07T14:06:50.358910Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-07T14:06:50.359190Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:06:50.359249Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-07T14:06:50.359316Z node 2 :TX_DATASHARD TRACE: Advance execution plan 
for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:06:50.359355Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:50.359386Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:50.359433Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-07T14:06:50.359489Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2025-04-07T14:06:50.359527Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-07T14:06:50.359550Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:50.359572Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:06:50.359598Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:06:50.359656Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-07T14:06:50.359714Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191944 2025-04-07T14:06:50.359905Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:06:50.359966Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:50.360000Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:06:50.360034Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:50.360066Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:50.360145Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:50.360193Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:50.360236Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:50.360282Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:50.360332Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-07T14:06:50.360356Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:50.360385Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-07T14:06:50.371216Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:50.371282Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:50.371337Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:06:50.371413Z node 2 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-04-07T14:06:50.371478Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:50.372403Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [2:24:2071], Recipient [2:889:2713]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2001} 2025-04-07T14:06:50.372449Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-07T14:06:50.372490Z node 2 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-04-07T14:06:50.372546Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:50.372617Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-07T14:06:50.372786Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:06:50.372838Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:06:50.372896Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:928:2729] TxId: 281474976715662. Ctx: { TraceId: 01jr896jdfawyze0k1avjn7h0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc4OWNjOTAtMjE3NGQxLWJmY2VhMzU4LTFiZjhhZjhm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2025-04-07T14:06:41.415890Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:41.482162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:41.482198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:41.483495Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:41.483746Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:41.483992Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:41.491263Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:41.521758Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:41.522097Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:41.523415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:41.523489Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:41.523542Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:41.523814Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:41.546524Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:41.546654Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:41.546736Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:41.546772Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:41.546806Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:41.546834Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:41.546968Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.547000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.547256Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:41.547332Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:41.547413Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:41.547452Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:41.547487Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:41.547516Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:41.547545Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 
9437184 2025-04-07T14:06:41.547566Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:41.547589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:41.590858Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.590924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.590986Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:41.592802Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:41.592856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:41.592951Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:41.593084Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:41.593125Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:41.593172Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:41.593213Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:41.593256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:41.593295Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:41.593326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:41.593547Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:41.593591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:41.593619Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:41.593654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:41.593697Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:41.593718Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:41.593754Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:41.593777Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:41.593795Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:41.616282Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:41.616348Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:41.616394Z node 1 :TX_DATASHARD TRACE: Complete 
execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:41.616450Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:41.616553Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:41.616993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.617036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:41.617067Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:41.617158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:41.617192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:41.617312Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:41.617353Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:41.617386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:41.617414Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:41.620000Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:41.620052Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:41.620228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.620258Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:41.620310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:41.620345Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:41.620372Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:41.620410Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:41.620437Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:41.620486Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:41.620521Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:41.620547Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:41.620571Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:41.620904Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:41.620938Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 
9437184 is Executed 2025-04-07T14:06:41.620969Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:41.620992Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:41.621009Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:41.621059Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:41.621079Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:41.621106Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:41.621143Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:41.621186Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:41.621215Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:41.621240Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:41.621283Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:41.621297Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:41.621313Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:06:50.603639Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:50.603897Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-07T14:06:50.603931Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.603969Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-07T14:06:50.604046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-07T14:06:50.604069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604108Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-07T14:06:50.604200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-07T14:06:50.604225Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604249Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-07T14:06:50.604320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:50.604357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604384Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:50.604437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:50.604462Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604483Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:50.604551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:50.604574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604594Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:50.604693Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-07T14:06:50.604720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604745Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-07T14:06:50.604832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:50.604856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604883Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-07T14:06:50.604948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-07T14:06:50.604969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.604991Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-07T14:06:50.605069Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-07T14:06:50.605095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.605115Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-07T14:06:50.605180Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:50.605219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.605242Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:50.605339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:50.605364Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.605389Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-07T14:06:50.605449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:50.605472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.605492Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:50.605561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:50.605589Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.605640Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:50.605686Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:50.605716Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:50.605827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:50.605849Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.605892Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:50.605930Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:50.605954Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:50.606046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:50.606079Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.606112Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:50.606146Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 
9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:50.606167Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:50.606260Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:50.606280Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-07T14:06:50.606315Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2025-04-07T14:06:50.606366Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:50.606396Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:50.606548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:50.606576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.606600Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:50.606691Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-07T14:06:50.606714Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.606743Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-07T14:06:50.606797Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-07T14:06:50.606819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.606842Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-07T14:06:50.606941Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-07T14:06:50.606968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:50.606990Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::GetKeyWithVersionSkips [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:22.591930Z 00000.019 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.025 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.032 II| FAKE_ENV: Starting 
storage for BS group 0 00000.032 II| FAKE_ENV: Starting storage for BS group 1 00000.032 II| FAKE_ENV: Starting storage for BS group 2 00000.032 II| FAKE_ENV: Starting storage for BS group 3 00000.043 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.045 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.051 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.052 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} took 1024b of static mem, Memory{1024 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} release 1024b of static, Memory{0 dyn 0} 00000.053 II| TABLET_EXECUTOR: Leader{1:2:4} suiciding, Waste{2:0, 317b +(0, 0b), 3 trc, -0b acc} 00000.056 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.057 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 317 bytes, 317 total, blobs: { 
[1:2:2:1:8192:302:0], [1:2:3:1:8192:15:0] } 00000.058 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.058 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.058 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 94b, wait} done, Waste{2:0, 317b +(0, 0b), 3 trc} 00000.059 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.059 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile 00000.059 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} took 1024b of static mem, Memory{1024 dyn 0} 00000.059 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.059 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxCheckResourceProfile} release 1024b of static, Memory{0 dyn 0} 00000.060 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 317b +(0, 0b), 1 trc, -0b acc} 00000.060 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.060 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.060 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.060 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {262b, 6} 00000.060 II| FAKE_ENV: DS.1 gone, left {411b, 3}, put {446b, 4} 00000.060 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.060 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.060 II| FAKE_ENV: All BS storage groups are stopped 00000.061 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.061 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 47}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:22.657276Z 00000.005 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, 
final id 0, final level 0 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 9216b requested for data (10240b in total) 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 10240b of static mem, Memory{10240 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 10240b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.012 DD| 
TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release tx data 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} request Res{1 20480b} type small_transaction 00000.012 DD| RESOURCE_BROKER: Submitted new unknown task Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [2:29:2061]) priority=5 resources={0, 20480} 00000.012 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [2:29:2061])' of unknown type 'small_transaction' to default queue 00000.012 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{4, NKi ... 509b} miss {30 140710b} 00000.065 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.065 II| FAKE_ENV: DS.1 gone, left {143784b, 7}, put {158022b, 150} 00000.065 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.065 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.065 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14313b, 148} 00000.065 II| FAKE_ENV: All BS storage groups are stopped 00000.065 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.065 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.028365Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.060 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.061 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 8 reqs hit {0 0b} miss {32 149036b} 00000.061 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.061 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14313b, 148} 00000.061 II| FAKE_ENV: DS.1 gone, left {143227b, 7}, put {157466b, 150} 00000.061 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: All BS storage groups are stopped 00000.061 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.061 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.100741Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.061 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.062 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 11 reqs hit {0 0b} miss {33 149660b} 00000.062 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.062 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14313b, 148} 00000.062 II| FAKE_ENV: DS.1 gone, left {143220b, 7}, 
put {157459b, 150} 00000.062 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.062 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.062 II| FAKE_ENV: All BS storage groups are stopped 00000.062 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.062 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.167833Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.066 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.066 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 3 reqs hit {4 509b} miss {30 140710b} 00000.066 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.066 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14470b, 149} 00000.066 II| FAKE_ENV: DS.1 gone, left {143829b, 8}, put {158128b, 152} 00000.066 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.066 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.066 II| FAKE_ENV: All BS storage groups are stopped 00000.067 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.067 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.239378Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.064 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.065 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 4 reqs hit {32 141808b} miss {50 281387b} 00000.065 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.065 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14482b, 150} 00000.065 II| FAKE_ENV: DS.1 gone, left {143836b, 10}, put {157993b, 152} 00000.065 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.065 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.065 II| FAKE_ENV: All BS storage groups are stopped 00000.065 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.065 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.309161Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.058 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.059 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.059 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.059 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.059 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.059 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.059 II| FAKE_ENV: DS.2 gone, left 
{0b, 0}, put {0b, 0} 00000.059 II| FAKE_ENV: All BS storage groups are stopped 00000.059 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.059 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.373253Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.031 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.031 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 3 reqs hit {3 512b} miss {0 0b} 00000.031 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.032 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.032 II| FAKE_ENV: DS.1 gone, left {6814b, 23}, put {6814b, 23} 00000.032 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.032 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.032 II| FAKE_ENV: All BS storage groups are stopped 00000.032 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.032 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.409738Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.189 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.189 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.189 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.189 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.189 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.189 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.198 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.198 II| FAKE_ENV: All BS storage groups are stopped 00000.198 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.198 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.612950Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00009.989 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00009.989 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00009.989 II| FAKE_ENV: Shut order, stopping 4 BS groups 00009.990 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00009.990 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00010.001 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00010.001 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00010.001 II| FAKE_ENV: All BS storage groups are stopped 00010.001 II| FAKE_ENV: Model stopped, 
hosted 3 actors, spent 0.000s 00010.001 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:36.626248Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00008.510 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00008.510 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00008.510 II| FAKE_ENV: Shut order, stopping 4 BS groups 00008.510 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00008.511 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00008.517 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00008.518 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00008.518 II| FAKE_ENV: All BS storage groups are stopped 00008.518 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00008.518 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:45.153723Z 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:45.185124Z 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:45.221365Z 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:45.263037Z 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> TSchemeShardTest::Boot >> TSchemeShardTest::RmDirTwice >> TSchemeShardTest::MkRmDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2025-04-07T14:06:48.165633Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:48.229349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:48.229411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:48.231111Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:48.231465Z node 1 
:TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:48.231680Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:48.241330Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:48.275322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:48.275803Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:48.277369Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:48.277450Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:48.277499Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:48.277806Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:48.303733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:48.303919Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:48.304069Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:48.304131Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:48.304171Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:48.304201Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.304366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.304415Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.304803Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:48.304896Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:48.304943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.305025Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:48.305086Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:48.305115Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:48.305146Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:48.305188Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:48.305225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:48.353346Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.353420Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.353472Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:48.356013Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction 
TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:48.356132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:48.356207Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:48.356360Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:48.356412Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:48.356460Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:48.356498Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.356530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:48.356574Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:48.356607Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.356877Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:48.356925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:48.356960Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:48.356995Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.357050Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:48.357086Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:48.357119Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:48.357147Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.357168Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:48.379593Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:48.379650Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.379695Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.379743Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:48.379807Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:48.380272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.380321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.380364Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 
2025-04-07T14:06:48.380481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:48.380514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:48.380633Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.380702Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.380738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:48.380773Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:48.384359Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:48.384418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.384619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.384654Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.384720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.384760Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:48.384790Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:48.384825Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:48.384868Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:48.384912Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.384953Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:48.384985Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:48.385016Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:48.385186Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:48.385221Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.385256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:48.385280Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:48.385302Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:48.385360Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.385382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:48.385415Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:48.385449Z node 1 
:TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:48.385530Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:48.385563Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:48.385606Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:48.385650Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.385671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:48.385694Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 2:2382], Recipient [1:432:2382]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.486349Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.486376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-07T14:06:52.486398Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:06:52.486419Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-07T14:06:52.486439Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-07T14:06:52.486459Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:52.486482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-07T14:06:52.486500Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-07T14:06:52.486528Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-07T14:06:52.486824Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-07T14:06:52.486858Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:06:52.486898Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-07T14:06:52.486928Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-07T14:06:52.486949Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-07T14:06:52.486969Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:52.487095Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-07T14:06:52.487118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-07T14:06:52.487143Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-07T14:06:52.487167Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-07T14:06:52.487193Z node 1 
:TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-07T14:06:52.487212Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-07T14:06:52.487228Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-07T14:06:52.487272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:52.487293Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-07T14:06:52.487321Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-07T14:06:52.487348Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-07T14:06:52.487469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-07T14:06:52.487494Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487517Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-07T14:06:52.487580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-07T14:06:52.487594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487607Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-07T14:06:52.487651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-07T14:06:52.487662Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487680Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-07T14:06:52.487717Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-07T14:06:52.487729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487749Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-07T14:06:52.487809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-07T14:06:52.487831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487852Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-07T14:06:52.487905Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 
TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-07T14:06:52.487918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.487948Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-07T14:06:52.488008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-07T14:06:52.488026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488043Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-07T14:06:52.488111Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-07T14:06:52.488126Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488139Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-07T14:06:52.488198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-07T14:06:52.488219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488234Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-07T14:06:52.488279Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-07T14:06:52.488294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488309Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-07T14:06:52.488345Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-07T14:06:52.488356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488368Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-07T14:06:52.488411Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-07T14:06:52.488428Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488441Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-07T14:06:52.488498Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient 
[1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-07T14:06:52.488512Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488528Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-07T14:06:52.488569Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-07T14:06:52.488583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.488595Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-07T14:06:52.502094Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:06:52.502159Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-07T14:06:52.502217Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2025-04-07T14:06:52.502286Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:52.502321Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:06:52.502536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-07T14:06:52.502572Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:06:52.502603Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardCheckProposeSize::CopyTable >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableFollowers >> BuildStatsHistogram::Ten_Mixed_Log 
[GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.787563Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.787632Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.824289Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.844056Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:04:51.845052Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.847627Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.851332Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:183:2196] 2025-04-07T14:04:51.853051Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.861581Z node 1 :PERSQUEUE INFO: new Cookie owner|5dbf3672-1951d80c-41c85535-68c018c5_0 
generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2025-04-07T14:04:51.862185Z node 1 :PERSQUEUE INFO: new Cookie owner|6e84407f-5b51db6-84f5a9-96262cf5_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2025-04-07T14:04:51.862906Z node 1 :PERSQUEUE INFO: new Cookie owner|d4ac9f28-da255622-e9638b82-660facbe_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR 2025-04-07T14:04:53.178981Z node 1 :PERSQUEUE INFO: new Cookie default|ce55dfca-3491f38c-47992c23-6922e01c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-07T14:04:53.179683Z node 1 :PERSQUEUE INFO: new Cookie owner2|47482898-be190d40-83464de5-69f0650_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-07T14:04:53.180805Z node 1 :PERSQUEUE INFO: new Cookie owner|a4ce9549-410819c6-d58bb44c-5a773924_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to 
EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TE ... ::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:52.641508Z node 81 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-07T14:06:52.656649Z node 81 :PERSQUEUE INFO: new Cookie default|24d51cbe-10708f86-471de8f7-85ec7161_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [82:101:2057] recipient: [82:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [82:101:2057] recipient: [82:99:2133] Leader for TabletID 72057594037927937 is [82:105:2137] sender: [82:106:2057] recipient: [82:99:2133] 2025-04-07T14:06:53.162851Z node 82 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:53.162921Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [82:147:2057] recipient: [82:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [82:147:2057] recipient: [82:145:2168] Leader for TabletID 72057594037927938 is [82:151:2172] sender: [82:152:2057] recipient: [82:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [82:105:2137] sender: [82:175:2057] recipient: [82:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:53.182578Z node 82 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:53.183972Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 82 actor [82:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 82 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 82 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 82 Important: true } 2025-04-07T14:06:53.185094Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [82:182:2195] 2025-04-07T14:06:53.187630Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [82:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:53.189715Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [82:183:2196] 2025-04-07T14:06:53.191551Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [82:183:2196] 2025-04-07T14:06:53.192820Z node 82 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [82:184:2197] 2025-04-07T14:06:53.194581Z node 82 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [82:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:53.226382Z node 82 :PERSQUEUE INFO: new Cookie default|21f7c1fa-b88f61ed-f78f3c1d-279597de_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:53.323693Z node 82 :PERSQUEUE INFO: new Cookie default|ff60826b-8294316c-6b52fef0-df38ca6a_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:53.409807Z node 82 :PERSQUEUE INFO: new Cookie default|1dec0eb5-b2e466a6-5dda6f7a-f5b3d6f5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:06:53.539079Z node 82 :PERSQUEUE INFO: new Cookie default|8203f783-7d7d51f-2f21ffbc-b7d7d3ce_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:53.589081Z node 82 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-07T14:06:53.605858Z node 82 :PERSQUEUE INFO: new Cookie default|cc68d795-ced9ff21-1a847ad2-82baf81a_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [83:101:2057] recipient: [83:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [83:101:2057] recipient: [83:99:2133] Leader for TabletID 72057594037927937 is [83:105:2137] sender: [83:106:2057] recipient: [83:99:2133] 2025-04-07T14:06:54.033509Z node 83 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:54.033567Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [83:147:2057] recipient: [83:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [83:147:2057] recipient: [83:145:2168] Leader for TabletID 72057594037927938 is [83:151:2172] sender: [83:152:2057] recipient: [83:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [83:105:2137] sender: [83:175:2057] recipient: [83:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:54.051720Z node 83 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:06:54.052812Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 83 actor [83:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 83 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 83 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 83 Important: true } 2025-04-07T14:06:54.053932Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [83:182:2195] 2025-04-07T14:06:54.056844Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [83:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:06:54.058888Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [83:183:2196] 2025-04-07T14:06:54.061098Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [83:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:06:54.062934Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: 
StateInit] bootstrapping 2 [83:184:2197] 2025-04-07T14:06:54.065100Z node 83 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [83:184:2197] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:54.096149Z node 83 :PERSQUEUE INFO: new Cookie default|48bcacee-f2f0af61-6269773e-528b9e4_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:54.178367Z node 83 :PERSQUEUE INFO: new Cookie default|7a566d82-6347759d-27c5d10-5c749a1a_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:54.253998Z node 83 :PERSQUEUE INFO: new Cookie default|714f854-9e995f63-aa811c02-93e7429_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-07T14:06:54.361452Z node 83 :PERSQUEUE INFO: new Cookie default|7ff16968-d7c2f1c1-8d5a46dd-6379ab43_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:06:54.406268Z node 83 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-07T14:06:54.422302Z node 83 :PERSQUEUE INFO: new Cookie default|18a765d5-d01644fa-a84ee976-1af81b1b_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:48.241417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:48.241632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:48.241784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001890/r3tmp/tmpvoQBEq/pdisk_1.dat 2025-04-07T14:06:48.541928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:48.574609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:48.623011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:48.623141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:48.634421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:48.745173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:49.068179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-07T14:06:49.348154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:49.348241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:49.348295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:49.351770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:49.544977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:49.837640Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr896hg2fpnkvbk5wdacjrd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU2Nzk3OWMtNWUzNjFiMWQtZjVhMjZkYmYtZGEyMjVjNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:49.902943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr896j00b529e2yxxrbwv8aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcxN2Q1MDctMjQxZDg4MTItODRmMmE4MDUtN2NmNjM5Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-07T14:06:50.225952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr896j2926mkpagw2aw822mz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI2YmM1ODgtZjhiNTkyZDQtMWY3Njg3YzItN2RiM2QzYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-07T14:06:50.315563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr896jbw9wpjk0kavfrgdmc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI2YmM1ODgtZjhiNTkyZDQtMWY3Njg3YzItN2RiM2QzYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-04-07T14:06:50.750111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr896jr92smhqdvxf2rpzw8a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZjZDM5MTMtOTQwZmQ5ODMtZjMwNTE4NWMtOTQ0ZjVmY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-04-07T14:06:53.492686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:53.492935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:53.492984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001890/r3tmp/tmpW91PCt/pdisk_1.dat 2025-04-07T14:06:53.712179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:53.732913Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:53.778435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:53.778547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:53.789763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:53.901454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:54.168992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-07T14:06:54.459731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.459833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:802:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.459911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.465194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:54.660434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:54.766644Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr896pft9rybpe2e377jdksw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmU1MWYyZDQtODhhNWNmNmYtYWViMjFkZjYtNmE3NTI3NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 890 RawX2: 8589937237 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\tz\003\000\000\000\000\000\000\021U\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jr896pft9rybpe2e377jdksw\222\001\014\n\010Database\022\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=MmU1MWYyZDQtODhhNWNmNmYtYWViMjFkZjYtNmE3NTI3NWI=\230\001\000\"\n\010\263\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 631 RawX2: 8589937129 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 213 } } 2025-04-07T14:06:54.849741Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr896psz13hwrzj7h0m73nmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDhhNGY5Y2ItNzA1M2EyZTgtMWVjYWY0YzItMWJjODMyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 912 RawX2: 8589937309 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\220\003\000\000\000\000\000\000\021\235\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jr896psz13hwrzj7h0m73nmw\222\001\026\n\022CustomerSuppliedId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZDhhNGY5Y2ItNzA1M2EyZTgtMWVjYWY0YzItMWJjODMyZmI=\222\001\014\n\010Database\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 720 RawX2: 8589937192 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 229 } } ===== Begin SELECT 2025-04-07T14:06:55.146848Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr896pwn1e94ww15sc2jgg3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFhYmI3Y2QtYWZiM2Y5ZDUtMzFiNjExYjMtMTcwOTQzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-07T14:06:55.247700Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr896q5pawf2g695zrrm6wy1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFhYmI3Y2QtYWZiM2Y5ZDUtMzFiNjExYjMtMTcwOTQzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 978 RawX2: 8589937331 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\322\003\000\000\000\000\000\000\021\263\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=MjFhYmI3Y2QtYWZiM2Y5ZDUtMzFiNjExYjMtMTcwOTQzYmE=\222\001%\n\007TraceId\022\03201jr896q5pawf2g695zrrm6wy1\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 978 RawX2: 8589937331 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\322\003\000\000\000\000\000\000\021\263\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=MjFhYmI3Y2QtYWZiM2Y5ZDUtMzFiNjExYjMtMTcwOTQzYmE=\222\001\026\n\022CurrentExecutionId\022\000\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jr896q5pawf2g695zrrm6wy1\222\001\023\n\nDatabaseId\022\005/Root\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2007 MaxStep: 32007 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 631 RawX2: 8589937129 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 220 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2007 MaxStep: 32007 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 720 RawX2: 8589937192 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 131 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 594 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 745 } } ===== Waiting for commit response ===== Last SELECT 2025-04-07T14:06:55.687434Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jr896qjy75s9gb9v5zh91ybf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzkzMjAwMzMtN2NlYzAyMTAtZjlkMjEwMWMtNTMzZjI2ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::SchemeErrors >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> TSchemeShardTest::CreateTable >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-04-07T14:06:41.645179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:41.645425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:41.645610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018b4/r3tmp/tmpSyA1qF/pdisk_1.dat 2025-04-07T14:06:41.935447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:41.947923Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-07T14:06:41.947992Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2025-04-07T14:06:41.983705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:42.032855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:42.032999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:42.044368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:42.152249Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-07T14:06:42.152371Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T14:06:42.152637Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-07T14:06:42.153058Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-07T14:06:42.153123Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2025-04-07T14:06:42.155159Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T14:06:42.155309Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-07T14:06:42.155355Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-07T14:06:42.155392Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-07T14:06:42.160895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:42.192029Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:42.192871Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:42.193326Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:631:2537] 2025-04-07T14:06:42.193558Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:42.226085Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:42.226597Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:42.226675Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:42.227790Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:42.227850Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:42.227891Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:42.228108Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:42.245805Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:42.245992Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:42.246089Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:647:2546] 2025-04-07T14:06:42.246126Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:42.246157Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:06:42.246197Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:42.246611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:631:2537], Recipient [1:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:42.246675Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:42.246810Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:42.246873Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:42.247206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:638:2541], Recipient [1:631:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.247234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:42.247271Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:627:2534], serverId# [1:638:2541], sessionId# [0:0:0] 2025-04-07T14:06:42.247347Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:42.247377Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:42.247406Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:42.247430Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:42.247461Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:42.247489Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:42.247541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:42.247662Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:638:2541] 2025-04-07T14:06:42.247699Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:42.247781Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:42.247966Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:42.248017Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:42.248097Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:42.248146Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:42.248179Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:42.248223Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:42.248249Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:42.248513Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:42.248555Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:42.248583Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:42.248625Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:42.248693Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:42.248721Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:42.248763Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:42.248805Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:42.248843Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:42.249533Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:42.249573Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:42.249595Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:42.249661Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:42.249717Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:42.251534Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2025-04-07T14:06:42.251587Z node 1 
:TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 501 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2025-04-07T14:06:42.251950Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:648:2547], Recipient [1:631:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:42.251995Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:42.432378Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2025-04-07T14:06:42.432448Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2025-04-07T14:06:42.432489Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72075186224037888 2025-04-07T14:06:42.432680Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1500 in 0.500000s at 1.450000s 2025-04-07T14:06:42.433028Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1000, txid# 2814749767156 ... :06:55.538381Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1636:2421], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:55.538439Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1636:2421], 0} finished in read 2025-04-07T14:06:55.538505Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:06:55.538528Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:55.538548Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:55.538570Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:55.538605Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-07T14:06:55.538626Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:55.538642Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-07T14:06:55.538674Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:06:55.538755Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:06:55.539369Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1636:2421], Recipient [3:1215:2359]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:55.539408Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-07T14:06:55.661890Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr896qjb2tb9z176kswpj3sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjFkZDViYzgtOGEyMmI2MGYtYjBmZjE3YmUtNDJlNmJiMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:06:55.664389Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1654:2422], Recipient [3:1215:2359]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:06:55.664506Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:06:55.664576Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-07T14:06:55.664704Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:55.664763Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:06:55.664809Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:55.664846Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:55.664893Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-07T14:06:55.664939Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:55.664962Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:55.664985Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:06:55.665007Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:55.665132Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2025-04-07T14:06:55.665354Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:06:55.665410Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-07T14:06:55.665455Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1654:2422], 0} after executionsCount# 1 2025-04-07T14:06:55.665512Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1654:2422], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:55.665592Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1654:2422], 0} finished in read 2025-04-07T14:06:55.665667Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:55.665694Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:55.665716Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:55.665738Z node 3 :TX_DATASHARD 
TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:55.665780Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:06:55.665801Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:55.665828Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T14:06:55.665873Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:06:55.665968Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:06:55.666981Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1654:2422], Recipient [3:1215:2359]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:55.667043Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-07T14:06:55.782834Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr896qnr9zprvjav62xfrsqy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDE3YTgyYjktNjk0M2Q0MjQtZWUxOTE5NzUtZTRhYzEwZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:55.784535Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1678:2423], Recipient [3:1215:2359]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:06:55.784617Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:06:55.784691Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-07T14:06:55.784756Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:06:55.784784Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:06:55.784810Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:55.784845Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:06:55.784880Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-07T14:06:55.784911Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:06:55.784927Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:55.784941Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:06:55.784966Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:06:55.785078Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 
MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2025-04-07T14:06:55.785268Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:06:55.785311Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-07T14:06:55.785347Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1678:2423], 0} after executionsCount# 1 2025-04-07T14:06:55.785388Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1678:2423], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:06:55.785450Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1678:2423], 0} finished in read 2025-04-07T14:06:55.785517Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:06:55.785535Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:06:55.785552Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:06:55.785566Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:06:55.785601Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:06:55.785615Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:06:55.785635Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-07T14:06:55.785666Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:06:55.785736Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:06:55.786552Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1678:2423], Recipient [3:1215:2359]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:06:55.786609Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T14:06:55.787094Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [3:196:2110], Recipient [3:1215:2359]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 2 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::ParallelCreateTable [GOOD] >> 
TSchemeShardTest::ParallelCreateSameTable >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/u6pr/00120e/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 6951, MsgBus: 3305 2025-04-07T14:06:12.189332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572309112606875:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.190569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00120e/r3tmp/tmpm75lEW/pdisk_1.dat 2025-04-07T14:06:12.624860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.669339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.669474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6951, node 1 2025-04-07T14:06:12.674672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:12.817981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.817999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.818015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.818094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3305 TClient is connected to server localhost:3305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.477142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:06:13.515355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.618359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.736306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.787756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:14.981961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572317702543039:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.982105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.442049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.461866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.518332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.540972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.567047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.592416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.637200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572321997510831:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.637289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.637360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572321997510836:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.639888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:15.646526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572321997510838:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:17.195160Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572309112606875:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.195227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:27.621494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:27.621530Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"a3885b8e-4ead016b-c9d2ad95-c119e434") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"8883d950-168ecf41-72dfaf2d-b493f087") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"2645bd6-509095f4-a916bacf-c0f32acf")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-04-07T14:06:56.785859Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. Error: [TEvError] Spilling Service not started 2025-04-07T14:06:56.787993Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490572498091175628:4385], TxId: 281474976710971, task: 2. Ctx: { TraceId : 01jr896rkb8etm3gvnpcsv26m0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjNmMjU0YjItY2RiZjZkMjItZWI2MmQ2YTctNzFiMzUwY2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] Spilling Service not started }. 2025-04-07T14:06:56.789922Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490572498091175627:4384], TxId: 281474976710971, task: 1. Ctx: { TraceId : 01jr896rkb8etm3gvnpcsv26m0. SessionId : ydb://session/3?node_id=1&id=MjNmMjU0YjItY2RiZjZkMjItZWI2MmQ2YTctNzFiMzUwY2U=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-07T14:06:56.790233Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490572498091175629:4386], TxId: 281474976710971, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjNmMjU0YjItY2RiZjZkMjItZWI2MmQ2YTctNzFiMzUwY2U=. CustomerSuppliedId : . TraceId : 01jr896rkb8etm3gvnpcsv26m0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-07T14:06:56.813979Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjNmMjU0YjItY2RiZjZkMjItZWI2MmQ2YTctNzFiMzUwY2U=, ActorId: [1:7490572498091175614:4380], ActorState: ExecuteState, TraceId: 01jr896rkb8etm3gvnpcsv26m0, Create QueryResponse for error on request, msg: >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/u6pr/000aa0/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 17426, MsgBus: 14274 2025-04-07T14:06:12.198575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572311127857151:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.198775Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000aa0/r3tmp/tmplP25re/pdisk_1.dat 2025-04-07T14:06:12.657811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.657934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:12.668122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:12.677428Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17426, node 1 2025-04-07T14:06:12.817300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.817335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.817342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.817430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14274 TClient is connected to server localhost:14274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.474083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.515489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.655147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.772956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.849806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:06:15.201236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324012760551:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.201362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.441995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.463573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.488893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.554332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.578439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.603882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.659936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324012761049:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.659998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572324012761054:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.660009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.663137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:15.671790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572324012761056:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:17.194058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572311127857151:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.200833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:27.644978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:27.645016Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"733f704b-f69b0b78-55e91628-736cd228") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"e89aeebe-5c2c60dc-c3287659-3b1db8fb") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"9282412-e69abb06-4c712d5f-b5247c18")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/u6pr/000a56/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 8624, MsgBus: 9756 2025-04-07T14:06:12.197333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572309064147580:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:12.198034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000a56/r3tmp/tmpe17S9W/pdisk_1.dat 2025-04-07T14:06:12.617322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.624337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:12.624448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-07T14:06:12.626230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8624, node 1 2025-04-07T14:06:12.817278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:06:12.817300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:06:12.817307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:06:12.817387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9756 TClient is connected to server localhost:9756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:13.474225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.515359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.621349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.747691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:13.795152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:06:14.958399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572317654083869:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.958531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.442049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.466232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.493157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.520576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.546980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.580677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:06:15.628784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572321949051659:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.628860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.628986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572321949051664:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:15.633188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:06:15.640025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572321949051666:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:06:17.197525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572309064147580:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:17.197652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:27.590160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:27.590201Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"b4894bbe-d5d83fab-f3cadf80-665a4830") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"19ccea79-eee40dec-c7c90bb7-9a5b006f") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"460b0270-243b2e5c-a3327813-7b63b2c4")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects |92.6%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> DataShardReadIterator::ShouldHandleReadAck >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] >> DataShardReadIterator::ShouldReadRangeCellVec >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> TNodeBrokerTest::TestRandomActions [GOOD] >> DataShardReadIteratorSysTables::ShouldRead >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] Test command err: 2025-04-07T14:06:52.671339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:52.671583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:52.671822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00187e/r3tmp/tmpn3oPLo/pdisk_1.dat 2025-04-07T14:06:52.982365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:53.016544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:53.064622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:53.064768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:53.075768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:53.185967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:53.215683Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:53.216526Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:53.216915Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:06:53.217099Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:53.252292Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:53.252969Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:53.253055Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:53.254358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:53.254420Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:53.254465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:53.254710Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:53.284563Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:53.284802Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:53.284903Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:06:53.284933Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:53.284977Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:06:53.285011Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:53.285336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:53.285376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:53.285517Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:53.285591Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:53.285977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:53.286027Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:53.286090Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:06:53.286156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:53.286189Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:53.286231Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:53.286264Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:53.286305Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:53.286331Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:53.286373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:53.286488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:06:53.286519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:53.286601Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:53.286799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:53.286851Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:53.286930Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:53.286985Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:53.287031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:53.287062Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:53.287091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:53.287318Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:53.287356Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:53.287385Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:53.287415Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:53.287459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:53.287503Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:53.287533Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:53.287559Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:53.287595Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:53.288580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:53.288611Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:53.300120Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:53.300210Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:53.300245Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:53.300309Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:06:53.300370Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:53.480468Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:53.480517Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:53.480552Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:06:53.481003Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:06:53.481052Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:53.481170Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:53.481214Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:06:53.481257Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:06:53.481293Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:06:53.484867Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:06:53.484931Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:53.485739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:53.485781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:53.485830Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:53.485881Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:53.485922Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:06:53.485965Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... teWork, received event# 2146435072, Sender [2:986:2786], Recipient [2:986:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:00.071770Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:00.071803Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-07T14:07:00.071827Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-07T14:07:00.071858Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:07:00.071889Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-07T14:07:00.071913Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1137:2895] 2025-04-07T14:07:00.071931Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-07T14:07:00.071954Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-07T14:07:00.071974Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-07T14:07:00.072090Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:990:2790], Recipient [2:990:2790]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:00.072107Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:00.072202Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1119:2877], Recipient [2:631:2537]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:07:00.072221Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:07:00.072270Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:990:2790], Recipient [2:720:2600]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-07T14:07:00.072292Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-07T14:07:00.072420Z node 2 :TX_DATASHARD TRACE: 
StateWork, received event# 269877763, Sender [2:1123:2881], Recipient [2:720:2600]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:07:00.072435Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:07:00.072478Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:986:2786]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 2000} 2025-04-07T14:07:00.072492Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:07:00.072507Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-07T14:07:00.072524Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T14:07:00.072771Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:07:00.072797Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:00.072815Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2025-04-07T14:07:00.072831Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-04-07T14:07:00.072846Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2025-04-07T14:07:00.072861Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:07:00.072881Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:07:00.072904Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-07T14:07:00.072918Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:00.072928Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-07T14:07:00.072940Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-07T14:07:00.072950Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-07T14:07:00.072963Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-07T14:07:00.072976Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-07T14:07:00.073069Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1121:2879], Recipient [2:986:2786]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:07:00.073089Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:07:00.073111Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:1119:2877], serverId# [2:1121:2879], sessionId# [0:0:0] 2025-04-07T14:07:00.073139Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1126:2884], Recipient [2:990:2790]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:07:00.073152Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:07:00.073166Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1123:2881], serverId# [2:1126:2884], sessionId# [0:0:0] 2025-04-07T14:07:00.073232Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:990:2790]: 
{TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-07T14:07:00.073246Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:07:00.073258Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-07T14:07:00.073272Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-07T14:07:00.073552Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:986:2786]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-07T14:07:00.073572Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T14:07:00.073590Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 2000 2025-04-07T14:07:00.073608Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-07T14:07:00.073638Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-07T14:07:00.073733Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:990:2790]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-07T14:07:00.073747Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T14:07:00.073760Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-07T14:07:00.073773Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-07T14:07:00.073790Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-07T14:07:00.084422Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 ack split to schemeshard 281474976715664 2025-04-07T14:07:00.084547Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-07T14:07:00.087523Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:380:2375], Recipient [2:639:2541] 2025-04-07T14:07:00.087578Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2025-04-07T14:07:00.089235Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:380:2375], Recipient [2:727:2604] 2025-04-07T14:07:00.089273Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-07T14:07:00.090285Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2025-04-07T14:07:00.090342Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 
2025-04-07T14:07:00.090424Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:623:2531], Recipient [2:631:2537]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:07:00.091037Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-07T14:07:00.091070Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:07:00.091307Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:712:2595], Recipient [2:720:2600]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:07:00.529605Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:939:2646], Recipient [2:631:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 939 RawX2: 8589937238 } TxBody: " \0008\000`\200\200\200\005j\322\006\010\001\022\223\006\010\001\022\024\n\022\t\253\003\000\000\000\000\000\000\021V\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\00 2025-04-07T14:07:00.529681Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:00.529789Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-07T14:07:00.530189Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-07T14:07:00.530640Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-04-07T14:06:04.343491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:04.343538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:04.420821Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:04.434690Z node 1 :NODE_BROKER ERROR: Cannot extend 
lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:04.434958Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:04.750582Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:04.750804Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:04.760615Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.043419Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.067072Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.078625Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.091077Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.101438Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.126453Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.126701Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.139129Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.140232Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.152646Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.153475Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.179211Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.192405Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.205724Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.206576Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.219884Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.247210Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.260407Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.286365Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-04-07T14:06:05.287963Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.288207Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.288418Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.288626Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.289271Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.303730Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 
2025-04-07T14:06:05.304015Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.329420Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.368651Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.369175Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.413098Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:05.777728Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.849032Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.849525Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.873930Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.937514Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:05.950052Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:06.163160Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-07T14:06:06.163962Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-07T14:06:06.164752Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-07T14:06:06.165046Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-07T14:06:06.214207Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-07T14:06:06.878336Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-07T14:06:06.892292Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-04-07T14:06:06.911984Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-07T14:06:06.925469Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:06.938331Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:06.938847Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-07T14:06:06.953382Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.020750Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.021531Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.049575Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.063352Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.093703Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.107420Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.108636Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 
2025-04-07T14:06:07.109232Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.144836Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.146312Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.159632Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.160566Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.177594Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.191214Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.191653Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.218690Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.219121Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.219430Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.239667Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.254894Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.256513Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-07T14:06:07.257930Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.284541Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.298143Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.330971Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.331591Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-07T14:06:07.858293Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:07.858942Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.056008Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.056417Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.069909Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.095877Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.096532Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.097035Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.508944Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.509559Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.510098Z node 1 :NODE_BROKER 
ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.727820Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.728353Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.728884Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.742466Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.919215Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:08.919730Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:09.084783Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:09.085388Z node 1 :NODE_BROKER ERROR: Cannot extend lease for no ... nnot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:52.830661Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:52.832433Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:52.834123Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:52.837439Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:52.890900Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.938066Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.949663Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.950940Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.958818Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.960233Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.995765Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:52.998050Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-07T14:06:53.910290Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:53.914114Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:53.967886Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.016510Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.071105Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.073156Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.074993Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.076829Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.282089Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: 
WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.529443Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.571337Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.575265Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.578957Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.651101Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.669850Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:54.687328Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.076477Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.096561Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.102390Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.104213Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.304818Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.306385Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.307783Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.309243Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.356880Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.358195Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.376252Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.378150Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.379775Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.454082Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-07T14:06:55.455434Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-07T14:06:55.711999Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.729185Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.731197Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.735445Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.739372Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.741208Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.762424Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:55.780500Z node 1 :NODE_BROKER ERROR: Cannot 
extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.049979Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.069180Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.073738Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.091192Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.347402Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.352503Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.354338Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.372867Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.390737Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.392588Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.489897Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.494840Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.497783Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-07T14:06:56.957096Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.037197Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.039284Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.041124Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.108296Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-04-07T14:06:57.110793Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-07T14:06:57.128571Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-07T14:06:57.385219Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.419680Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.525544Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.561391Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.753577Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-07T14:06:57.933004Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-07T14:06:58.016841Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-04-07T14:06:58.018987Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-07T14:06:58.084183Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.089045Z node 1 :NODE_BROKER ERROR: Cannot 
register node host13:12: ERROR_TEMP: No free node IDs 2025-04-07T14:06:58.566235Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-07T14:06:58.583866Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-07T14:06:58.666923Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-07T14:06:58.782284Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.787396Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.790151Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.797840Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.818975Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:58.821153Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-07T14:06:59.630415Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:59.631861Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-07T14:06:59.665299Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:59.698684Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:59.956335Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:06:59.994162Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-07T14:07:00.320158Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:07:00.322436Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:07:00.324614Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-07T14:07:00.326631Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReverseReadMultipleKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] Test command err: 2025-04-07T14:06:53.511373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:53.511616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:53.511793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001875/r3tmp/tmpXQoHwm/pdisk_1.dat 2025-04-07T14:06:53.792918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:53.840049Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:53.891052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:53.891186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:53.902539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:54.012532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:54.366504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:06:54.650661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:885:2700], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.650761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2705], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.650861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:54.655651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:06:54.850299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:899:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:06:55.192451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr896pnre30twb1k2h4ezqj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdhMDJlYzMtOGFhMDAxYTgtZmRkOWJkMTUtNmQ0NTA3ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:55.282972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr896q7t6zsezmr3wth0t9dx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzAzNWYyMzgtZjg0NzI0M2EtNTc5YzZmODgtMjg3ZWI0Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:55.586276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr896qaedthx93e43y0gf0yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNzJmNzUtZDY2M2YyN2QtODExZmY4NDEtNDJmZmRkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-07T14:06:55.676362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr896qkd13awpemy9kfjx4fc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNzJmNzUtZDY2M2YyN2QtODExZmY4NDEtNDJmZmRkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-07T14:06:55.763420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr896qpc5csd5qj9mz9mx4gw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI0YTRhYTQtMWI5OTVhOWYtOWY4ODJlNzctNTA3NmFiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-07T14:06:55.765764Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1135:2806] TxId: 281474976715665. Ctx: { TraceId: 01jr896qpc5csd5qj9mz9mx4gw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI0YTRhYTQtMWI5OTVhOWYtOWY4ODJlNzctNTA3NmFiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2025-04-07T14:06:55.792248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI0YTRhYTQtMWI5OTVhOWYtOWY4ODJlNzctNTA3NmFiOGU=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jr896qpc5csd5qj9mz9mx4gw, Create QueryResponse for error on request, msg: 2025-04-07T14:06:55.794431Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODdjNzJmNzUtZDY2M2YyN2QtODExZmY4NDEtNDJmZmRkMw==, ActorId: [1:1032:2808], ActorState: ExecuteState, TraceId: 01jr896qkd13awpemy9kfjx4fc, Create QueryResponse for error on request, msg: 2025-04-07T14:06:55.795101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr896qpc5csd5qj9mz9mx4gw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI0YTRhYTQtMWI5OTVhOWYtOWY4ODJlNzctNTA3NmFiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:55.807322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jr896qkd13awpemy9kfjx4fc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNzJmNzUtZDY2M2YyN2QtODExZmY4NDEtNDJmZmRkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:06:56.072468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr896qyz996w0dqck55zcrw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRjZjFhZjQtYTc5OTRkNDctZjNmYjJhYjAtN2E4ZGRmODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-07T14:06:59.094231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:59.094473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:59.094522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001875/r3tmp/tmpsOLPTe/pdisk_1.dat 2025-04-07T14:06:59.322226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:59.342276Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:59.387897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:59.388020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:59.399070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:59.508573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:59.781212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-07T14:07:00.062274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:00.062330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:802:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:00.062384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:00.065584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:07:00.254771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:07:00.348022Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr896vyx1jscbzcp35ysgej6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUwZjdkNzEtOTQ4NmJkYjctY2UxMTRiY2YtNWYyZTg5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 890 RawX2: 8589937237 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\tz\003\000\000\000\000\000\000\021U\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\00 ... ng_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001%\n\007TraceId\022\03201jr896vyx1jscbzcp35ysgej6\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CurrentExecutionId\022\000\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=YzUwZjdkNzEtOTQ4NmJkYjctY2UxMTRiY2YtNWYyZTg5NWY=\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\"\n\010\233\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 631 RawX2: 8589937129 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 179 } } 2025-04-07T14:07:00.429338Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr896w8cc2k72xz9jy9craph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTU1ZWFiMDYtYTdiODA0ZDAtMzczMDNjNmMtOTA3YjM2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 912 RawX2: 8589937309 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\220\003\000\000\000\000\000\000\021\235\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\014\n\010Database\022\000\222\001%\n\007TraceId\022\03201jr896w8cc2k72xz9jy9craph\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=OTU1ZWFiMDYtYTdiODA0ZDAtMzczMDNjNmMtOTA3YjM2NmY=\222\001\026\n\022CurrentExecutionId\022\000\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 720 RawX2: 8589937192 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 209 } } ===== Begin SELECT 2025-04-07T14:07:00.689340Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr896wazb130cz5eznd60pb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YmE4MC1iNmIwOWM0YS1mZGY0ZjJmNC1kNmMzMGRhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-07T14:07:00.796971Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr896wka63q5ddzrb88cxgrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YmE4MC1iNmIwOWM0YS1mZGY0ZjJmNC1kNmMzMGRhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 978 RawX2: 8589937331 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\322\003\000\000\000\000\000\000\021\263\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\021\n\006PoolId\022\007default\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=M2U1YmE4MC1iNmIwOWM0YS1mZGY0ZjJmNC1kNmMzMGRhMA==\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jr896wka63q5ddzrb88cxgrf\222\001\014\n\010Database\022\000\222\001\023\n\nDatabaseId\022\005/Root\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 978 RawX2: 8589937331 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\322\003\000\000\000\000\000\000\021\263\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004(\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jr896wka63q5ddzrb88cxgrf\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=M2U1YmE4MC1iNmIwOWM0YS1mZGY0ZjJmNC1kNmMzMGRhMA==\222\001\026\n\022CurrentExecutionId\022\000\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2031 MaxStep: 32031 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 631 RawX2: 8589937129 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 168 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2031 MaxStep: 32031 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 720 RawX2: 8589937192 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 111 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 607 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 835 } } ===== Waiting for commit response ===== Last SELECT 2025-04-07T14:07:01.241886Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jr896wzwd13d0zak0mfaqfe4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjE3ZTQ3MmQtODM1Y2NhZmItM2E5YzgyODgtYTY4NzY5NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage |92.6%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> DataShardReadIteratorBatchMode::RangeFull >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2025-04-07T14:06:52.626459Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:52.702269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:52.702327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:52.704319Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:52.704773Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:52.704998Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:52.715505Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:52.759422Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:52.759799Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:52.761315Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:52.761394Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:52.761447Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:52.761735Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 
2025-04-07T14:06:52.788836Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:52.789026Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:52.789151Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:52.789193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:52.789226Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:52.789274Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:52.789459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.789501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.789918Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:52.790003Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:52.790053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:52.790090Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:52.790140Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:52.790172Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:52.790203Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:52.790233Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:52.790268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:52.834896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:52.834979Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:52.835039Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:52.837579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:52.837650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:52.837736Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:52.837902Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:52.837954Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:52.838004Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 
2025-04-07T14:06:52.838045Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:52.838076Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:52.838111Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:52.838139Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:52.838441Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:52.838491Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:52.838524Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:52.838563Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:52.838620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:52.838648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:52.838692Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:52.838723Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:52.838754Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:52.861211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:52.861262Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:52.861298Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:52.861336Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:52.861387Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:52.861804Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:52.861841Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:52.861877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:52.861961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:52.861983Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:52.862100Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:52.862140Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:52.862177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:52.862209Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:52.864832Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 
9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:52.864880Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:52.865043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.865078Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:52.865113Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:52.865142Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:52.865166Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:52.865194Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:52.865217Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:52.865260Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:52.865290Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:52.865313Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:52.865335Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:52.865443Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:52.865464Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:52.865488Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:52.865502Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:52.865515Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:52.865548Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:52.865567Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:52.865587Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:52.865611Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:52.865659Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:52.865695Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:52.865724Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:52.865790Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:52.865806Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:52.865837Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
eady operations at 9437184 2025-04-07T14:07:02.794870Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.794911Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.794953Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:07:02.795007Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:07:02.795055Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.795083Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.795251Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.795281Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.795342Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:02.795359Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.795407Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795431Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795500Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.795522Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.795546Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.795568Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:02.795582Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.795596Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795606Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795618Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.795694Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.795725Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.795755Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:02.795775Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-07T14:07:02.795799Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:07:02.795870Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.795890Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.795919Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.795943Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:07:02.795959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.796024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.796041Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.796081Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.796116Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:07:02.796132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.796224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.796247Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.796289Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.796326Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:07:02.796345Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.796402Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.796434Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-04-07T14:07:02.796477Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-07T14:07:02.796547Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.796758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.796786Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.796828Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2025-04-07T14:07:02.796876Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:02.796905Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.796998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:02.797027Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-04-07T14:07:02.797060Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:02.797091Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:02.797235Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-07T14:07:02.797270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.797312Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-04-07T14:07:02.797612Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:02.797656Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.797682Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-04-07T14:07:02.797843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:02.797873Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.797896Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-04-07T14:07:02.797999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:02.798024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.798064Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-04-07T14:07:02.798235Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:07:02.798278Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.798301Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-04-07T14:07:02.798384Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:07:02.798406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-07T14:07:02.798428Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-07T14:07:02.798515Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:07:02.798535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.798549Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-07T14:07:02.798604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2303]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:02.798623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:02.798651Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-07T14:06:10.710039Z :ReadSession INFO: Random seed for debugging is 1744034770710005 2025-04-07T14:06:11.022890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572306598357690:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.022947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.073600Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572307421159747:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.228125Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.229909Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e2f/r3tmp/tmp9K3Kxe/pdisk_1.dat 2025-04-07T14:06:11.280840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.477297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:06:11.477419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.478823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.478933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.486566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:11.486717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.487459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.510203Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7481, node 1 2025-04-07T14:06:11.526633Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.526652Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.612860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001e2f/r3tmp/yandexGDuA2v.tmp 2025-04-07T14:06:11.612885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001e2f/r3tmp/yandexGDuA2v.tmp 2025-04-07T14:06:11.612984Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001e2f/r3tmp/yandexGDuA2v.tmp 2025-04-07T14:06:11.613062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:11.784921Z INFO: TTestServer started on Port 1638 GrpcPort 7481 TClient is connected to server localhost:1638 PQClient connected to localhost:7481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:12.055896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:06:14.160589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319483260410:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.160753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.160903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319483260437:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.163982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572320306061737:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.164028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572320306061716:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.164079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:06:14.164202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.167634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319483260473:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.167723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.177360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572319483260439:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.177488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572320306061744:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.420430Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572319483260547:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420430Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572320306061787:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.424109Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzgwZTEyODgtYTNkMmM4M2UtNjBlMzNjNzQtYjBjMDlhNjg=, ActorId: [2:7490572320306061713:2280], ActorState: ExecuteState, TraceId: 01jr895f4j6kztg2b7y8pjhjz4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.426477Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.426753Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2E1ZWU0MDEtYzFkYS01MzQ0OTMwOC1hNmQ4ODdiZg==, ActorId: [1:7490572319483260407:2303], ActorState: ExecuteState, TraceId: 01jr895f3s5j4m340dqt9qzreg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.427078Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.469203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.582092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.712401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7481", true, true, 1000); 2025-04-07T14:06:15.114282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr895fqs0qbqwvsc8ky95dds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg4NTQ3ZmMtYjEyZDM0OGQtZmQ0MmFiMDctZmU2YmNmMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490572323778228296:3022] 2025-04-07T14:06:16.022968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572306598357690:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:16.023137Z ... hared/user session shared/user_7_1_17623216137415426302_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2025-04-07T14:07:02.239497Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 715c50d2-46e2ca63-e83f930c-6004c92c has messages 1 2025-04-07T14:07:02.239599Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 read done: guid# 715c50d2-46e2ca63-e83f930c-6004c92c, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-04-07T14:07:02.239623Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 response to read: guid# 715c50d2-46e2ca63-e83f930c-6004c92c 2025-04-07T14:07:02.239844Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 Process answer. Aval parts: 0 2025-04-07T14:07:02.240531Z :DEBUG: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:02.240734Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-07T14:07:02.240884Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-04-07T14:07:02.240730Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T14:07:02.240858Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 got read request: guid# 46a33ed0-e1b0f8e0-981028c6-74dc8cd7 2025-04-07T14:07:02.240958Z :DEBUG: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-07T14:07:01.118000Z WriteTime: 2025-04-07T14:07:01.120000Z Ip: "ipv6:[::1]:35042" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:35042" } } } } 2025-04-07T14:07:02.241177Z :INFO: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] Closing read session. 
Close timeout: 3.000000s 2025-04-07T14:07:02.241228Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-07T14:07:02.241271Z :INFO: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1337 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:07:02.241994Z :INFO: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] Closing read session. Close timeout: 0.000000s 2025-04-07T14:07:02.242050Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-07T14:07:02.242132Z :INFO: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1338 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:07:02.242341Z :NOTICE: [/Root] [/Root] [2d2bc1a4-8b6afee9-12688c9b-bfb0e173] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:07:02.242861Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 grpc read done: success# 0, data# { } 2025-04-07T14:07:02.242887Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 grpc read failed 2025-04-07T14:07:02.242924Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 grpc closed 2025-04-07T14:07:02.244575Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:07:02.242959Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17623216137415426302_v1 is DEAD 2025-04-07T14:07:02.244621Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_17623216137415426302_v1 2025-04-07T14:07:02.244675Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7490572518389380003:2505] destroyed 2025-04-07T14:07:02.244730Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_17623216137415426302_v1 2025-04-07T14:07:02.244755Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7490572518389380000:2502] disconnected; active server actors: 1 2025-04-07T14:07:02.244797Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7490572518389380000:2502] client user disconnected session shared/user_7_1_17623216137415426302_v1 2025-04-07T14:07:03.432912Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.432958Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.432993Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:07:03.433333Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:07:03.433828Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:07:03.434011Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.434569Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-07T14:07:03.437035Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.437079Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.437119Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:07:03.437469Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:07:03.438046Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:07:03.438210Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.438468Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2025-04-07T14:07:03.439593Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:07:03.440077Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-07T14:07:03.440156Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-07T14:07:03.440304Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:07:03.440346Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:07:03.440369Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:07:03.440414Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T14:07:03.442425Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.442457Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.442502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:07:03.442780Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:07:03.443175Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:07:03.443291Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.443492Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:07:03.444209Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.444371Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:07:03.444503Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:07:03.444558Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:07:03.444639Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2025-04-07T14:07:03.446200Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.446232Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.446272Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:07:03.446532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:07:03.446934Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:07:03.447054Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.447719Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:07:03.447835Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:07:03.447915Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:07:03.447999Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx >> DataShardTxOrder::ZigZag_oo [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:128:2151] sender: [1:130:2057] 
recipient: [1:106:2138] 2025-04-07T14:06:17.654022Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:17.729027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.729079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.731265Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:17.732428Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:17.732758Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:17.745274Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:17.789512Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:17.789627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:17.791273Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:17.791355Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:17.791405Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:17.791735Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:17.821778Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:17.821962Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:17.822053Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:193:2195] 2025-04-07T14:06:17.822086Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:17.822126Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:17.822178Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.823034Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.823093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.823375Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:17.823473Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:17.823525Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.823566Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:17.823631Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:17.823665Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.823701Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.823732Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:17.823766Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:128:2151] sender: [1:206:2057] recipient: 
[1:14:2061] 2025-04-07T14:06:17.871269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.871342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.871777Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:17.879223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:17.879294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:17.879393Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:17.879542Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:17.879585Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:17.879634Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:17.879702Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.879736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:17.879771Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:17.879803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.880081Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:17.880142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:17.880182Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:17.880243Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.880310Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:17.880352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:17.880389Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:17.880423Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.880450Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:17.907065Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:17.907156Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.907205Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.907284Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec 
latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:17.907357Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:17.907853Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.907894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.907929Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:17.908033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:17.908056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:17.908205Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.908268Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.908297Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:17.908326Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:17.911548Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:17.911613Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.911835Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.911884Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.911928Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.911961Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:17.911987Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:17.912022Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-07T14:06:17.912053Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:17.912087Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.912118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:17.912150Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:17.912177Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:17.912333Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-07T14:06:17.912375Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.912414Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:17.912434Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 
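The datashard records above print per-transaction timings in a fixed form ("exec latency: N ms, propose latency: N ms", sometimes followed by a status such as PREPARED). If you need to aggregate those figures over a long run, a minimal extraction sketch follows; it assumes plain Python 3 run over the raw one-record-per-line log and is not part of the ya/ydb test tooling.

import re
import sys
from collections import defaultdict

# Matches records such as
#   "... at tablet 9437184 ... exec latency: 0 ms, propose latency: 1 ms"
# Assumes one log record per line, as in the raw log output.
LAT = re.compile(r'at tablet (\d+).{0,160}?'
                 r'exec latency: (\d+) ms, propose latency: (\d+) ms')

def latency_stats(text):
    """Collect (exec_ms, propose_ms) samples per tablet id."""
    stats = defaultdict(list)
    for tablet, exec_ms, prop_ms in LAT.findall(text):
        stats[tablet].append((int(exec_ms), int(prop_ms)))
    return stats

if __name__ == '__main__':
    for tablet, samples in sorted(latency_stats(sys.stdin.read()).items()):
        print(f'tablet {tablet}: {len(samples)} txs, '
              f'max exec {max(e for e, _ in samples)} ms, '
              f'max propose {max(p for _, p in samples)} ms')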
2025-04-07T14:06:17.912451Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:17.912506Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.912529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:17.912555Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:17.912581Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:17.912631Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-07T14:06:17.912657Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:17.912704Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-07T14:06:17.912737Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-07T14:06:17.912757Z node 1 :TX_DATASHARD TRACE: Adv ... 9437184 to execution unit ExecuteDataTx 2025-04-07T14:07:04.194619Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2025-04-07T14:07:04.194828Z node 36 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2025-04-07T14:07:04.194866Z node 36 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:07:04.194928Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-04-07T14:07:04.194952Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:07:04.194972Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2025-04-07T14:07:04.194997Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2025-04-07T14:07:04.195158Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2025-04-07T14:07:04.195190Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2025-04-07T14:07:04.195219Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2025-04-07T14:07:04.195248Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2025-04-07T14:07:04.195283Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-04-07T14:07:04.195309Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2025-04-07T14:07:04.195337Z node 36 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437184 has finished 2025-04-07T14:07:04.195368Z node 36 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:04.195396Z node 36 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:07:04.195424Z node 36 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:07:04.195453Z node 36 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:07:04.195602Z node 36 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [36:436:2386], Recipient [36:436:2386]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:04.195637Z node 36 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:04.195680Z node 36 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-07T14:07:04.195710Z node 36 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:04.195736Z node 36 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-07T14:07:04.195766Z node 36 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437186 2025-04-07T14:07:04.195795Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PlanQueue 2025-04-07T14:07:04.195828Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.195856Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2025-04-07T14:07:04.195882Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit LoadTxDetails 2025-04-07T14:07:04.195911Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2025-04-07T14:07:04.196468Z node 36 :TX_DATASHARD DEBUG: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2025-04-07T14:07:04.196508Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.196536Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2025-04-07T14:07:04.196561Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2025-04-07T14:07:04.196590Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2025-04-07T14:07:04.196626Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.196653Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2025-04-07T14:07:04.196696Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2025-04-07T14:07:04.196724Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2025-04-07T14:07:04.196762Z node 36 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437186 2025-04-07T14:07:04.196789Z node 36 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437186 2025-04-07T14:07:04.196819Z node 36 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437186 2025-04-07T14:07:04.196853Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.196880Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2025-04-07T14:07:04.196905Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2025-04-07T14:07:04.196932Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildDataTxOutRS 2025-04-07T14:07:04.196979Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.197005Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2025-04-07T14:07:04.197030Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 
2025-04-07T14:07:04.197057Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2025-04-07T14:07:04.197083Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.197107Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2025-04-07T14:07:04.197133Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2025-04-07T14:07:04.197159Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2025-04-07T14:07:04.197187Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.197213Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2025-04-07T14:07:04.197239Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2025-04-07T14:07:04.197264Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2025-04-07T14:07:04.197290Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.197317Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2025-04-07T14:07:04.197343Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2025-04-07T14:07:04.197370Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2025-04-07T14:07:04.197627Z node 36 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2025-04-07T14:07:04.197675Z node 36 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:07:04.197718Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.197745Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2025-04-07T14:07:04.197772Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation 2025-04-07T14:07:04.197798Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2025-04-07T14:07:04.197935Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2025-04-07T14:07:04.197963Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2025-04-07T14:07:04.197987Z node 36 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2025-04-07T14:07:04.198013Z node 36 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2025-04-07T14:07:04.198046Z node 36 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-07T14:07:04.198072Z node 36 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2025-04-07T14:07:04.198098Z node 36 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2025-04-07T14:07:04.198127Z node 36 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:04.198152Z node 36 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 
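The TRACE records above walk each operation ([step:txid] at a tablet) through a chain of execution units (PlanQueue, LoadTxDetails, BuildAndWaitDependencies, ExecuteDataTx, CompleteOperation, CompletedOperations, and so on), reporting a status such as Executed, ExecutedNoMoreRestarts or DelayComplete after each unit. To make long traces like this easier to follow, the sketch below rebuilds the per-operation unit walk from the two message shapes visible here; it is an illustrative helper that assumes those message formats stay stable, not an official tool.

import re
import sys
from collections import defaultdict

# The two TRACE message shapes used below, taken from the log itself:
#   "Trying to execute [7:6] at 9437186 on unit ExecuteDataTx"
#   "Execution status for [7:6] at 9437186 is Executed"
EVENT = re.compile(
    r'Trying to execute \[(?P<op>\d+:\d+)\] at (?P<tab>\d+) on unit (?P<unit>\w+)|'
    r'Execution status for \[(?P<sop>\d+:\d+)\] at (?P<stab>\d+) is (?P<status>\w+)'
)

def unit_walks(text):
    """Return {(tablet, op): [(unit, status), ...]} rebuilt from the trace."""
    walks = defaultdict(list)
    current = {}  # (tablet, op) -> unit currently being tried
    for m in EVENT.finditer(text):
        if m.group('unit'):
            current[(m.group('tab'), m.group('op'))] = m.group('unit')
        else:
            key = (m.group('stab'), m.group('sop'))
            walks[key].append((current.get(key, '?'), m.group('status')))
    return walks

if __name__ == '__main__':
    for (tablet, op), steps in sorted(unit_walks(sys.stdin.read()).items()):
        print(f'tablet {tablet} op [{op}]: ' +
              ' -> '.join(f'{unit}:{status}' for unit, status in steps))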
2025-04-07T14:07:04.198179Z node 36 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-07T14:07:04.198230Z node 36 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-07T14:07:04.211120Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-04-07T14:07:04.211187Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-04-07T14:07:04.211240Z node 36 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:04.211279Z node 36 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-04-07T14:07:04.211338Z node 36 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [36:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:04.211382Z node 36 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:04.211614Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-04-07T14:07:04.211646Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-04-07T14:07:04.211682Z node 36 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:07:04.211713Z node 36 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-04-07T14:07:04.211754Z node 36 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [36:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:04.211787Z node 36 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:07:04.211960Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-04-07T14:07:04.211995Z node 36 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-04-07T14:07:04.212031Z node 36 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:07:04.212073Z node 36 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-04-07T14:07:04.212115Z node 36 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [36:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:04.212147Z node 36 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest 
>> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-04-07T14:06:48.350908Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:48.422796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:48.422860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:48.424835Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:48.425238Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:48.425531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:48.435613Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:48.477015Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:48.477469Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:48.478940Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:48.479014Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:48.479068Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:48.479364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:48.504400Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:48.504572Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:48.504685Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:48.504720Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:48.504773Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:48.504808Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.504982Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.505024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.505376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:48.505441Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:48.505485Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.505523Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:48.505566Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:48.505600Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:48.505672Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:48.505706Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:48.505736Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:48.546680Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.546750Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.546803Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:48.549163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:48.549214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:48.549295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:48.549465Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:48.549516Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:48.549563Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:48.549607Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.549639Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:48.549669Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:48.549703Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.549966Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:48.550007Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:48.550045Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:48.550091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.550137Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:48.550160Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:48.550192Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:48.550233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.550257Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:48.572533Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:48.572618Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.572700Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.572755Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: 
PREPARED 2025-04-07T14:06:48.572829Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:48.573364Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.573423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.573465Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:48.573608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:48.573642Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:48.573762Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.573818Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.573865Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:48.573908Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:48.577642Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:48.577708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.577898Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.577942Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.577990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.578031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:48.578066Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:48.578104Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:48.578149Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:48.578186Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.578243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:48.578279Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:48.578313Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:48.578463Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:48.578496Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.578532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:48.578553Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:48.578574Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:48.578625Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.578651Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:48.578682Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:48.578716Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:48.578766Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:48.578798Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:48.578836Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:48.578879Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.578917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:48.578943Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... aitInRS 2025-04-07T14:07:04.594357Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:07:04.594379Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-07T14:07:04.594399Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-07T14:07:04.594418Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-07T14:07:04.594847Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-07T14:07:04.594927Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:07:04.594983Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:07:04.595010Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-07T14:07:04.595035Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-07T14:07:04.595061Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:07:04.595275Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-07T14:07:04.595309Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-07T14:07:04.595351Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-07T14:07:04.595388Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-07T14:07:04.595422Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-07T14:07:04.595446Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-07T14:07:04.595474Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-07T14:07:04.595519Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:04.595559Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-07T14:07:04.595603Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-07T14:07:04.595642Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-07T14:07:04.595867Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:229:2224], Recipient [6:229:2224]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:04.595905Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:04.595945Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:07:04.595976Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:04.596001Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:07:04.596031Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-04-07T14:07:04.596074Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-07T14:07:04.596102Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.596127Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-07T14:07:04.596151Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:07:04.596178Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-07T14:07:04.596810Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-07T14:07:04.596850Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.596876Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:07:04.596900Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-07T14:07:04.596926Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-07T14:07:04.596962Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.596984Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-07T14:07:04.597007Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:07:04.597028Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:07:04.597071Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-07T14:07:04.597098Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-07T14:07:04.597124Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 
2025-04-07T14:07:04.597162Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.597185Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:07:04.597208Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-07T14:07:04.597231Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-07T14:07:04.597275Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.597298Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-07T14:07:04.597318Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-07T14:07:04.597340Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-07T14:07:04.597365Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.597387Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-07T14:07:04.597410Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-07T14:07:04.597434Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-07T14:07:04.597462Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.597484Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-07T14:07:04.597503Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-07T14:07:04.597527Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:07:04.597548Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.597569Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:07:04.597590Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:07:04.597616Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-07T14:07:04.597945Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-07T14:07:04.597994Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:07:04.598034Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.598057Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:07:04.598080Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-07T14:07:04.598104Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:07:04.598267Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 
9437184 is DelayComplete 2025-04-07T14:07:04.598292Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-07T14:07:04.598318Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-07T14:07:04.598346Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-07T14:07:04.598374Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-07T14:07:04.598396Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-07T14:07:04.598419Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-07T14:07:04.598451Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:04.598477Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:07:04.598505Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:07:04.598534Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:07:04.611672Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-07T14:07:04.611742Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-07T14:07:04.611798Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:04.611841Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-07T14:07:04.611904Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:04.611949Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:04.612254Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-07T14:07:04.612283Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-07T14:07:04.612311Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-07T14:07:04.612331Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-07T14:07:04.612361Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:04.612386Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.241783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.241869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.241902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.241933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.306694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.306797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.316389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.316516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.317391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.326219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.326983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.328860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.330312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-04-07T14:06:54.348008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354622Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.461932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.462121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.462300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.462490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.462548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.464917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.465066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.465248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.465321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.465408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.465443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.467360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.467409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.467439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.469018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.469065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.469103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.469168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.472075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.473560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.473711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.474516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.474631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.474684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.474901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.474964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.475126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.475213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.476962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.477176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.477462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477520Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.477608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.477641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.477678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.477729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.477762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.477791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.477855Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.477892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.477925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.479369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.479473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.479512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.479546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.479577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.479650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 6Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-07T14:07:05.512690Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-07T14:07:05.513078Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-07T14:07:05.513151Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-07T14:07:05.513197Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-07T14:07:05.513227Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.513257Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-07T14:07:05.513285Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:07:05.520103Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.520266Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.520341Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 
2025-04-07T14:07:05.520423Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.520585Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:07:05.552010Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:07:05.552086Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:07:05.552613Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:07:05.552695Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:07:05.552749Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:07:05.604011Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409552, partId: 0 2025-04-07T14:07:05.604221Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-07T14:07:05.604330Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-07T14:07:05.604395Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.604450Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T14:07:05.604647Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:07:05.604896Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-07T14:07:05.607450Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.607663Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:05.607721Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:07:05.608062Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:05.608117Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:07:05.608218Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:07:05.608280Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 
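The FLAT_TX_SCHEMESHARD records in this block follow a simple bookkeeping pattern: the operation declares how many path publications it still waits for ("Publication still in progress, tx: 104, publications: 1, subscribers: 1"), each TEvUpdateAck for a path/version pair retires one of them ("AckPublish ... version: 5"), and once none remain the waiting subscriber is notified ("Publication complete, notify & remove"). The toy model below only restates that sequence to make the records easier to read; the class and method names are hypothetical, and this is a sketch of the observed behaviour, not the schemeshard implementation.

from dataclasses import dataclass, field

@dataclass
class TxPublication:
    """Hypothetical tracker of pending scheme-board publications for one tx."""
    tx_id: int
    pending: dict = field(default_factory=dict)    # path_id -> version to reach
    subscribers: list = field(default_factory=list)

    def expect(self, path_id, version):
        # "Publication details: tx: 104, [... LocalPathId: 2], 5"
        self.pending[path_id] = version

    def on_update_ack(self, path_id, version):
        # "Handle TEvUpdateAck ... LocalPathId: 2 Version: 5 ... cookie: 104"
        if path_id in self.pending and version >= self.pending[path_id]:
            del self.pending[path_id]
        if not self.pending:
            # "Publication complete, notify & remove ... subscribers: 1"
            for waiter in self.subscribers:
                print(f'notify {waiter}: tx {self.tx_id} published')
            self.subscribers.clear()
        return not self.pending

# Mirrors the tx 104 sequence above: one publication for path 2 at version 5,
# with one subscriber waiting for completion.
pub = TxPublication(tx_id=104, subscribers=['[13:1529:3333]'])
pub.expect(path_id=2, version=5)
pub.on_update_ack(path_id=2, version=5)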
2025-04-07T14:07:05.608430Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:07:05.608475Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:07:05.608540Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:07:05.608597Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:07:05.608657Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:07:05.608724Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:07:05.608932Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-04-07T14:07:05.608982Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2025-04-07T14:07:05.609027Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:07:05.610170Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:07:05.610288Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:07:05.610338Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:07:05.610392Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:07:05.610444Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-07T14:07:05.610536Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-07T14:07:05.610594Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [13:434:2390] 2025-04-07T14:07:05.614797Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:07:05.614911Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:07:05.614954Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1529:3333] TestWaitNotification: OK eventTxId 104 2025-04-07T14:07:05.627622Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:05.627826Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 231us result status StatusSuccess 2025-04-07T14:07:05.628393Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> DataShardReadIterator::ShouldReadKeyCellVec >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> 
TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::PathErrors >> OlapEstimationRowsCorrectness::TPCH2 >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:22.653676Z 00000.006 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.016 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:29:2061]) priority=200 resources={1, 0} 00000.016 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:29:2061]) to queue queue_background_compaction 00000.016 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:29:2061]) from queue queue_background_compaction 00000.016 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by 
[1:29:2061]) to queue queue_background_compaction 00000.016 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.025 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:29:2061]) (release resources {1, 0}) 00000.025 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.026 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.026 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.026 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.026 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.026 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.026 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: All BS storage groups are stopped 00000.026 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.026 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:22.683600Z 00000.004 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.005 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:7:2054]) priority=0 resources={1, 0} 00000.008 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:7:2054]) to queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:7:2054]) from queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:7:2054]) to queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:7:2054])) 00000.009 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:29:2061]) priority=200 resources={1, 0} 00000.010 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:29:2061]) to queue queue_background_compaction 00000.010 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.011 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.011 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:29:2061]) to queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:29:2061]) from queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:29:2061]) to queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:29:2061])) 
00000.025 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:29:2061]) (release resources {1, 0}) 00000.025 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:29:2061])) 00000.026 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.026 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.026 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.026 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.026 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.026 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: All BS storage groups are stopped 00000.026 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.026 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:22.713189Z 00000.004 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.005 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:7:2054]) priority=0 resources={1, 0} 00000.008 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:7:2054]) to queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:7:2054]) from queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:7:2054]) to queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [3:7:2054])) 00000.010 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:29:2061]) priority=200 resources={1, 0} 00000.010 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:29:2061]) to queue queue_background_compaction 00000.010 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.011 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.011 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:29:2061]) to queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:29:2061]) from queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:29:2061]) to queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:29:2061])) 00000.013 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:29:2061]) (release resources {1, 0}) 00000.013 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove 
task gen0-table-101-tablet-1 (1 by [3:29:2061])) 00000.014 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:29:2061]) priority=200 resources={1, 0} 00000.014 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:29:2061]) to queue queue_background_compaction 00000.014 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.015 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.015 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:29:2061]) to queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:29:2061]) from queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:29:2061]) to queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:29:2061])) 00000.017 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:29:2061]) (release resources {1, 0}) 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:29:2061])) 00000.017 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:29:2061]) priority=200 resources={1, 0} 00000.017 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:29:2061]) to queue queue_background_compaction 00000.017 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.018 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.018 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:29:2061]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [3:29:2061]) from queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:29:2061]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:29:2061])) 00000.032 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:29:2061]) (release resources {1, 0}) 00000.032 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:29:2061])) 00000.033 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:29:2061]) priority=200 resources={1, 0} 00000.033 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:29:2061]) to queue queue_background_compaction 00000.033 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.034 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.034 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:29:2061]) to queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task 
gen0-table-101-tablet-1 (4 by [3:29:2061]) from queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:29:2061]) to queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-table ... TTxAddRows 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:20} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:20} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{16, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:20} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:21} commited cookie 1 for step 20 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:21} Tx{16, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:21} Tx{16, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:21} Tx{16, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{17, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:21} Tx{16, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:22} commited cookie 1 for step 21 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:22} Tx{17, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:22} Tx{17, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:22} Tx{17, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{18, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:22} Tx{17, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.028 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (3 by [50:29:2061]) priority=5 resources={1, 0} 00000.028 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [50:29:2061]) to queue queue_compaction_gen0 00000.028 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [50:29:2061]) from queue queue_compaction_gen0 00000.028 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [50:29:2061]) to queue queue_compaction_gen0 00000.028 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [50:29:2061])) 00000.028 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 3, edge 9223372036854775807/0, generation 0 00000.028 II| TABLET_EXECUTOR: Leader{1:2:23} starting compaction 00000.028 II| TABLET_EXECUTOR: Leader{1:2:24} starting Scan{5 on 101, Compact{1.2.23, eph 3}} 00000.028 II| TABLET_EXECUTOR: Leader{1:2:24} started compaction 5 00000.029 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 5 generation 0 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:24} 
commited cookie 1 for step 22 00000.030 II| TABLET_EXECUTOR: Leader{1:2:24} Compact 5 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 23, product {1 parts epoch 4} done 00000.030 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 5, generation 0 00000.031 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.031 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [50:29:2061]) (release resources {1, 0}) 00000.031 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [50:29:2061])) 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:25} commited cookie 8 for step 23 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:25} Tx{18, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:25} Tx{18, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:25} Tx{18, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{20, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:25} Tx{18, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:26} commited cookie 3 for step 24 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:26} switch applied on followers, step 24 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:26} commited cookie 1 for step 25 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:26} Tx{19, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:26} Tx{19, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:26} Tx{19, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{21, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:26} Tx{19, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:27} commited cookie 1 for step 26 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:27} Tx{20, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:27} Tx{20, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:27} Tx{20, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{22, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:27} Tx{20, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:28} commited cookie 1 for step 27 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:28} Tx{21, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:28} Tx{21, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:28} Tx{21, 
NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{23, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:28} Tx{21, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:29} commited cookie 1 for step 28 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:29} Tx{22, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:29} Tx{22, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 1024b of static mem, Memory{1024 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:29} Tx{22, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{24, redo 2123b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:29} Tx{22, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 1024b of static, Memory{0 dyn 0} 00000.033 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (4 by [50:29:2061]) priority=5 resources={1, 0} 00000.033 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [50:29:2061]) to queue queue_compaction_gen0 00000.033 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [50:29:2061]) from queue queue_compaction_gen0 00000.033 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [50:29:2061]) to queue queue_compaction_gen0 00000.033 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [50:29:2061])) 00000.033 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 4, edge 9223372036854775807/0, generation 0 00000.033 II| TABLET_EXECUTOR: Leader{1:2:30} starting compaction 00000.034 II| TABLET_EXECUTOR: Leader{1:2:31} starting Scan{7 on 101, Compact{1.2.30, eph 4}} 00000.034 II| TABLET_EXECUTOR: Leader{1:2:31} started compaction 7 00000.034 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 7 generation 0 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:31} commited cookie 1 for step 29 00000.035 DD| TABLET_EXECUTOR: Leader{1:2:31} commited cookie 8 for step 30 00000.038 II| TABLET_EXECUTOR: Leader{1:2:31} Compact 7 on TGenCompactionParams{101: gen 0 epoch +inf, 2 parts} step 30, product {1 parts epoch 5} done 00000.038 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 7, generation 0 00000.038 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.038 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (4 by [50:29:2061]) (release resources {1, 0}) 00000.038 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.250000 to 0.000000 (remove task gen0-table-101-tablet-1 (4 by [50:29:2061])) 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} commited cookie 3 for step 31 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} switch applied on followers, step 31 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{26, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 12813b, 0b lo load (12813b in total), 45056b requested for data (46080b in total) 00000.040 EE| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} mem 58893b terminated, limit 51200b is exceeded 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:32} Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.040 II| TABLET_EXECUTOR: Leader{1:2:32} suiciding, Waste{2:0, 42796b +(26, 44874b), 31 trc, -44874b acc} 00000.041 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.041 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 6 reqs hit {11 41984b} miss {0 0b} 00000.041 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.041 II| FAKE_ENV: DS.0 gone, left {3004b, 31}, put {3024b, 32} 00000.042 II| FAKE_ENV: DS.1 gone, left {88148b, 35}, put {88148b, 35} 00000.042 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: All BS storage groups are stopped 00000.042 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.042 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 1 Left 209}, stopped >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] 
recipient: [1:108:2139] 2025-04-07T14:06:55.545792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:55.545864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:55.545888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:55.545911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:55.545971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:55.546003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:55.546051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:55.546373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:55.597853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:55.597951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:55.606734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:55.606854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:55.607024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:55.611581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:55.611752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:55.612135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:55.612274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:55.613473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:55.614394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:55.614429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:55.614514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:55.614548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:55.614577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:55.614711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.618933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:55.710430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:55.710629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.710797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:55.710943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:55.710984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.712881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:55.713021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:55.713183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.713262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:55.713349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:55.713384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:55.715395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.715445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:55.715476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:55.721441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.721496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.721538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:55.721594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:55.724876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:55.726671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:55.726844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:55.727836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:55.727946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:55.728017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:55.728274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:55.728326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:55.728496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:55.728595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:55.730488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:55.730533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:55.730687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:55.730727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:55.730986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:55.731029Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:55.731120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:55.731155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:55.731193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:55.731256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:55.731300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:55.731331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:55.731387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:55.731423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:55.731456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, 
LocalPathId: 1], 3 2025-04-07T14:06:55.733014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:55.733104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:55.733136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:55.733184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:55.733222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:55.733296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-04-07T14:07:09.542001Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-07T14:07:09.545254Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.545893Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.545992Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.546035Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.546104Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:07:09.546182Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T14:07:09.546355Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:09.548207Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:07:09.548314Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T14:07:09.548612Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:09.548767Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 64424511595 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-07T14:07:09.548837Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:07:09.549196Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:07:09.549273Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:07:09.549523Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:09.549612Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:07:09.549667Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:07:09.551336Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:09.551372Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:09.551501Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:07:09.551619Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:09.551653Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:07:09.551689Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:07:09.551963Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.552009Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:07:09.552205Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:07:09.552264Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:07:09.552342Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:07:09.552409Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:07:09.552469Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:07:09.552516Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:07:09.552713Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:07:09.552775Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:07:09.552824Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:07:09.552870Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication 
details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:07:09.553538Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:07:09.553613Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:07:09.553649Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:07:09.553705Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:07:09.553765Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:09.554471Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:07:09.554537Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:07:09.554565Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:07:09.554592Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:07:09.554620Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:07:09.554688Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:07:09.557159Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:07:09.558084Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:07:09.558371Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:07:09.558435Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:07:09.558917Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:07:09.559022Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:07:09.559075Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:406:2373] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-07T14:07:09.562448Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: 
ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:09.562821Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:09.563118Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, at schemeshard: 72057594046678944 2025-04-07T14:07:09.565612Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:09.565856Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:07:09.566183Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:07:09.566244Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:07:09.566726Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:07:09.566833Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:07:09.566890Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:413:2380] TestWaitNotification: OK eventTxId 102 >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 
2025-04-07T14:06:54.242709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.242847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.242882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.316454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.316552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.327767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.327888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.328017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.332577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.332771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.333400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.333564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.335060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354546Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.475303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.475514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.475672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.475822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.475867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.477722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.477830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.477853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.479500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.479552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.479594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.481120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.481173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.481227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.481295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.489241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.490666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.490797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.491540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.491639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.491691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.491855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.491903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.492022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.492113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.493622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.493665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.493787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.493817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.493994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.494028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.494133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.494171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.494208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.494251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.494277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.494299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.494339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.494378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.494404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-07T14:06:54.495708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.495797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.495839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.495877Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.495904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.495982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ss: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:10.421865Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:10.422136Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 270us result status StatusSuccess 2025-04-07T14:07:10.422584Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 
7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:10.423624Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:10.423977Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 352us result status StatusSuccess 2025-04-07T14:07:10.424437Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 
0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:10.425630Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:10.425855Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 242us result status StatusSuccess 2025-04-07T14:07:10.426267Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:10.427345Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:10.427704Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 406us result status StatusSuccess 2025-04-07T14:07:10.428154Z 
node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> 
KqpIndexLookupJoin::LeftOnly+StreamLookup >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, b) | 2 6 86b (2, NULL) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, b) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, NULL) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, baaaa) | 2 6 86b (2, aaa) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, baaaa) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, aaa) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, 
aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0,2), [2,3), [3,5), [5,6), [6,7), [7,9), [9,9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccccd) | 1 1 41b (ccccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 
Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 43b (ccccccd) | 1 1 43b (ccccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccccd) | 1 1 40b (cccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 42b (cccccd) | 1 1 42b (cccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 39b (ccccd) | 1 1 39b (ccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccd) | 1 1 41b (ccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 38b (cccd) | 1 1 38b (cccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccd) | 1 1 40b (cccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 35b (d) | 1 1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 35b (d) | 1 
1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | ... et 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | 
> (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 
642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} 
Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.241784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.241882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.241919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.241951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.310159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.310257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.319922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.320034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.320183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.325144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.326960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.328867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.330291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354073Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.449506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.450577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.451409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.452523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.452617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.455594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.456406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.456609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.456705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.456787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.456824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.458970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T14:06:54.459023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.459065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.460851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.460910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.460961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.461022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.465956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.467833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.468940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.470770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.470948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.470999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.471166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.471199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.471357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.471443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.473820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.473854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.473967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.474002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.474190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.474236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.474378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.474403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.474437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.474477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.474504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.474529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.474571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.474601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.474629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.476201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.476273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.476298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.476336Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.476365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.476464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Id: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:13.247533Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:13.247828Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 286us result status StatusSuccess 2025-04-07T14:07:13.248255Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:13.249109Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:13.249378Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 281us result status StatusSuccess 2025-04-07T14:07:13.249775Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:13.250625Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-04-07T14:07:13.250845Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 231us result status StatusSuccess 2025-04-07T14:07:13.251239Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:13.252080Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:13.252336Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 299us result status StatusSuccess 2025-04-07T14:07:13.252789Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" 
LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions
>> TSchemeShardDecimalTypesInTables::Parameterless [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false
>> TExecutorDb::RandomCoordinatorSimulation [GOOD]
>> TExecutorDb::MultiPage
>> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD]
>> TSchemeShardTest::CreateSystemColumn
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD]
>> DataShardReadIterator::ShouldReadRangeReverse
>> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore [GOOD]
>> OlapEstimationRowsCorrectness::TPCH10
>> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD]
>> TConsoleTests::TestNotifyOperationCompletion
>> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD]
>> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds
>> KqpJoin::LeftJoinWithNull+StreamLookupJoin
>> TExecutorDb::MultiPage [GOOD]
>> TExecutorDb::EncodedPage
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true
>> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore
>> TConsoleTests::TestAttributes [GOOD]
>> TConsoleTests::TestAttributesExtSubdomain
>> TSchemeShardTest::CreateSystemColumn [GOOD]
>> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD]
>> TSchemeShardTest::CreateDropKesus
>> TExecutorDb::EncodedPage [GOOD]
>> TFlatCxxDatabaseTest::BasicSchemaTest
>> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore
>> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD]
>> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD]
>> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD]
>> TFlatDatabaseDecimal::UpdateRead [GOOD]
>> TFlatEraseCacheTest::BasicUsage [GOOD]
>> TFlatEraseCacheTest::BasicUsageReverse [GOOD]
>> TFlatEraseCacheTest::CacheEviction
>> TFlatEraseCacheTest::CacheEviction [GOOD]
>> TFlatEraseCacheTest::StressGarbageCollection [GOOD]
>> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD]
>> TFlatExecutorLeases::Basics
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD]
>> BasicUsage::TWriteSession_WriteEncoded
>> DataShardReadIterator::ShouldReadKeyArrow [GOOD]
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow
>> DataShardVolatile::DistributedWriteThenCopyTable [GOOD]
>> DataShardVolatile::DistributedWriteThenBulkUpsert
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD]
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:57.890854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:57.890934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:57.890960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:57.890982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:57.891022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:57.891045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:57.891084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:57.891345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:57.949868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:57.949980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:57.960056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:57.960207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:57.960381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:57.965001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:57.965171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:57.965663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:57.965832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path:
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:57.967386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:57.968512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:57.968559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:57.968702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:57.968744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:57.968778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:57.968931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:57.974101Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:58.070359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:58.070574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.070740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:58.070890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:58.070935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.072687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:58.072798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:58.072941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.072993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:58.073067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:58.073094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:58.074577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.074628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-07T14:06:58.074656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:58.076114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.076170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.076209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:58.076260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:58.079524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:58.081111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:58.081287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:58.082206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:58.082319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:58.082384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:58.082586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:58.082631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:58.082793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:58.082880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:58.085742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:58.085774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:58.085878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:58.085905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:58.086072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:58.086108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:58.086175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:58.086202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:58.086238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:58.086297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:58.086332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:58.086355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:58.086405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:58.086436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:58.086457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:58.087770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:58.087857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:58.087889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:58.087939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:58.087975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:58.088119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
78944] TDone opId# 102:0 ProgressState 2025-04-07T14:07:16.100597Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:07:16.100686Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:07:16.100775Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:07:16.100868Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:343:2318] message: TxId: 102 2025-04-07T14:07:16.100950Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:07:16.101024Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:07:16.101073Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:07:16.101271Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:07:16.103370Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:07:16.103446Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:344:2319] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-07T14:07:16.106902Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:16.107260Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:07:16.107900Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:07:16.108009Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:07:16.108073Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:07:16.108150Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:07:16.108264Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:07:16.108477Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:16.109161Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:16.109247Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:07:16.112270Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-07T14:07:16.112524Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-04-07T14:07:16.112859Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:16.112928Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:16.113184Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:07:16.113307Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:16.113375Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-07T14:07:16.113443Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T14:07:16.114048Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:07:16.114144Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-04-07T14:07:16.114498Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-07T14:07:16.115750Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:16.115923Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:16.115980Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:07:16.116064Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-07T14:07:16.116132Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:07:16.117454Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:16.117544Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:16.117575Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:07:16.117610Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-04-07T14:07:16.117662Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:07:16.117760Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T14:07:16.119981Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-07T14:07:16.120209Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-04-07T14:07:16.120285Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-07T14:07:16.120721Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-07T14:07:16.121018Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-04-07T14:07:16.121743Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:16.121817Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-07T14:07:16.121988Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:16.122064Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-07T14:07:16.122180Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:16.122307Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-07T14:07:16.123613Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:07:16.126365Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:07:16.127926Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:07:16.128334Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-04-07T14:07:16.128425Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-04-07T14:07:16.128512Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-04-07T14:07:16.133134Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:07:16.133309Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-04-07T14:07:16.133400Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-04-07T14:07:16.133434Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103
>> TSchemeShardTest::CreateDropKesus [GOOD]
>> TSchemeShardTest::CreateAlterKesus
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true
>> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD]
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected
>> TSchemeShardTest::CreateAlterKesus [GOOD]
>> TSchemeShardTest::CreateDropSolomon
>> TFlatExecutorLeases::Basics [GOOD]
>> TFlatExecutorLeases::BasicsLeaseTimeout
>> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD]
>> DataShardReadIteratorBatchMode::SelectingColumns
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD]
>> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin-ColumnStore
>> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD]
>> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup
>> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithClose
>> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore
>> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD]
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100
>> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD]
>> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters
>> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD]
>> TFlatExecutorLeases::BasicsInitialLease
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite
>> TSchemeShardTest::CreateDropSolomon [GOOD]
>> TSchemeShardTest::CreateAlterDropSolomon
>> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD]
>> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD]
>> TSchemeShardInfoTypesTest::LostId [GOOD]
>> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD]
>> TSchemeShardInfoTypesTest::MultipleDeduplications
>> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD]
>> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false
>> DataShardReadIterator::ShouldReadRangeReverse [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight
>> TConsoleTests::TestNotifyOperationCompletion [GOOD]
>> TConsoleTests::TestNotifyOperationCompletionExtSubdomain
>> TFlatExecutorLeases::BasicsInitialLease [GOOD]
>> TFlatExecutorLeases::BasicsInitialLeaseTimeout
>> TSchemeShardTest::CreateAlterDropSolomon [GOOD]
>> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD]
>> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false
>> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD]
>> KqpJoin::LeftJoinWithNull-StreamLookupJoin
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.242427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.242547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.242578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.314616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.314722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.325033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.325152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.325300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.330461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.330637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-07T14:06:54.331222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.331384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354446Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.494392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.494623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.494812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.495029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.495087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.497392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.497523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.497555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.499198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.499256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.499290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.500728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.500772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.500829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.500891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.504619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.506190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.506353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.507334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.507454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.507509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.507741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.507799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.507950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.508045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.509787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.509831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.509973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:06:54.510011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.510280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.510324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.510413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.510446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.510486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.510537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.510574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.510615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.510678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.510713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.510744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.512702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.512823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.512860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.512909Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.512944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.513048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
EBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:07:20.929121Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:20.929204Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:20.929245Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:07:20.929314Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:07:20.929376Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:07:20.929757Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:20.929827Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:07:20.929856Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:07:20.929885Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:07:20.929915Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:20.929994Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T14:07:20.932824Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:07:20.932900Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:07:20.932933Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:07:20.932963Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:07:20.933789Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:07:20.933987Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-07T14:07:20.935451Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:07:20.935784Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:07:20.936146Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:07:20.936325Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2025-04-07T14:07:20.936610Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:20.936852Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:07:20.937481Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-07T14:07:20.937668Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:07:20.937829Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:07:20.938987Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409549 2025-04-07T14:07:20.939189Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:07:20.939359Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:07:20.939834Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:07:20.939909Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:07:20.940027Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409547 2025-04-07T14:07:20.942253Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:07:20.942330Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:07:20.944431Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:07:20.944476Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:07:20.944605Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:07:20.944634Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:07:20.944719Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:07:20.944773Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:07:20.945847Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T14:07:20.946201Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:07:20.946271Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:07:20.946850Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:07:20.946973Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:07:20.947036Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:534:2489] TestWaitNotification: OK eventTxId 103 2025-04-07T14:07:20.947754Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:20.948033Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 304us result status StatusPathDoesNotExist 2025-04-07T14:07:20.948245Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-04-07T14:07:20.948844Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:07:20.948953Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-07T14:07:20.949011Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-04-07T14:07:20.949064Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-04-07T14:07:20.949629Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:20.949841Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 243us result status StatusSuccess 
2025-04-07T14:07:20.950308Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.242354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.242492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.242524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.310111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.310220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.320279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.320405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.320529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.325752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.326961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.328873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.330313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354233Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.464069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.464265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.464440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.464599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.464644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.466614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.466746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.466894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.466992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.467030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.467062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.468613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.468681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.468717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.470096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.470148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.470189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.470238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.473578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.475092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.475258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000001 2025-04-07T14:06:54.476103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.476200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.476254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.476461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.476516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.476680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.476759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.478385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.478422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.478571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.478608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.478804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.478858Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.478934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.478961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.478995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.479043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.479077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.479101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.479154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.479182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.479209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.480974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-04-07T14:06:54.481060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.481108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.481160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.481204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.481307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 227 } } 2025-04-07T14:07:23.084332Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:07:23.084580Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 227 } } 2025-04-07T14:07:23.084719Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 227 } } 2025-04-07T14:07:23.084767Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T14:07:23.084888Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.084943Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-07T14:07:23.087405Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.087575Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.087649Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:23.087768Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T14:07:23.087979Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:23.089552Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send 
tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:07:23.089688Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T14:07:23.090638Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:23.090757Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 51539609706 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:23.090822Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:07:23.091086Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:07:23.091241Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:07:23.096345Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:23.096414Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:07:23.096703Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:23.096763Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:07:23.097894Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.097975Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:07:23.098806Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:23.098954Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:23.099011Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:07:23.099069Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:07:23.099124Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 
2025-04-07T14:07:23.099232Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:07:23.101280Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1333 } } 2025-04-07T14:07:23.101339Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:07:23.101503Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1333 } } 2025-04-07T14:07:23.101618Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1333 } } 2025-04-07T14:07:23.105963Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:07:23.106048Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:07:23.106254Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:07:23.106343Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:07:23.106477Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:07:23.106568Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:23.106626Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.106680Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:07:23.106736Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:07:23.107892Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2025-04-07T14:07:23.113793Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.114042Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.114223Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:23.114281Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:07:23.114469Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:07:23.114529Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:07:23.114608Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:07:23.114705Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:341:2316] message: TxId: 102 2025-04-07T14:07:23.114786Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:07:23.114877Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:07:23.114927Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:07:23.115060Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:07:23.117412Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:07:23.117466Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:388:2362] TestWaitNotification: OK eventTxId 102 >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> 
KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.242889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.243032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.243075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.314638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.314753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.324812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.324931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.325066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T14:06:54.330070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.330263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.330871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.331038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354454Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.478582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.478780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.478952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.479119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.479182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.480974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.481087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.481204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.481250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.481316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T14:06:54.481343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.482672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.482716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.482755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.484405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.484446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.484484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.484544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.493704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.495518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.495678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.496645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.496797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.496856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.497080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.497127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.497289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.497393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.499286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.499334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.499500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.499538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.499758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.499797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.499875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.499907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.499941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.500003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.500040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.500067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.500133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.500168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.500195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.502042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.502131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.502166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.502233Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.502274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.502404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
erTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:28.361334Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:07:28.361712Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 427us result status StatusSuccess 2025-04-07T14:07:28.362607Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:28.364535Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2025-04-07T14:07:28.364908Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 389us result status StatusSuccess 2025-04-07T14:07:28.365806Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 
IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.242747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.242863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.242907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.317784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.317886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.327331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.327437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.327568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.332730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.332869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.333400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.333554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.335183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354513Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.482635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.482867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.483025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.483203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.483268Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.485263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.485383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.485512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.485569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.485647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.485682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.486998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.487050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.487087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.488489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.488530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.488568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.488646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.492367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.493875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.494041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.494939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.495076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.495134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-07T14:06:54.495351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.495409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.495570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.495664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.497365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.497545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.497790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.497830Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.497922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.497952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.497996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.498055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.498091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.498119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:06:54.498170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.498224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.498256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.500016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.500118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.500154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.500207Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.500250Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.500334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:29.393634Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:07:29.393768Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:07:29.394131Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:29.394283Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 64424511595 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:29.394366Z node 15 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#102:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-04-07T14:07:29.394563Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:07:29.394824Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:07:29.394896Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:07:29.396654Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:29.396729Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:07:29.396842Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:07:29.396993Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:29.397016Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:07:29.397047Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:202:2205], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-07T14:07:29.397358Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:07:29.397393Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:07:29.397541Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:07:29.397583Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2025-04-07T14:07:29.397640Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:07:29.397688Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:07:29.397739Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:07:29.397773Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:07:29.397923Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:07:29.397968Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:07:29.398015Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:07:29.398058Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:07:29.398701Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:29.398757Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:29.398790Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:07:29.398843Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:07:29.398889Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:07:29.400144Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:29.400221Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:07:29.400248Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:07:29.400282Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:07:29.400312Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:07:29.400391Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:07:29.402749Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:07:29.403872Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got 
TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:07:29.404136Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:07:29.404199Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:07:29.404754Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:07:29.404890Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:07:29.404945Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:462:2419] TestWaitNotification: OK eventTxId 102 2025-04-07T14:07:29.405540Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:07:29.405824Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 324us result status StatusSuccess 2025-04-07T14:07:29.406295Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:29.406901Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:07:29.407118Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 254us result status StatusSuccess 2025-04-07T14:07:29.407589Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" 
PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:29.658643Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] Test command err: 2025-04-07T14:06:08.704750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.704811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.768644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.672971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-07T14:06:09.774177Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.774543Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.775063Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8729482236269251938 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.812387Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.812702Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.812806Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8193363830166202174 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.864405Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.864818Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.864934Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4825322505914304943 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.902244Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.902602Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.902706Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2597777500459727180 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.942418Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.942807Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.942935Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpiJKJz0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3408515333859095952 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.945083Z node 4 :BS_LOCALRECOVERY CRIT: VDISK[80000002:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFr ... elFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:07:24.105483Z node 161 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:07:24.105892Z node 161 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-07T14:07:24.106060Z node 161 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4189336705028777317 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:07:24.145399Z node 156 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:07:24.145905Z node 156 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:07:24.146084Z node 156 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7886885938874244154 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:07:24.148961Z node 156 :BS_LOCALRECOVERY CRIT: VDISK[80000001:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:07:24.227457Z node 157 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:07:24.227852Z node 157 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:07:24.228031Z node 157 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5274399025874671202 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:07:24.259583Z node 158 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:07:24.260118Z node 158 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:07:24.260288Z node 158 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001195/r3tmp/tmpWhAxKn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14287839716001119264 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:07:24.477328Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:24.477456Z node 154 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:24.565308Z node 154 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-04-07T14:07:27.692750Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:27.692837Z node 163 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:27.753792Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec 
| Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 
Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > (2, NULL) | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > (2, 4) | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > (2, 10) | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > (3, 3) | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > (3, 6) | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > (3, 8) | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > (4, NULL) | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > (4, 4) | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > (4, 7) | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > (4, 10) | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > (5, 3) | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > (5, 6) | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 
Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b (0, 1) | 1 
2 50b (0, 4) | 2 4 50b (0, 7) | 3 6 50b (0, 10) | 4 8 50b (1, 3) | 5 10 50b (1, 6) | 6 12 50b (1, 8) | 7 14 50b (2, NULL) | 8 16 50b (2, 4) | 10 18 50b (2, 7) | 11 20 50b (2, 10) | 12 22 50b (3, 3) | 13 24 50b (3, 6) | 15 26 50b (3, 8) | 16 28 50b (4, NULL) | 17 30 50b (4, 4) | 18 32 50b (4, 7) | 19 34 50b (4, 10) | 21 36 50b (5, 3) | 22 38 50b (5, 6) | 22 39 50b (5, 7) + BTreeIndex{Pa ... FlatExecutorLeases::TLeasesTablet::TTxInit 2025-04-07T14:07:21.668690Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} took 4194304b of static mem, Memory{8388608 dyn 0} 2025-04-07T14:07:21.668763Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:07:21.668810Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} release 4194304b of static, Memory{4194304 dyn 0} 2025-04-07T14:07:21.669149Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:07:21.669191Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:07:21.678858Z node 14 :TABLET_MAIN INFO: Tablet: 9437184 Active! Generation: 3, Type: Dummy started in 501msec Marker# TSYS24 2025-04-07T14:07:21.681560Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:1:0:0:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:07:21.681651Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:1:1:28672:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:07:21.681750Z node 14 :TABLET_MAIN DEBUG: Tablet: 9437184 GcCollect 0 channel, tablet:gen:step => 3:0 Marker# TSYS28 2025-04-07T14:07:21.681858Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} commited cookie 2 for step 1 2025-04-07T14:07:21.730901Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:2:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:21.731042Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:3} commited cookie 8 for step 2 2025-04-07T14:07:21.785277Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:3:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:21.785406Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:4} commited cookie 8 for step 3 2025-04-07T14:07:21.916258Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:4:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:21.916421Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:5} commited cookie 8 for step 4 2025-04-07T14:07:21.960760Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:5:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:21.960887Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:6} commited cookie 8 for step 5 2025-04-07T14:07:22.004404Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult 
{Id# [9437184:3:6:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:22.004556Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:7} commited cookie 8 for step 6 2025-04-07T14:07:22.059649Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:7:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-07T14:07:22.059801Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} commited cookie 8 for step 7 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:22.372635Z 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:22.410026Z 00000.008 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00001.246 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.247 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.248 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.248 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199545b, 2023} 00001.248 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00001.248 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.248 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.248 II| FAKE_ENV: All BS storage groups are stopped 00001.248 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.249 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:23.670343Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00001.236 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.237 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.238 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.238 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.238 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00001.238 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.238 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.238 II| FAKE_ENV: All BS storage groups are stopped 00001.238 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.238 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:24.920250Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00001.298 II| FAKE_ENV: Model starts hard shutdown 
on level 7 of 8, left 3 actors 00001.299 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.299 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.299 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.299 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00001.300 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.300 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.300 II| FAKE_ENV: All BS storage groups are stopped 00001.300 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.300 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:26.231361Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00001.240 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.241 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.242 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.242 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199557b, 2023} 00001.242 II| FAKE_ENV: DS.1 gone, left {2013604b, 4}, put {7237892b, 2026} 00001.242 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.242 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.242 II| FAKE_ENV: All BS storage groups are stopped 00001.242 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.242 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:27.484467Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00001.078 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.079 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.079 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.079 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.079 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211300b, 2026} 00001.080 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.080 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.080 II| FAKE_ENV: All BS storage groups are stopped 00001.080 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.080 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:28.575573Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00001.186 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.187 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 
309 reqs hit {1118 6955338b} miss {2 9773b} 00001.187 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.187 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00001.187 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.187 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.187 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199554b, 2023} 00001.187 II| FAKE_ENV: All BS storage groups are stopped 00001.187 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.187 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:07:29.773931Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00001.918 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.919 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {780 4008302b} miss {1002 6916040b} 00001.919 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.920 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {200304b, 2033} 00001.920 II| FAKE_ENV: DS.1 gone, left {2023570b, 4}, put {9254517b, 2039} 00001.920 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.920 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.920 II| FAKE_ENV: All BS storage groups are stopped 00001.920 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.920 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> KqpJoin::RightSemiJoin_SimpleKey >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> YdbIndexTable::MultiShardTableOneUniqIndex >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull 
[GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-04-07T14:06:08.522244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572293236007126:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:08.522840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpLJWnq9/pdisk_1.dat 2025-04-07T14:06:08.805721Z node 1 :HTTP ERROR: (#30,[::1]:7932) connection closed with error: Connection refused 2025-04-07T14:06:08.806293Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:08.808125Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.902560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:08.902662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:08.904282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.188796Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572305273854882:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.188855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpnvMNR8/pdisk_1.dat 2025-04-07T14:06:11.260223Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.277446Z node 2 :HTTP ERROR: (#32,[::1]:19563) connection closed with error: Connection refused 2025-04-07T14:06:11.277898Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:11.320255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.320310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.321228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:13.976386Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490572316319851838:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:13.976431Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmp0SEZ4l/pdisk_1.dat 2025-04-07T14:06:14.089130Z 
node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.092485Z node 3 :HTTP ERROR: (#30,[::1]:19487) connection closed with error: Connection refused 2025-04-07T14:06:14.092854Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:14.104764Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:14.104831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:14.106476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:16.517322Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490572327229606048:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:16.517371Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpRAyfpM/pdisk_1.dat 2025-04-07T14:06:16.577217Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:16.592315Z node 4 :HTTP ERROR: (#32,[::1]:26732) connection closed with error: Connection refused 2025-04-07T14:06:16.592565Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:16.635524Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:16.635592Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:16.637423Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:19.060263Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490572339808261138:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:19.060306Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpyXHApK/pdisk_1.dat 2025-04-07T14:06:19.130038Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:19.143114Z node 5 :HTTP ERROR: (#34,[::1]:14166) connection closed with error: Connection refused 2025-04-07T14:06:19.143269Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:19.161181Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:19.161235Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:19.162287Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:21.742705Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490572349766363273:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:21.742758Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpEkHiAe/pdisk_1.dat 2025-04-07T14:06:21.817012Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:21.835487Z node 6 :HTTP ERROR: (#36,[::1]:63193) connection closed with error: Connection refused 2025-04-07T14:06:21.835741Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:21.872193Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:21.872271Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:21.873345Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:24.482492Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490572363588711317:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:24.482561Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmp85blQJ/pdisk_1.dat 2025-04-07T14:06:24.569428Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.579454Z node 7 :HTTP ERROR: (#38,[::1]:23558) connection closed with error: Connection refused 2025-04-07T14:06:24.580090Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:24.589724Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:24.589799Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:24.591803Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:27.566819Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490572373642942986:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpaBluhh/pdisk_1.dat 2025-04-07T14:06:27.600211Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:27.638175Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:27.659569Z node 8 :HTTP ERROR: (#40,[::1]:61592) connection closed with error: Connection refused 2025-04-07T14:06:27.659928Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:27.661462Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:27.661549Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:27.664627Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:31.232230Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7490572392903290540:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpimKRQG/pdisk_1.dat 2025-04-07T14:06:31.279569Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:31.336623Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:31.362046Z node 9 :HTTP ERROR: (#42,[::1]:18021) connection closed with error: Connection refused 2025-04-07T14:06:31.362581Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:31.363605Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:31.363689Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:31.365479Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:34.948190Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490572405851566826:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:34.948280Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpebfwZc/pdisk_1.dat 2025-04-07T14:06:35.047500Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:35.071110Z node 10 :HTTP ERROR: (#30,[::1]:27327) connection closed with error: Connection refused 2025-04-07T14:06:35.076591Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:35.077563Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileSta ... 
heme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpZQcYsU/pdisk_1.dat 2025-04-07T14:06:51.077480Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:51.095774Z node 14 :HTTP ERROR: (#38,[::1]:1201) connection closed with error: Connection refused 2025-04-07T14:06:51.096403Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:51.111585Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:51.111689Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:51.114255Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:55.273577Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7490572497439799015:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:55.273659Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpaJ56Ou/pdisk_1.dat 2025-04-07T14:06:55.390560Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:55.411747Z node 15 :HTTP ERROR: (#40,[::1]:19856) connection closed with error: Connection refused 2025-04-07T14:06:55.412083Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:55.412635Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:55.412754Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:55.414591Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:59.595188Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7490572512391902030:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:59.595251Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmp7Su5yh/pdisk_1.dat 2025-04-07T14:06:59.678029Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:59.704966Z node 16 :HTTP ERROR: (#42,[::1]:27634) connection closed with error: Connection refused 2025-04-07T14:06:59.705255Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:06:59.719368Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:59.719459Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:59.721162Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:03.807517Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7490572528024534349:2051];send_to=[0:7307199536658146131:7762515]; 
2025-04-07T14:07:03.807600Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmprUCjpS/pdisk_1.dat 2025-04-07T14:07:03.920232Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:03.947778Z node 17 :HTTP ERROR: (#30,[::1]:31541) connection closed with error: Connection refused 2025-04-07T14:07:03.948002Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:03.955779Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:03.955866Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:03.957391Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:07.994087Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7490572547062188043:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:07.994145Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpSg92ru/pdisk_1.dat 2025-04-07T14:07:08.121067Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:08.142723Z node 18 :HTTP ERROR: (#29,[::1]:23446) connection closed with error: Connection refused 2025-04-07T14:07:08.143677Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:08.151595Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:08.151702Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:08.153311Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:12.376934Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7490572570534856804:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:12.376998Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpXWCDjS/pdisk_1.dat 2025-04-07T14:07:12.474404Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:12.497886Z node 19 :HTTP ERROR: (#34,[::1]:12092) connection closed with error: Connection refused 2025-04-07T14:07:12.498416Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:12.511938Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:12.512071Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:12.513518Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:16.845197Z node 20 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7490572587679879983:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:16.845244Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpNvkT7O/pdisk_1.dat 2025-04-07T14:07:16.970004Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:17.005219Z node 20 :HTTP ERROR: (#33,[::1]:22370) connection closed with error: Connection refused 2025-04-07T14:07:17.005483Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:17.007572Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:17.007683Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:17.009369Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:22.271555Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7490572613139845536:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:22.271642Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpXWrwxq/pdisk_1.dat 2025-04-07T14:07:22.392266Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:22.408514Z node 21 :HTTP ERROR: (#38,[::1]:15922) connection closed with error: Connection refused 2025-04-07T14:07:22.409000Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:22.426297Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:22.426414Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:22.428319Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:26.947747Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7490572626864092010:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:26.947882Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmpwrwAqP/pdisk_1.dat 2025-04-07T14:07:27.074476Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:27.107671Z node 22 :HTTP ERROR: (#40,[::1]:10020) connection closed with error: Connection refused 2025-04-07T14:07:27.108690Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:27.110631Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:27.110737Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T14:07:27.112405Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:32.298139Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7490572652445316104:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00119e/r3tmp/tmp7Oiz5V/pdisk_1.dat 2025-04-07T14:07:32.370684Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:07:32.426866Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:32.442649Z node 23 :HTTP ERROR: (#39,[::1]:5952) connection closed with error: Connection refused 2025-04-07T14:07:32.443871Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-07T14:07:32.462562Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:32.462671Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:32.469975Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> YdbIndexTable::OnlineBuild >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpJoin::TwoJoinsWithQueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-04-07T14:06:33.232096Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:33.305945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:33.306006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:33.307883Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:33.308267Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:33.308544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:33.318142Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 
268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:33.358654Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:33.359075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:33.360589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:33.360682Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:33.360737Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:33.361059Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:33.384262Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:33.384430Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:33.384540Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:33.384578Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:33.384611Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:33.384649Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.384870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.384918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.385267Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:33.385341Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:33.385388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:33.385424Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:33.385468Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:33.385500Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:33.385533Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:33.385563Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:33.385594Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:33.425905Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.425962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.426014Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:33.427847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 
0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:33.427896Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:33.427972Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:33.428164Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:33.428237Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:33.428287Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:33.428326Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:33.428351Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:33.428371Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:33.428391Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:33.428605Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:33.428641Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:33.428686Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:33.428720Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:33.428765Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:33.428801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:33.428832Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:33.428869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:33.428894Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:33.451227Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:33.451287Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:33.451331Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:33.451377Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:33.451440Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:33.451930Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.451981Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:33.452019Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:33.452140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:33.452179Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:33.452295Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:33.452351Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.452386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:33.452418Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:33.455964Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:33.456028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:33.456239Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.456277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:33.456356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:33.456391Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:33.456421Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:33.456458Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:33.456508Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:33.456556Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.456595Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:33.456624Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:33.456654Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:33.456837Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:33.456870Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.456908Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:33.456933Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:33.456953Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:33.457030Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:33.457054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:33.457089Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:33.457125Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:33.457171Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:33.457197Z node 1 :TX_DATASHARD 
TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:33.457236Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:33.457279Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:33.457298Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:33.457320Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... treamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.742514Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.742547Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:21] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.742583Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 21] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.742613Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.742738Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.742766Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.742804Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.742834Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.742962Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.742993Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.743027Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.743056Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.743251Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.743284Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.743322Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.743353Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.743516Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.743543Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.743582Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.743614Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.743798Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.743830Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.743867Z node 32 :TX_DATASHARD 
DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.743897Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.744041Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.744068Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.744107Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.744136Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.744267Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.744295Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.744335Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.744366Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.744496Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.744528Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.744565Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.744594Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.744994Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.745078Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.745141Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.745193Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.745360Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.745390Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.745431Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.745460Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.745599Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.745625Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.745663Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.745691Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.745812Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete 
at 9437184 2025-04-07T14:07:39.745836Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.745872Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.745900Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.746027Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.746055Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.746093Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.746118Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.746298Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.746323Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.746359Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.746388Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.746576Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.746602Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.746638Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.746678Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.746825Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:39.746851Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-07T14:07:39.746887Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:39.746914Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:39.747168Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:39.747209Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:39.747247Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-07T14:07:39.747354Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:39.747381Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:39.747407Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-07T14:07:39.747477Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:39.747506Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:39.747532Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-07T14:07:39.747598Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:07:39.747627Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:39.747651Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-07T14:07:39.747734Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:39.747763Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:39.747790Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 expect 28 29 31 31 30 29 31 30 30 30 14 30 29 24 11 19 29 14 18 29 29 30 30 20 14 29 18 24 15 24 - - actual 28 29 31 31 30 29 31 30 30 30 14 30 29 24 11 19 29 14 18 29 29 30 30 20 14 29 18 24 15 24 - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2025-04-07T14:06:37.606668Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:37.664549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:37.664590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:37.666110Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:37.666429Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:37.666660Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:37.675196Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:37.701002Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:37.701364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:37.702517Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:37.702589Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:37.702631Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:37.702867Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:37.724834Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:37.725040Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:37.725163Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:37.725212Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:37.725245Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:37.725278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:37.725483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.725531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.725927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:37.726014Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:37.726058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:37.726097Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:37.726143Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:37.726176Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:37.726211Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:37.726238Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:37.726271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:37.753870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.753935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.753981Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:37.755835Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:37.755878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:37.755946Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:37.756089Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:37.756155Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:37.756224Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:37.756265Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:37.756298Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:37.756329Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:37.756358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:37.756624Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:37.756687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:37.756716Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:37.756755Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:37.756803Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:37.756829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:37.756862Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:37.756904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:37.756926Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:37.779637Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:37.779722Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:37.779767Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:37.779824Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:37.779884Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:37.780405Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.780455Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:37.780491Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:37.780608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:37.780637Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:37.780786Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 
on unit WaitForPlan 2025-04-07T14:06:37.780834Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.780865Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:37.780911Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:37.784185Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:37.784244Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:37.784427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.784459Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:37.784503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:37.784542Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:37.784571Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:37.784607Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:37.784637Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:37.784694Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.784750Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:37.784788Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:37.784816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:37.784993Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:37.785025Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.785062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:37.785088Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:37.785108Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:37.785160Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:37.785198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:37.785228Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:37.785262Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:37.785319Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:37.785345Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:37.785383Z node 1 :TX_DATASHARD TRACE: Activated 
operation [1000001:1] at 9437184 2025-04-07T14:06:37.785419Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:37.785448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:37.785476Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... pose latency: 1 ms 2025-04-07T14:07:40.323970Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.324119Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.324151Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.324196Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.324230Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.324442Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.324477Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.324523Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.324558Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.328482Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.328564Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.328634Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.328701Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.328950Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.328987Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.329038Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.329075Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.329224Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.329260Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.329303Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.329335Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.329498Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.329530Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.329575Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 
9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.329609Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.329759Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.329792Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.329837Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.329870Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.330013Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.330046Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.330090Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.330123Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.330270Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.330301Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.330346Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.330380Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.330575Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.330609Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.330654Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.330687Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.330879Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.330910Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.330951Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.330983Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.331156Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.331189Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.331232Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.331264Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.331426Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:40.331458Z node 32 
:TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-07T14:07:40.331503Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:40.331543Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:40.331869Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-07T14:07:40.331937Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.331979Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-07T14:07:40.332116Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:40.332151Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332182Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-07T14:07:40.332277Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:40.332308Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332340Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-07T14:07:40.332428Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:40.332463Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332494Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-07T14:07:40.332606Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:07:40.332639Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332685Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-07T14:07:40.332754Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:07:40.332786Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332816Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-07T14:07:40.332904Z node 32 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:07:40.332938Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.332967Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-07T14:07:40.333055Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:40.333089Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.333122Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-07T14:07:40.333208Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-07T14:07:40.333240Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:40.333268Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 23 25 28 31 27 24 27 30 30 29 31 30 30 28 24 28 22 22 12 28 13 4 10 28 28 22 7 16 - - - - actual 23 25 28 31 27 24 27 30 30 29 31 30 30 28 24 28 22 22 12 28 13 4 10 28 28 22 7 16 - - - - interm 3 3 1 3 2 2 4 4 4 1 - 4 - 6 6 4 - - 6 - - 4 - - 6 - - - - - - - >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-04-07T14:06:08.701071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.701129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.769839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.736728Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.736807Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.794068Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:14.753991Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:14.754060Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.817824Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:15.621452Z node 19 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/dc-1/users/tenant-1" resources { storage_units { unit_kind: "hdd" count: 1 } } database_quotas { } scale_recommender_policies { } } 2025-04-07T14:06:15.621646Z node 19 :CMS_TENANTS 
DEBUG: Add tenant /dc-1/users/tenant-1 (txid = 482048) 2025-04-07T14:06:15.643853Z node 19 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-07T14:06:15.644108Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) Bootstrap 2025-04-07T14:06:15.644305Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd" } } } 2025-04-07T14:06:15.644844Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 2 2025-04-07T14:06:15.644962Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-07T14:06:15.646837Z node 19 :CMS_TENANTS DEBUG: Add subscription to /dc-1/users/tenant-1 for [19:516:2137] 2025-04-07T14:06:15.659297Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) got config response: Status { Success: true AssignedStoragePoolId: 1 } Success: true ConfigTxSeqNo: 3 2025-04-07T14:06:15.659400Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-07T14:06:15.659665Z node 19 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /dc-1/users/tenant-1:hdd of /dc-1/users/tenant-1 state=ALLOCATED 2025-04-07T14:06:15.671974Z node 19 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /dc-1/users/tenant-1:hdd 2025-04-07T14:06:15.672142Z node 19 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /dc-1/users/tenant-1 to CREATING_SUBDOMAIN 2025-04-07T14:06:15.684129Z node 19 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /dc-1/users/tenant-1 2025-04-07T14:06:15.697534Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1)::Bootstrap 2025-04-07T14:06:15.697609Z node 19 :CMS_TENANTS DEBUG: TSubDomainManip(/dc-1/users/tenant-1) create subdomain 2025-04-07T14:06:15.701249Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-07T14:06:15.710894Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1) got propose result: Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046578944 PathId: 3 2025-04-07T14:06:15.718063Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715657 2025-04-07T14:06:15.827173Z node 26 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:15.827605Z node 26 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:15.828096Z node 26 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11943895386202775191 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:15.856102Z node 27 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:15.856430Z node 27 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:15.856588Z node 27 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7155645898832855214 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:15.895149Z node 21 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:15.895534Z node 21 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:15.895671Z node 21 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2107880128404173432 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:15.947675Z node 25 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:15.948001Z node 25 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:15.948153Z node 25 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001185/r3tmp/tmpJAM5n2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2383397855840796774 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch Writ ... 
1/1 2025-04-07T14:07:36.396441Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-07T14:07:36.396549Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-07T14:07:36.396617Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-04-07T14:07:36.396804Z node 154 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [154:1361:2609], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-04-07T14:07:36.396869Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-07T14:07:36.396960Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-07T14:07:36.397028Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-04-07T14:07:36.397140Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-04-07T14:07:36.397215Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-04-07T14:07:36.397304Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-04-07T14:07:36.400740Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-04-07T14:07:36.400788Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2025-04-07T14:07:36.400986Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-04-07T14:07:36.401029Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [154:676:2242], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-04-07T14:07:36.401169Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-04-07T14:07:36.401265Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-04-07T14:07:36.401523Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-07T14:07:36.402016Z node 154 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T14:07:36.402104Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 72057594046578944 removed=1 2025-04-07T14:07:36.402145Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-04-07T14:07:36.402180Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 
2025-04-07T14:07:36.403750Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-07T14:07:36.403841Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-07T14:07:36.403907Z node 154 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-04-07T14:07:36.404010Z node 154 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-04-07T14:07:36.404103Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-07T14:07:36.404264Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-04-07T14:07:36.404378Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [154:1901:2387] 2025-04-07T14:07:36.407626Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-04-07T14:07:36.407690Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-04-07T14:07:36.407766Z node 154 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[154:1361:2609], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-04-07T14:07:36.407854Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:07:36.407888Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:07:36.408040Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:07:36.408076Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [154:1571:2758], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-07T14:07:36.409781Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-04-07T14:07:36.411686Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-04-07T14:07:36.411764Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 DatabaseQuotas { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 2025-04-07T14:07:38.938416Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:38.938517Z node 163 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:39.013444Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore >> YdbIndexTable::MultiShardTableOneIndex >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> KqpJoin::TwoJoinsWithQueryService [GOOD] >> KqpJoin::RightTableKeyPredicate >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] Test command err: 2025-04-07T14:06:08.709824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.709888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.769704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.685376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-07T14:06:09.788885Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.789401Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.789923Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2551181475192316193 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.792503Z node 2 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:06:09.844833Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.845330Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.845574Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1905313164621570281 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.875181Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.875687Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.875864Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16216186906328743116 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.927799Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.928289Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.928464Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15662662799242921401 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:09.958342Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:09.958827Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:09.959045Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001169/r3tmp/tmpiJVoyt/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/b ... onds was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CanCancelROWithReadSets was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.DataTxProfile.LogThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.DataTxProfile.BufferThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.DataTxProfile.BufferSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.BackupReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.BackupReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control DataShardControls.PrioritizedMvccSnapshotReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.UnprotectedMvccSnapshotReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.TtlReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.TtlReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.EnableLockedWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.MaxLockedWritesPerKey was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.ChangeRecordDebugPrint was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.IncrementalRestoreReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.IncrementalRestoreReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.MemoryLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestReportingDelayMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. 
A default value may have been used before it was configured. WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3355, MsgBus: 12542 2025-04-07T14:07:13.298017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572572591101819:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:13.298048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102f/r3tmp/tmpciMyrL/pdisk_1.dat 2025-04-07T14:07:13.564860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3355, node 1 2025-04-07T14:07:13.653150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:13.653195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:13.653223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:13.653359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:13.664331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:13.664420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:13.666005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12542 TClient is connected to server localhost:12542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:07:14.110988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:14.133784Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:14.141257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:14.274451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:14.438246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.504045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:16.132919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572585476005411:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:16.133041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:16.356212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.377459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.402224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.426578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.448152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.478605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.515661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572585476005907:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:16.515818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:16.515942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572585476005912:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:16.519516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:16.533020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572585476005914:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:17.519013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.560820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.582208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.604657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.628324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.648148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:18.298303Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572572591101819:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:18.298367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63790, MsgBus: 20647 2025-04-07T14:07:19.004342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572598579418098:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:19.004408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102f/r3tmp/tmpM1b3yy/pdisk_1.dat 2025-04-07T14:07:19.096356Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:19.108595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:19.108775Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:19.110252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63790, node 2 2025-04-07T14:07:19.153196Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:19.153217Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:19.153224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:19.153315Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20647 TClient is connected to server localhost:20647 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:19.534062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... have access permissions } 2025-04-07T14:07:35.104790Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:35.104986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572666336780344:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:35.109515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:35.120908Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490572666336780346:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:36.260550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.296711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.327222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.355199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.388298Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.456558Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.558156Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490572649156908964:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:36.558214Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8181, MsgBus: 24389 2025-04-07T14:07:38.302983Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490572680523735457:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:38.303027Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102f/r3tmp/tmpWe5NKf/pdisk_1.dat 2025-04-07T14:07:38.416526Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:38.448537Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:38.448634Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:38.450089Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8181, node 5 2025-04-07T14:07:38.520295Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:38.520319Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:38.520329Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:38.520486Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24389 TClient is connected to server localhost:24389 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:39.076496Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:39.087588Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:07:39.102763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:39.185752Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:07:39.363392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:39.449913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:42.112820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572697703606335:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:42.112946Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:42.157078Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.227191Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.265066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.303058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.374305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.421217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:42.477900Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572697703606836:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:42.477981Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:42.478202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572697703606841:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:42.481358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:42.489939Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490572697703606843:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:43.303946Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490572680523735457:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:43.304017Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:43.668066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.706151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.776508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.813123Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.849019Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.895947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup >> BuildStatsHistogram::Many_Serial [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455,2599), [2798,3624), 
[4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% 
error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key 
= (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 
203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (109522, 36515) value = 1078779 (actual 1082732 - 0% error) 10% (actual 10%) key = (199786, 66603) value = 2157298 (actual 2161219 - 0% error) ... (307549, NULL) (307615, NULL) (307678, NULL) (307744, NULL) 100 rows, 100 pages, 4 levels: (307810, NULL) (307876, NULL) (307939, NULL) (308005, NULL) (308065, NULL) 100 rows, 100 pages, 4 levels: (308131, NULL) (308194, NULL) (308260, NULL) (308320, NULL) (308386, NULL) 100 rows, 100 pages, 4 levels: (308452, NULL) (308518, NULL) (308587, NULL) (308650, NULL) (308719, NULL) 100 rows, 100 pages, 4 levels: (308779, NULL) (308842, NULL) (308908, NULL) (308974, NULL) (309049, NULL) 100 rows, 100 pages, 4 levels: (309115, NULL) (309181, NULL) (309247, NULL) (309319, NULL) (309385, NULL) 100 rows, 100 pages, 4 levels: (309448, NULL) (309511, NULL) (309580, NULL) (309649, NULL) (309715, NULL) 100 rows, 100 pages, 4 levels: (309775, NULL) (309850, NULL) (309922, NULL) (309994, NULL) (310060, NULL) 100 rows, 100 pages, 4 levels: (310132, NULL) (310195, NULL) (310264, NULL) (310327, NULL) (310396, NULL) 100 rows, 100 pages, 4 levels: (310465, NULL) (310534, NULL) (310594, NULL) (310660, NULL) (310726, NULL) 100 rows, 100 pages, 4 levels: (310801, NULL) (310867, NULL) (310945, NULL) (311011, NULL) (311077, NULL) 100 rows, 100 pages, 4 levels: (311140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) 100 
rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) 
(328573, NULL) (328648, NULL) (328717, NULL) 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key 
= (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] Test command err: Trying to start YDB, gRPC: 17208, MsgBus: 9716 2025-04-07T14:07:16.193655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572587794988527:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:16.193725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102b/r3tmp/tmp4NL9e0/pdisk_1.dat 2025-04-07T14:07:16.463539Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17208, node 1 2025-04-07T14:07:16.536271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:16.536411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:16.537882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:16.544710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:16.544739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:16.544745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:16.544841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9716 TClient is connected to server localhost:9716 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:17.012121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:17.032352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:17.141401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.267975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:17.333545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:07:18.810460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572596384924838:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:18.810563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.000381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.027221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.050514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.075428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.098881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.123541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.163532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572600679892630:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.163597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.163659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572600679892635:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.166643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:19.175243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572600679892637:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:20.078810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.104296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.168495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.193951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572587794988527:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:21.194045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17138, MsgBus: 23036 2025-04-07T14:07:21.639990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572606762737052:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:21.640068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102b/r3tmp/tmpRC2pYh/pdisk_1.dat 2025-04-07T14:07:21.719649Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17138, node 2 2025-04-07T14:07:21.766247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:21.766328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:21.767961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:21.779299Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:21.779321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:21.779334Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:21.779467Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23036 TClient is connected to server localhost:23036 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:22.143623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:22.162784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:22.204848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:22.358254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-0 ... 94046644480 2025-04-07T14:07:37.900188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:37.968550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.048171Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572682232746102:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:38.048236Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:38.048613Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572682232746107:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:38.052433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:38.060788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490572682232746109:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:39.268216Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:39.298257Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490572665052874698:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:39.298352Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:39.340566Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:39.426810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22334, MsgBus: 8625 2025-04-07T14:07:41.362439Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490572694959492217:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:41.362492Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00102b/r3tmp/tmpM57DAL/pdisk_1.dat 2025-04-07T14:07:41.513340Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:41.526889Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:41.526980Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:41.527761Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22334, node 5 2025-04-07T14:07:41.572799Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:41.572822Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:41.572835Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:41.572944Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8625 TClient is connected to server localhost:8625 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:42.037568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:42.054584Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:42.139703Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:42.314042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:42.394398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:07:44.861963Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572707844395821:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:44.862085Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:44.938554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.013258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.053885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.103619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.181981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.242559Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.285010Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572712139363616:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:45.285106Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:45.285338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572712139363621:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:45.289344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:45.302959Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490572712139363623:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:46.364853Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490572694959492217:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:46.410766Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:46.589332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:46.627095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:46.671106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:46.717193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:46.763086Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:56: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoin::RightTableKeyPredicate [GOOD] >> KqpJoin::RightTableIndexPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-04-07T14:07:49.120380Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T14:07:49.120559Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T14:07:49.126697Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T14:07:49.129718Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T14:07:49.138522Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-07T14:07:49.138640Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite |92.7%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> TExportToS3Tests::RebootDuringCompletion >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::CheckItemProgress >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] >> OlapEstimationRowsCorrectness::TPCH11 >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-04-07T14:06:48.611472Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:48.661939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:48.661989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:48.663718Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:48.664030Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:48.664251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:48.673858Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:48.707230Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:48.707659Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:48.709063Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:48.709144Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:48.709196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:48.709528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:48.729794Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:48.729943Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:48.730037Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:48.730069Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 
2025-04-07T14:06:48.730107Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:48.730141Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.730305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.730407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.730690Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:48.730770Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:48.730828Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.730864Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:48.730908Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:48.730939Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:48.730966Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:48.730992Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:48.731029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:48.767228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.767298Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.767349Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:48.773237Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:48.773326Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:48.773415Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:48.773582Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:48.773653Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:48.773712Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:48.773754Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.773788Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:48.773823Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:48.773856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.774116Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:48.774177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:48.774212Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:48.774278Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.774331Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:48.774362Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:48.774403Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:48.774449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.774479Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:48.796793Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:48.796876Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:48.796918Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:48.796966Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:48.797035Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:48.797653Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.797693Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:48.797726Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:48.797814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:48.797835Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:48.797929Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:48.797967Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.797999Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:48.798029Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:48.804881Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:48.804952Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:48.805146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.805186Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:48.805232Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:48.805276Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:48.805308Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:48.805347Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:48.805381Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:48.805440Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.805481Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:48.805516Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:48.805566Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:48.805706Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:48.805741Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.805781Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:48.805805Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:48.805825Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:48.805900Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:48.805927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:48.805961Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:48.805993Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:48.806044Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:48.806076Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:48.806105Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:48.806167Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:48.806188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:48.806231Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
ency: 1 ms 2025-04-07T14:07:54.227739Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.227909Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.227938Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.227978Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.228008Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.228211Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.228240Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.228281Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.228310Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.228505Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.228537Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.228579Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.228610Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.228835Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.228867Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.228912Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.228946Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.229107Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.229137Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.229176Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.229204Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.229367Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.229396Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.229433Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.229461Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.229658Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.229690Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit 
CompleteOperation 2025-04-07T14:07:54.229730Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.229766Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.229915Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.229943Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.229985Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.230016Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.230171Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.230204Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.230249Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.230282Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.230456Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.230488Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.230527Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.230557Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.230741Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.230770Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.230811Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.230840Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.230975Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.231001Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.231039Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.231066Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.231230Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.231257Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.231294Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.231322Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-07T14:07:54.231576Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-07T14:07:54.231616Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.231650Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-07T14:07:54.231757Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:54.231784Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.231811Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-07T14:07:54.231899Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:54.231928Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.231957Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-07T14:07:54.232035Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:54.232061Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232087Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-07T14:07:54.232170Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:07:54.232199Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232223Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-07T14:07:54.232291Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:07:54.232318Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232343Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-07T14:07:54.232422Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:07:54.232449Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232475Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-07T14:07:54.232551Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:54.232582Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232607Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-07T14:07:54.232704Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-07T14:07:54.232735Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.232764Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 29 28 31 31 31 28 22 31 28 24 29 28 25 21 20 29 27 31 31 31 29 25 29 22 28 22 25 23 20 25 - - actual 29 28 31 31 31 28 22 31 28 24 29 28 25 21 20 29 27 31 31 31 29 25 29 22 28 22 25 23 20 25 - - interm 5 2 5 4 - 2 3 2 5 - 6 - 5 6 6 5 4 4 3 - 6 4 - 6 - 6 - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-04-07T14:06:45.683157Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:45.747436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:45.747491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:45.749412Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:45.749718Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:45.749903Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:45.758514Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:45.793708Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:45.794207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:45.795508Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:45.795586Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:45.795627Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:45.795937Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:45.816699Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:45.816846Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:45.816960Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:45.816998Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:45.817046Z node 1 :TX_DATASHARD INFO: Cannot activate 
change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:45.817077Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:45.817267Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.817304Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.817590Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:45.817647Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:45.817678Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:45.817711Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:45.817752Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:45.817776Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:45.817798Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:45.817819Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:45.817855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:45.857001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.857066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.857121Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:45.859648Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:45.859717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:45.859796Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:45.859980Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:45.860041Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:45.860093Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:45.860159Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:45.860193Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:45.860239Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:45.860283Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:45.860580Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
DelayCompleteNoMoreRestarts 2025-04-07T14:06:45.860632Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:45.860684Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:45.860727Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:45.860788Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:45.860824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:45.860871Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:45.860902Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:45.860933Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:45.883467Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:45.883532Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:45.883574Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:45.883618Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:45.883687Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:45.884144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.884185Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:45.884219Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:45.884308Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:45.884334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:45.884421Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:45.884458Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.884491Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:45.884517Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:45.887103Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:45.887162Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:45.887309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:45.887341Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-04-07T14:06:45.887381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:45.887417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:45.887447Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:45.887483Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:45.887515Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:45.887567Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.887617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:45.887647Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:45.887672Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:45.887772Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:45.887802Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.887829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:45.887845Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:45.887862Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:45.887899Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:45.887938Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:45.887964Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:45.887991Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:45.888043Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:45.888077Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:45.888098Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:45.888150Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:45.888167Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:45.888190Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
T14:07:54.418616Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.418775Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.418807Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.418849Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.418881Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.419068Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.419098Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.419139Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.419171Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.419365Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.419396Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.419436Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.419466Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.419683Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.419718Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.419758Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.419787Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.420014Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.420053Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.420104Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.420135Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.420326Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.420356Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.420391Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.420419Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.420550Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.420579Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 
2025-04-07T14:07:54.420619Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.420646Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.420833Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.420865Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.420913Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.420940Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.421082Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.421111Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.421150Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.421180Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.421361Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.421394Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.421435Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.421466Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.421615Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.421643Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.421679Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.421707Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.421876Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.421904Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.421941Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.421973Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.422117Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.422143Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.422177Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.422204Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.422386Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.422416Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.422454Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.422486Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.422611Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:07:54.422642Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-07T14:07:54.422679Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:07:54.422709Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:07:54.423059Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-07T14:07:54.423111Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423152Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-07T14:07:54.423233Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-07T14:07:54.423265Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423294Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-07T14:07:54.423383Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-07T14:07:54.423413Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423441Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-07T14:07:54.423516Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-07T14:07:54.423545Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423573Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-07T14:07:54.423656Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:07:54.423688Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423718Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-07T14:07:54.423788Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:07:54.423815Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.423843Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-07T14:07:54.423946Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-07T14:07:54.423980Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:07:54.424009Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 26 30 26 29 30 30 25 30 28 26 30 30 19 18 30 30 22 16 0 - 11 - 16 28 - 0 - 16 - - - - actual 26 30 26 29 30 30 25 30 28 26 30 30 19 18 30 30 22 16 0 - 11 - 16 28 - 0 - 16 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 23769, MsgBus: 3240 2025-04-07T14:07:26.369889Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572629314276345:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:26.370679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001016/r3tmp/tmpjKgCcI/pdisk_1.dat 2025-04-07T14:07:26.700467Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23769, node 1 2025-04-07T14:07:26.778537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:26.778619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:26.779731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:26.797212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:26.797236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:26.797242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:26.797368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3240 TClient is connected to server localhost:3240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:27.254266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:27.273507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:27.284842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:27.412329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:27.548227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:27.607708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:29.142435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572642199179949:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:29.142530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:29.357827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.393755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.420711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.447446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.515281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.541251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.582448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572642199180448:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:29.582552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:29.582816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572642199180454:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:29.585649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:29.594521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572642199180456:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:30.536851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:30.590173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:31.370723Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572629314276345:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:31.370792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11547, MsgBus: 2885 2025-04-07T14:07:32.087390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572653915937048:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:32.087439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001016/r3tmp/tmpJpOq7q/pdisk_1.dat 2025-04-07T14:07:32.179694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:32.203057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:32.203126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:32.204204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11547, node 2 2025-04-07T14:07:32.237086Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:32.237107Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:32.237115Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:32.237221Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2885 TClient is connected to server localhost:2885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:07:32.689824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.697535Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.701886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:07:32.749310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:32.898438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation typ ... e] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.385550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.459620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.504070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.545785Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.585248Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.665213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.754134Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572717803224204:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.754230Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.754698Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572717803224209:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.759018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:47.770775Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490572717803224211:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:48.686169Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490572700623352886:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:48.686232Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:48.892192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:48.968786Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63099, MsgBus: 15114 2025-04-07T14:07:51.029161Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490572733923612124:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:51.077247Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001016/r3tmp/tmpKaJu3o/pdisk_1.dat 2025-04-07T14:07:51.221245Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63099, node 5 2025-04-07T14:07:51.288506Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:51.288620Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:51.289914Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:51.313241Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:51.313269Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:51.313278Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:51.313402Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15114 TClient is connected to server localhost:15114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:07:52.002018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:52.009547Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:07:52.018959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:52.107370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:52.311802Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:52.391254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:54.926242Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572751103482834:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:54.926378Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:54.969380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.003304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.032870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.065411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.138194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.213020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:55.317020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572755398450640:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:55.317156Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:55.321093Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572755398450645:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:55.326314Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:55.348857Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490572755398450647:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:55.988519Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490572733923612124:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:55.988586Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:56.574715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.671144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoin::RightTableIndexPredicate [GOOD] >> KqpJoin::RightTableValuePredicate >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportStartTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:55.653337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.653428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.653464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.653492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.653532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.653553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.653600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.653935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.727510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.727553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.736149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.736264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.736404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.742767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:55.742945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.743566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.743731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.745470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.746640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.746689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.746781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.746826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.746881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.747020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.752245Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.878840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.879140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.879408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.879677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.879753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.882185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.882366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.882571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.882668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.882709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.882751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.884743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.884806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.884845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.886098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.886260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.889771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.891625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.891813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.893011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.893167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.893223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.893485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.893540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.893722Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.893941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.896099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.896146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.896307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.896349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.896738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.896791Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.896906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.896948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.896995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.897050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.897089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:55.897120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.897187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:55.897225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.897259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.899196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.899320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.899367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.899409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.899448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.899545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
:07:59.688614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710757 2025-04-07T14:07:59.692646Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-04-07T14:07:59.694714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpCreateConsistentCopyTables CreateConsistentCopyTables { CopyTableDescriptions { SrcPath: "/MyRoot/Table" DstPath: "/MyRoot/export-102/0" OmitIndexes: true OmitFollowers: true IsBackup: true } } Internal: true } TxId: 281474976710758 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:59.694994Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/export-102/0, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.695406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: export-102, child name: 0, child id: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:07:59.695477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2025-04-07T14:07:59.695518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:07:59.695567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:07:59.695640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:07:59.695789Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:59.696286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:07:59.696341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:07:59.699456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-04-07T14:07:59.699545Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710758, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /MyRoot/export-102/0 2025-04-07T14:07:59.699696Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2025-04-07T14:07:59.699742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4 2025-04-07T14:07:59.699826Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:59.699874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:07:59.700002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:07:59.700083Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.700116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 3 2025-04-07T14:07:59.700151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-04-07T14:07:59.700761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.700835Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-04-07T14:07:59.701077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-07T14:07:59.701608Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.701685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.701712Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-07T14:07:59.701744Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-07T14:07:59.701774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:07:59.706041Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.706149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.706180Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-07T14:07:59.706212Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2025-04-07T14:07:59.706244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:07:59.706331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-07T14:07:59.707500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-07T14:07:59.707543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-07T14:07:59.707575Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-07T14:07:59.709102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-07T14:07:59.709238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72057594037968897 2025-04-07T14:07:59.709273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2025-04-07T14:07:59.709863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.710135Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-07T14:07:59.710337Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-04-07T14:07:59.710517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:59.710563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2025-04-07T14:07:59.710671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:59.710717Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-07T14:07:59.710779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-07T14:07:59.710883Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 2 -> 3 2025-04-07T14:07:59.711669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-07T14:07:59.715047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.715431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.715494Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2025-04-07T14:07:59.715548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:2 at tablet# 72057594046678944 2025-04-07T14:07:59.721858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:07:59.722017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-04-07T14:07:59.722101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409547 2025-04-07T14:07:59.722133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:55.641819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.641913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.641953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.641983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.642027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.642074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.642132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.642517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.706966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.707018Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.716457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.716579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.716733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.722633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-07T14:07:55.722782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.728747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.729041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.734477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.741461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.741515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.741665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.757772Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.881970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.882172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.882347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.882556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.882624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.885840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.885958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.886104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.886222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.886267Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.889348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.889394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.889435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.891227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.891268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.891302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.891370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.894551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.897257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.897429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.898648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.898748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.898788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.899039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.899084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.899230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.899314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.901374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.901412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:07:55.901559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.901597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.901842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.901883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.901968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.902008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.902045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.902104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.902139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:55.902170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.902225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:55.902258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.902284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.904029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.904134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.904170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.904206Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.904244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.904340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
lt, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.008344Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2025-04-07T14:08:00.010342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.010490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.010539Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:00.010645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2025-04-07T14:08:00.010777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:00.012190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2025-04-07T14:08:00.012318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2025-04-07T14:08:00.012874Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:00.012985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:00.013045Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2025-04-07T14:08:00.013164Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2025-04-07T14:08:00.013299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:25806 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 87A06269-A400-43D1-AB01-6888858267C3 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2025-04-07T14:08:00.046445Z node 3 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:00.046514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-07T14:08:00.046784Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:00.046823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2025-04-07T14:08:00.047202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.047284Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:00.048317Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-04-07T14:08:00.048414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-04-07T14:08:00.048444Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-04-07T14:08:00.048478Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 REQUEST: PUT /Backup2/permissions.pb HTTP/1.1 HEADERS: Host: localhost:25806 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE011358-05C0-4DC1-950D-DE5C5F23FEC9 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 2025-04-07T14:08:00.048514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 S3_MOCK::HttpServeWrite: /Backup2/permissions.pb / / 43 2025-04-07T14:08:00.048722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710765 2025-04-07T14:08:00.055852Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:25806 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AFAB6709-C117-4A93-A12E-7B19471FDF17 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:25806 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C0421FD3-8572-45D8-80A4-6DEFF29383C4 amz-sdk-request: attempt=1 content-length: 
0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2025-04-07T14:08:00.075646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 795 RawX2: 12884904625 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:00.075711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-04-07T14:08:00.075882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 795 RawX2: 12884904625 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:00.076020Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 795 RawX2: 12884904625 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:00.076097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:00.076141Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.076182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:08:00.076227Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2025-04-07T14:08:00.076405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:00.082609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.082999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-07T14:08:00.083053Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-04-07T14:08:00.083178Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-04-07T14:08:00.083210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-04-07T14:08:00.083246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-04-07T14:08:00.083318Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:121:2147] message: TxId: 281474976710765 2025-04-07T14:08:00.083361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-04-07T14:08:00.083396Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-04-07T14:08:00.083431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2025-04-07T14:08:00.083546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T14:08:00.093695Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-04-07T14:08:00.093789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2025-04-07T14:08:00.097352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:08:00.097424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:822:2757] TestWaitNotification: OK eventTxId 104 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:59.028084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:59.028166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:59.028205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:59.028257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:59.028299Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:59.028322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:59.028395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:59.028734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:59.092640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:59.092708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:59.103540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:59.103664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:59.103865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:59.110045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:59.110224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:59.110784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.110938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:59.112431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.113310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:59.113377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.113470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:59.113506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:59.113543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:59.113663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.119062Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:59.276024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:59.276312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.276540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:59.276851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:59.276933Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.279063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.279194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:59.279361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.279448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:59.279482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:59.279514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:59.281364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.281419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:59.281454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:59.283053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.283105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.283140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.283227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:59.286888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:59.288842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:59.289043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:59.290170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.290302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:59.290345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.290615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:59.290669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.290857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:59.290949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:59.293540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:59.293592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:59.293788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.293832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:59.294092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.294143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:59.294269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:59.294317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:59.294382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:59.294440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:59.294475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:59.294507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:59.294574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:59.294611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:59.294640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:59.296510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:59.296639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:59.296730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:59.296779Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2025-04-07T14:07:59.296820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:59.296933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... abletId# 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: PREPARED TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 101500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 253 } } 2025-04-07T14:08:02.123524Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T14:08:02.123621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.123670Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-07T14:08:02.125537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.125678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.125729Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:02.125797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:02.125914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:02.127358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-07T14:08:02.127456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-07T14:08:02.127938Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:02.128053Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:02.128117Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-07T14:08:02.128221Z node 3 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-07T14:08:02.128351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:20867 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5BF553BB-E175-487C-B945-D981AEE06E75 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-07T14:08:02.645448Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:02.645492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:08:02.645691Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:02.645726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-04-07T14:08:02.646212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.646261Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:02.647392Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:02.647468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:02.647494Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-07T14:08:02.647522Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T14:08:02.647551Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:08:02.647617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-04-07T14:08:02.647832Z node 3 :DATASHARD_BACKUP ERROR: [Export] [scanner] Error read data from table: Invalid DyNumber binary representation FAKE_COORDINATOR: Erasing txId 281474976710759 2025-04-07T14:08:02.669573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:02.681908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 
Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:02.681963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-07T14:08:02.682093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:02.682205Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:02.682275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:02.682312Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.682349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:08:02.682391Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T14:08:02.682535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:02.687267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.687560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.687602Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-07T14:08:02.687730Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-07T14:08:02.687760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:02.687798Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-07T14:08:02.687876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:122:2148] message: TxId: 281474976710759 2025-04-07T14:08:02.687912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:02.687946Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 281474976710759:0 2025-04-07T14:08:02.687971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-07T14:08:02.688064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:02.689809Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T14:08:02.689867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-07T14:08:02.690023Z node 3 :EXPORT NOTICE: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid DyNumber binary representation' } 2025-04-07T14:08:02.691941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:02.691991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:466:2430] TestWaitNotification: OK eventTxId 102 >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> TExportToS3Tests::UidAsIdempotencyKey >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-07T14:06:10.764256Z :WriteRAW INFO: Random seed for debugging is 1744034770764221 2025-04-07T14:06:11.022995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572307530742144:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.023095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.068493Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572305515747012:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.224411Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.242075Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e03/r3tmp/tmp4WMXcg/pdisk_1.dat 2025-04-07T14:06:11.303599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.474276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.474474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.476412Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:06:11.483573Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:11.484379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8074, node 1 2025-04-07T14:06:11.521250Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.521437Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.531336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.531404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.541756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.612943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001e03/r3tmp/yandexPIOBMm.tmp 2025-04-07T14:06:11.612971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001e03/r3tmp/yandexPIOBMm.tmp 2025-04-07T14:06:11.613123Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001e03/r3tmp/yandexPIOBMm.tmp 2025-04-07T14:06:11.613247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:11.784907Z INFO: TTestServer started on Port 21418 GrpcPort 8074 TClient is connected to server localhost:21418 PQClient connected to localhost:8074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:12.051296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:06:12.160943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:06:14.058269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572318400649067:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.058266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572320415645008:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.058280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572320415645034:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.058382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.058571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572318400649045:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.058648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.061514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:06:14.063812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572320415645070:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.063871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.076564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572320415645037:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.076625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572318400649078:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.420878Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572318400649121:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.426877Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTc0OTRlMy01OGQzNDNlZS0xZWQ0MThmNC1hYjdlN2Q2Nw==, ActorId: [2:7490572318400649039:2280], ActorState: ExecuteState, TraceId: 01jr895f187v2svhmv6dqybnag, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.426903Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572320415645143:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.428473Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQzY2ZhN2QtMmI4NjBjMjAtMmUzOGMyYjMtNWIyZjc4ZTA=, ActorId: [1:7490572320415645005:2303], ActorState: ExecuteState, TraceId: 01jr895f0wf43wqm9ze6n3fd7s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.428943Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.429173Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.464968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.618551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.738581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8074", true, true, 1000); 2025-04-07T14:06:15.114257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr895frfc83r4j3a73qknq2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMxN2JjNjEtYTgzYmEwMGMtNWRiNTM2MDgtY2MyMjU4MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490572324710612892:3035] 2025-04-07T14:06:16.023101Z node 1 :METADATA_PROVIDER ERROR: fline= ... 
opic: "test-topic" message_group_id: "src" from ipv6:[::1]:53410 2025-04-07T14:08:01.185309Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:53410 proto=v1 topic=test-topic durationSec=0 2025-04-07T14:08:01.185319Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:08:01.187747Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T14:08:01.187903Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:08:01.187917Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:08:01.187930Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:08:01.187952Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:08:01.191876Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:08:01.429971Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T14:08:01.431059Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490572777202162918:2475] connected; active server actors: 1 2025-04-07T14:08:01.431557Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T14:08:01.431588Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T14:08:01.432953Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490572777202162918:2475] disconnected; active server actors: 1 2025-04-07T14:08:01.432992Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490572777202162918:2475] disconnected no session 2025-04-07T14:08:01.630976Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:08:01.631027Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:08:01.631050Z node 15 :PQ_PARTITION_CHOOSER DEBUG: 
TPartitionChooser [15:7490572777202162876:2475] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T14:08:01.631094Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:08:01.632757Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 15, Generation: 1 2025-04-07T14:08:01.632802Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:01.632837Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7490572777202162947:2475], now have 1 active actors on pipe 2025-04-07T14:08:01.632868Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:08:01.632900Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:08:01.633073Z node 15 :PERSQUEUE INFO: new Cookie src|1904430f-5282072a-164ad043-bb74c351_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T14:08:01.633182Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:08:01.633236Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:08:01.635643Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744034881635 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:08:01.635786Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|1904430f-5282072a-164ad043-bb74c351_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T14:08:01.636977Z :INFO: [] MessageGroupId [src] SessionId [src|1904430f-5282072a-164ad043-bb74c351_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:08:01.638186Z :INFO: [] MessageGroupId [src] SessionId [src|1904430f-5282072a-164ad043-bb74c351_0] Write session will now close 2025-04-07T14:08:01.638251Z :DEBUG: [] MessageGroupId [src] SessionId [src|1904430f-5282072a-164ad043-bb74c351_0] Write session: aborting 2025-04-07T14:08:01.633372Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:08:01.633399Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:08:01.633727Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:08:01.634680Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|1904430f-5282072a-164ad043-bb74c351_0 2025-04-07T14:08:01.643815Z :INFO: [] MessageGroupId [src] SessionId [src|1904430f-5282072a-164ad043-bb74c351_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:08:01.643893Z :DEBUG: [] MessageGroupId [src] SessionId [src|1904430f-5282072a-164ad043-bb74c351_0] Write session: destroy 2025-04-07T14:08:01.666492Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|1904430f-5282072a-164ad043-bb74c351_0 grpc read done: success: 0 data: 2025-04-07T14:08:01.666526Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1904430f-5282072a-164ad043-bb74c351_0 grpc read failed 2025-04-07T14:08:01.666555Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1904430f-5282072a-164ad043-bb74c351_0 grpc closed 2025-04-07T14:08:01.666574Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1904430f-5282072a-164ad043-bb74c351_0 is DEAD 2025-04-07T14:08:01.667155Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:08:01.667524Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:08:01.667569Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7490572777202162947:2475] destroyed 2025-04-07T14:08:01.667625Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:08:01.692022Z :INFO: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Starting read session 2025-04-07T14:08:01.692080Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Starting cluster discovery 2025-04-07T14:08:01.692330Z :INFO: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30167: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30167
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30167. " 2025-04-07T14:08:01.692381Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Restart cluster discovery in 0.006592s 2025-04-07T14:08:01.700860Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Starting cluster discovery 2025-04-07T14:08:01.701371Z :INFO: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30167: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30167
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30167. " 2025-04-07T14:08:01.701429Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Restart cluster discovery in 0.011120s 2025-04-07T14:08:01.716851Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Starting cluster discovery 2025-04-07T14:08:01.717051Z :INFO: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30167: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30167
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30167. " 2025-04-07T14:08:01.717092Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Restart cluster discovery in 0.038836s 2025-04-07T14:08:01.761817Z :DEBUG: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Starting cluster discovery 2025-04-07T14:08:01.762237Z :NOTICE: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30167: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30167
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30167. " } 2025-04-07T14:08:01.762468Z :NOTICE: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30167: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30167
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30167. " } 2025-04-07T14:08:01.762625Z :INFO: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Closing read session. Close timeout: 0.000000s 2025-04-07T14:08:01.762752Z :NOTICE: [/Root] [/Root] [599cc368-c30a2cb6-bdc8ee36-f238f0d3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T14:07:55.634613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.634715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.634749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.634785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.636012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.636057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.636148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.639714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.705523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.705573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.726211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.726324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.726486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.734208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:55.734576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.735251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.735444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.737959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.741541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.741589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.741756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.750127Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.882653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.882820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.882992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.883204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.883268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.885174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.885370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.885485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.885531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.885554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.885575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.886985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.887030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.887058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.888100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.888135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.888187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.888250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.890998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.892365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.892519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.893377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.893501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.893539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.893755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.893800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.893949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.894028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.895606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.895643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.895780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.895820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.896161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.896220Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.896305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.896336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.896371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.896405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.896440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-04-07T14:07:55.896469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.896525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:55.896556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.896582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.898166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.898263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.898308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.898340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.898375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.898460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... se Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:62871" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:03.602662Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.602812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:08:03.603195Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:03.603255Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.604929Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:03.604990Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:03.606203Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:03.606552Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-04-07T14:08:03.606892Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-04-07T14:08:03.606978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-04-07T14:08:03.607388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.607457Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-04-07T14:08:03.607520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-04-07T14:08:03.607559Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-04-07T14:08:03.612177Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-04-07T14:08:03.612252Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-04-07T14:08:03.612878Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.612934Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.613122Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-07T14:08:03.618580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-04-07T14:08:03.618919Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.618962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:03.619104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-07T14:08:03.619683Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:03.619796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-04-07T14:08:03.620574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:08:03.620770Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-04-07T14:08:03.623256Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:08:03.624044Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:03.624101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:550:2512] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2025-04-07T14:08:02.760157Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:08:02.803971Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:08:03.232048Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2025-04-07T14:08:03.240329Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:08:03.268229Z: component=schemeshard, tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:08:03.606419Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:08:03.620078Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-07T14:08:02.800518Z, end_time=2025-04-07T14:08:32.848518Z AUDIT LOG checked line: 2025-04-07T14:08:03.620078Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-07T14:08:02.800518Z, end_time=2025-04-07T14:08:32.848518Z >> TExportToS3Tests::DropSourceTableBeforeTransferring >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore >> 
TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::TablePermissions [GOOD] >> KqpJoin::RightTableValuePredicate [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:02.721661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:02.721768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:02.721800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:02.721878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:02.721926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:02.721963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:02.722069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:02.722417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:02.794857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:02.794916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:02.805867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:02.806015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:02.806183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:02.811109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:02.811273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:02.811861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-04-07T14:08:02.812043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:02.813684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:02.814809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:02.814858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:02.814970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:02.815040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:02.815079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:02.815218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.821929Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:02.945482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:02.945748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.946062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:02.946309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:02.946382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.949729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:02.949879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:02.950075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.950145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:02.950178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:02.950209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:02.953723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.953786Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:02.953821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:02.955679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.955728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.955774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:02.955865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:02.959308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:02.961209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:02.961377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:02.962363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:02.962512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:02.962553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:02.962806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:02.962853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:02.963013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:02.963086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:02.965201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:02.965258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:02.965427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:02.965463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:02.965723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:02.965766Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:02.965867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:02.965911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:02.965961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:02.965998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:02.966027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:02.966056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:02.966119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:02.966152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:02.966180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:02.968045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:02.968155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:02.968194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:02.968227Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:02.968279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:02.968373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
3 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2025-04-07T14:08:06.701086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.701128Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.701248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:06.701376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:06.701894Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.701965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.701990Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:06.702015Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-07T14:08:06.702043Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:08:06.702862Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.702940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.702965Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:06.702990Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-07T14:08:06.703014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:08:06.703071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-07T14:08:06.704983Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:06.705526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-07T14:08:06.705587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-07T14:08:06.705625Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction 
is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-07T14:08:06.706327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-07T14:08:06.706450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:08:06.706745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-04-07T14:08:06.707290Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:06.707388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:06.707428Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-07T14:08:06.707544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.707599Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-07T14:08:06.707648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:06.707722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:08:06.707781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:06.707830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-07T14:08:06.707903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:06.707936Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-07T14:08:06.707966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-07T14:08:06.708036Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:08:06.708079Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-07T14:08:06.708110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-07T14:08:06.708136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-07T14:08:06.708777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 281474976710763 2025-04-07T14:08:06.710150Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:06.710184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:06.710321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:08:06.710417Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:06.710460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-07T14:08:06.710505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-07T14:08:06.711148Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.711226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.711262Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:06.711301Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T14:08:06.711342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:08:06.711712Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.711838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.711867Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:06.711895Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:08:06.711922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:06.712008Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-07T14:08:06.712050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:122:2148] 2025-04-07T14:08:06.714555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.714871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:06.714942Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-07T14:08:06.714987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-07T14:08:06.715021Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-07T14:08:06.715049Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-07T14:08:06.715076Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-07T14:08:06.716362Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:06.716429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:08:06.716469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:827:2764] TestWaitNotification: OK eventTxId 103 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:04.673170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:04.673260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:04.673321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:04.673358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:04.673409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:04.673437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:04.673489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:04.673769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:04.737009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:04.737080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:04.747970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:04.748061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:08:04.748244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:04.753883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:04.754071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:04.754750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:04.754946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:04.757319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:04.758565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:04.758621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:04.758747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:04.758800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:04.758857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:04.759026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.766932Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:04.890515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:04.890766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.890983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:04.891258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:04.891345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.893445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:04.893575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:04.893768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.893837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:04.893881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:04.893907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:04.896040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.896123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:04.896183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:04.898125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.898178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.898211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:04.898275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:04.901721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:04.903588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:04.903761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:04.904849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:04.904991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:04.905042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:04.905285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:04.905337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:04.905490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:04.905594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:04.908187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:08:04.908228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:04.908383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:04.908422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:04.908701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:04.908745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:04.908842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:04.908877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:04.908927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:04.908977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:04.909018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:04.909044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:04.909110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:04.909154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:04.909180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:04.910885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:04.910977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:04.911014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:04.911046Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:04.911076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:04.911179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
lectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.299074Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-07T14:08:07.300609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.300783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.300828Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:07.300917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:07.301034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:07.302415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-07T14:08:07.302508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-07T14:08:07.302758Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:07.302839Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:07.302886Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-07T14:08:07.302993Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-07T14:08:07.303132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:28749 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4595D5FB-7CFA-4D2B-AFC2-D3817527B96E amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:28749 Accept: */* 
Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6F102C39-EC66-488A-B61B-7054962F8A60 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-07T14:08:07.361751Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:07.361799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:08:07.362037Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:07.362068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-04-07T14:08:07.362502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.362548Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:07.363157Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:07.363243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:07.363267Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-07T14:08:07.363290Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T14:08:07.363316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:08:07.363375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:28749 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 223E886C-510E-4CD9-A142-6A0215844563 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-04-07T14:08:07.366617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28749 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 778ADDD8-CDD4-4FA3-9506-B0A9AD517EDD amz-sdk-request: attempt=1 
content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-04-07T14:08:07.388222Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 12884904308 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:07.388277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-07T14:08:07.388384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 12884904308 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:07.388485Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 449 RawX2: 12884904308 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:07.388539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:07.388574Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.388609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:08:07.388645Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T14:08:07.388823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:07.391251Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.391516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:07.391551Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-07T14:08:07.391648Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-07T14:08:07.391671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:07.391702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-07T14:08:07.391757Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:122:2148] message: TxId: 281474976710759 2025-04-07T14:08:07.391822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:07.391851Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T14:08:07.391877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-07T14:08:07.391961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:07.394044Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T14:08:07.394104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-07T14:08:07.396080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:08:07.396125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:475:2439] TestWaitNotification: OK eventTxId 103 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 5994, MsgBus: 24848 2025-04-07T14:07:34.056311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572664486969906:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:34.056348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100a/r3tmp/tmpKh3hsG/pdisk_1.dat 2025-04-07T14:07:34.353353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:34.357998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:34.358088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:34.361221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5994, node 1 2025-04-07T14:07:34.432864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:34.432899Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:34.432912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:34.433019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24848 TClient is connected to server localhost:24848 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:34.871342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:34.894217Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:07:34.905520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:35.037399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:35.188033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:35.264255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:36.594591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572673076906224:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:36.594704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:36.792021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.857062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.921022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.961001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:37.033837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:37.114461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:37.195010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572677371874031:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:37.195080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:37.195222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572677371874036:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:37.199560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:37.214422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572677371874038:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:38.210177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.234735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.259933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.290401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.321115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:39.056688Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572664486969906:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:39.062675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 12502, MsgBus: 18043 2025-04-07T14:07:41.021241Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572695255061998:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:41.021286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100a/r3tmp/tmp7Vnii9/pdisk_1.dat 2025-04-07T14:07:41.127415Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12502, node 2 2025-04-07T14:07:41.168108Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:41.168199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:41.169727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:41.182524Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:41.182547Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:41.182554Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:41.182659Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18043 TClient is connected to server localhost:18043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:41.632081 ... 5661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:56.024232Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572758656435901:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:56.024324Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:56.077398Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.128703Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.172435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.222170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.272420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.318952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:56.379217Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572758656436397:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:56.379323Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:56.379566Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490572758656436402:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:56.383071Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:56.395759Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490572758656436404:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:57.428751Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490572741476565027:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:57.428819Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:57.597880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24306, MsgBus: 11824 2025-04-07T14:07:59.906857Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490572771103778666:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:59.906934Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00100a/r3tmp/tmpubWaLP/pdisk_1.dat 2025-04-07T14:08:00.029653Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:00.041762Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:00.041865Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:00.044191Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24306, node 5 2025-04-07T14:08:00.121289Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:00.121316Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:00.121327Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:00.121438Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11824 TClient is connected to server localhost:11824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:00.746026Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:00.756806Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:00.783224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:00.894985Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:01.111012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:01.237290Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:03.801242Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572788283649539:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:03.801339Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:03.856424Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:03.903040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:03.956239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:03.994102Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:04.074601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:04.123691Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:04.232432Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572792578617342:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:04.232558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:04.232769Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490572792578617348:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:04.238084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:04.249633Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490572792578617350:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:04.907456Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490572771103778666:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:04.907531Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:08:05.580082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TBSV::ShardsNotLeftInShardsToDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:05.858679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:05.858754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:05.858808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:05.858832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:05.858865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:05.858896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:05.858942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-07T14:08:05.859223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:05.914313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:05.914358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:05.926434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:05.926523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:05.926651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:05.934491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:05.934677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:05.935235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:05.935421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:05.937190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:05.938411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:05.938464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:05.938594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:05.938645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:05.938685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:05.938813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:05.944036Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:06.055355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:06.055610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.055844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:06.057152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:06.057224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.060414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-07T14:08:06.060552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:06.060759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.060828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:06.060862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:06.060890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:06.063856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.063913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:06.063950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:06.065850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.065893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.065928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:06.065994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:06.070249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:06.072346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:06.072521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:06.073532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:06.073657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:06.073699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:06.073949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:06.073998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:06.074145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:06.074218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:06.077276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:06.077324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:06.077496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:06.077534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:06.077762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:06.077804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:06.077890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:06.077923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:06.077958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:06.078006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:06.078033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:06.078076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:06.078139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:06.078184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:06.078213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:06.079922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:06.080039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:06.080077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:06.080110Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:06.080171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:06.080275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.125393Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-07T14:08:09.127104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.127232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.127277Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:09.127347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:09.127482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:09.130312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-07T14:08:09.130441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-04-07T14:08:09.130748Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:09.130858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:09.130915Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-07T14:08:09.131079Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-07T14:08:09.131206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:22143 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 674132A8-B80A-4205-A3D2-29B1386B2047 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 
2025-04-07T14:08:09.204289Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:09.204338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-04-07T14:08:09.204568Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:09.204604Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-04-07T14:08:09.205276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.205337Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:09.205841Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:09.205912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-07T14:08:09.205938Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-07T14:08:09.205967Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-07T14:08:09.205996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-07T14:08:09.206059Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:22143 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B85A8AA4-E170-4A2C-8FDD-8AB1541102D4 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T14:08:09.212249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22143 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C7169992-CD29-41F2-9266-E022B40694EE amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:22143 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7663B1EB-F214-483A-91C6-DC73726D5202 amz-sdk-request: attempt=1 
content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-04-07T14:08:09.231463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 494 RawX2: 12884904343 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:09.231512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-04-07T14:08:09.231645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 494 RawX2: 12884904343 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:09.231765Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 494 RawX2: 12884904343 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-07T14:08:09.231847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:09.231887Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.231923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:08:09.231965Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T14:08:09.232100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:09.237839Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.238103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.238142Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-07T14:08:09.238253Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-07T14:08:09.238281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:09.238337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-07T14:08:09.238399Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:122:2148] message: TxId: 281474976710759 2025-04-07T14:08:09.238437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:09.238469Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T14:08:09.238496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-07T14:08:09.238595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T14:08:09.240446Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T14:08:09.240509Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-07T14:08:09.242017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:09.242068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:522:2476] TestWaitNotification: OK eventTxId 102 >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:09.831585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:09.831728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:09.831782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:09.831839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:09.833371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:09.833453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:09.833564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:09.837135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:09.918706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:09.918785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:09.934182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:09.934357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:09.934538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:09.942633Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:09.942960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:09.944907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:09.945260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:09.951534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:09.961101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:09.961195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:09.961368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:09.961441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:09.961579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:09.961771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:09.972156Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:10.102100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:10.103337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.104345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:10.108697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:10.108824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.114879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:10.117996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:10.118379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.118445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:10.118548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-07T14:08:10.118584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:10.121644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.121729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:10.121767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:10.123869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.123922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.123963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:10.124028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:10.128849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:10.132404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:10.133737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:10.135998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:10.136174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:10.136228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:10.136571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:10.136627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:10.136854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:10.136964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:10.141908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:10.142010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-07T14:08:10.142202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:10.142246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:10.142490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.142543Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:10.142649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:10.142685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:10.142741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:10.142811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:10.142846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:10.142876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:10.142953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:10.143021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:10.143055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:10.145192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:10.145314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:10.145352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:10.145397Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:10.145467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:10.145576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
TICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.419902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.427146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:08:10.429032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:10.430099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:10.430183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:10.430698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:10.430767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:08:10.430866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:10.432417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.433768Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.433819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:10.433863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:10.435196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:398:2375] sender: [1:463:2058] recipient: [1:15:2062] 2025-04-07T14:08:10.481904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:10.482132Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 239us result status StatusPathDoesNotExist 2025-04-07T14:08:10.482302Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been 
resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:08:10.483309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:398:2375] sender: [1:464:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:398:2375] sender: [1:467:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:398:2375] sender: [1:468:2058] recipient: [1:466:2428] Leader for TabletID 72057594046678944 is [1:469:2429] sender: [1:470:2058] recipient: [1:466:2428] 2025-04-07T14:08:10.536115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:10.536220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:10.536267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:10.536306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:10.536345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:10.536373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:10.536443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:10.536796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:10.556259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:10.557557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:10.557734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:10.557914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:10.557953Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:10.558242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:10.558900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.559984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.560078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.560175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.560350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.560643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.560775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.561963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.562118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.562328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.562439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.562490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.562533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:08:10.575303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:10.575390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:10.576326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:10.576391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:10.576439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:10.577037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for 
TabletID 72057594046678944 is [1:469:2429] sender: [1:529:2058] recipient: [1:15:2062] 2025-04-07T14:08:10.617832Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:10.618071Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 259us result status StatusPathDoesNotExist 2025-04-07T14:08:10.618227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:11.365805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:11.365889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:11.365926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:11.365973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:11.366010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:11.366030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:11.366089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:11.366311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:11.418034Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:11.418087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:11.427627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:11.427774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:11.427944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:11.432794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:11.432973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:11.433522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.433742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:11.437395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:11.438602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:11.438666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:11.438773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:11.438825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:11.438867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:11.439012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.445395Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:11.564203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:11.564414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.564603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:11.564815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:11.564935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.569632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.569765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:11.569941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.569991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:11.570026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:11.570057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:11.572120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.572172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:11.572209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:11.573965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.574012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.574045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:11.574084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:11.577655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:11.579526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:11.579717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:11.580737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.580897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:11.580953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:11.581182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:11.581234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:11.581400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:11.581487Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:11.583449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:11.583500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:11.583702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:11.583736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:11.583983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:11.584020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:11.584098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:11.584127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:11.584310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:11.584356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:11.584387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:11.584414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:11.584473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:11.584529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:11.584560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:11.586409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:11.586518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:11.586558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:11.586598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:11.586667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:11.586772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
UG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.669122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-07T14:08:11.669435Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:08:11.670245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:08:11.670403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-07T14:08:11.671407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:11.674064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:11.674541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:08:11.674648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:08:11.675289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.675404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:11.675462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:08:11.675571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:08:11.675675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:08:11.675704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:08:11.675763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:11.675829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:08:11.675862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:08:11.675907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:08:11.675936Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:08:11.675993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:08:11.676097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:08:11.676136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:08:11.676168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:08:11.676205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:08:11.679477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:08:11.679545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:08:11.680050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:08:11.680094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:08:11.680733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:08:11.680767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:08:11.680894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:11.680939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:11.681079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:08:11.681196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:11.681223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:08:11.681250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:08:11.681625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:08:11.681711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:08:11.681741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:08:11.681788Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:08:11.681822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:08:11.682133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:11.682173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:08:11.682227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:11.682531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:08:11.682593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:08:11.682615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:08:11.682649Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:08:11.682672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:11.682725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:08:11.683301Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-07T14:08:11.683657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:11.683850Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-07T14:08:11.684409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:08:11.685337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:11.686433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:08:11.686515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:11.687900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:08:11.687977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:08:11.688262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:08:11.688299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:08:11.688653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:11.688777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:11.688818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:387:2368] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-07T14:08:11.689220Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:08:11.689328Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] Test command err: 2025-04-07T14:06:13.955614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:06:13.955866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:06:13.956103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a7c/r3tmp/tmpxWejdk/pdisk_1.dat 2025-04-07T14:06:14.422435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.474150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.534266Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:06:14.536570Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:06:14.536862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:14.537004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:14.548512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:14.806655Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:06:14.806725Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:06:14.807867Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:611:2520] 2025-04-07T14:06:14.913845Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:06:14.919246Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:06:14.919393Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:06:14.919661Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:06:14.919839Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:06:14.919916Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:06:14.922346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.922726Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:06:14.923209Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:06:14.923276Z node 1 :TX_PROXY DEBUG: Actor# [1:611:2520] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:06:14.958133Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:14.958914Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:14.959343Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:636:2541] 2025-04-07T14:06:14.959617Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:14.972348Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:627:2535], Recipient [1:636:2541]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:15.000549Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:15.000690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:15.002372Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:06:15.002465Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:06:15.002531Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:06:15.002888Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:15.031561Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:06:15.032547Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:15.032705Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:652:2550] 2025-04-07T14:06:15.032744Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:06:15.032781Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:06:15.032815Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:06:15.033173Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:636:2541], Recipient [1:636:2541]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:15.034143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:15.035022Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:06:15.035121Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:06:15.035240Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:641:2543], Recipient [1:636:2541]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:15.035286Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:15.035329Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:641:2543], sessionId# [0:0:0] 2025-04-07T14:06:15.035668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:06:15.035710Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:15.035754Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:06:15.035778Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:06:15.035801Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:06:15.035828Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:06:15.035880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:06:15.035939Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:641:2543] 2025-04-07T14:06:15.035969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:15.036104Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:06:15.038541Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:06:15.038604Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:06:15.038684Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:06:15.038726Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:06:15.038769Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:06:15.038801Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:06:15.038858Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:15.039122Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:15.039147Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:06:15.039176Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:06:15.039201Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:15.039302Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:06:15.039327Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:06:15.039353Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:06:15.039385Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:06:15.039412Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:15.040533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:653:2551], Recipient [1:636:2541]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:06:15.040578Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:06:15.051301Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:06:15.051371Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:06:15.051407Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:06:15.051459Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... ipient [16:990:2804]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:09.967312Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:09.967400Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:985:2801], serverId# [16:1009:2815], sessionId# [0:0:0] 2025-04-07T14:08:09.967582Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [16:740:2609], Recipient [16:990:2804]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T14:08:09.967625Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T14:08:09.967710Z node 16 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-07T14:08:09.967873Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T14:08:09.968484Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:09.979454Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T14:08:09.979667Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:990:2804], Recipient [16:740:2609]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-07T14:08:09.979730Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:08:09.979845Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 ... 
reading final result 2025-04-07T14:08:10.530276Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:08:10.530463Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-07T14:08:10.532052Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:1032:2835], Recipient [16:990:2804]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:10.532156Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:10.532234Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:1031:2834], serverId# [16:1032:2835], sessionId# [0:0:0] 2025-04-07T14:08:10.543680Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89908vc7jkqzh2wbj8w6xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NDFmZjAyN2MtOTA2Yjk5YTEtYTFmMmRmNjMtM2VlODE0NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:08:10.549786Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1043:2838], Recipient [16:990:2804]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:08:10.549918Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:10.550039Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:10.550190Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2025-04-07T14:08:10.550259Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:10.550333Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:10.550392Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:10.550462Z node 16 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037888 2025-04-07T14:08:10.550527Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2025-04-07T14:08:10.550556Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:10.550579Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:10.550604Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:10.550747Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:08:10.551049Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v6000/18446744073709551615 2025-04-07T14:08:10.551130Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[16:1043:2838], 0} after executionsCount# 1 
2025-04-07T14:08:10.551210Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1043:2838], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:10.551335Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1043:2838], 0} finished in read 2025-04-07T14:08:10.551439Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2025-04-07T14:08:10.551466Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:10.551490Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:10.551518Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:10.551568Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2025-04-07T14:08:10.551591Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:10.551626Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037888 has finished 2025-04-07T14:08:10.551690Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:10.551864Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:08:10.552195Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1045:2839], Recipient [16:740:2609]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:08:10.552262Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-07T14:08:10.552308Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-07T14:08:10.552365Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:08:10.552391Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-07T14:08:10.552414Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:10.552442Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T14:08:10.552482Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-07T14:08:10.552513Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:08:10.552538Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:10.552561Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-07T14:08:10.552586Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-07T14:08:10.552686Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 
Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:08:10.552874Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v6000/18446744073709551615 2025-04-07T14:08:10.552913Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[16:1045:2839], 0} after executionsCount# 1 2025-04-07T14:08:10.552948Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1045:2839], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:10.552999Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1045:2839], 0} finished in read 2025-04-07T14:08:10.553037Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:08:10.553063Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-07T14:08:10.553089Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:08:10.553112Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:08:10.553147Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-07T14:08:10.553169Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:08:10.553193Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-07T14:08:10.553218Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-07T14:08:10.553273Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-07T14:08:10.554580Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1045:2839], Recipient [16:740:2609]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:08:10.554651Z node 16 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } 2025-04-07T14:08:10.554733Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1043:2838], Recipient [16:990:2804]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:08:10.554763Z node 16 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } } >> TBSV::ShouldLimitBlockStoreVolumeDropRate |92.8%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapAggregations::AggregationCountPushdown [GOOD] >> TGroupMapperTest::MakeDisksUnusable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T14:07:59.276410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:59.276501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:59.276542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:59.276578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:59.276620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:59.276680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:59.276753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:59.277051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:59.355381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:59.355437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:59.368376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:59.368501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:59.368680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:59.375942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:59.376321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:59.376959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.377164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:59.379529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.380772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:59.380840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.381006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:59.381057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:59.381091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:59.381250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.387293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T14:07:59.524172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:59.524436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.524658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:59.526899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:59.526983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.529394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.529597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:59.529799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.529864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:59.529897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:59.529931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:59.534268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.534328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:59.534366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:59.536337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.536386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.536429Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.536500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:59.540130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:59.542310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:59.542497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:59.543526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:59.543687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:59.543735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.544032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:59.544087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:59.544250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:59.544330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:59.546383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:59.546425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:59.546582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:59.546629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:59.547000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:59.547046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:59.547140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:59.547172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:07:59.547219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:59.547254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:59.547291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:59.547320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:59.547393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:59.547427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:59.547457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:59.549340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:59.549462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:59.549497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:59.549531Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:59.549565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:59.549664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d manually' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2025-04-07T14:08:12.659180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:12.659231Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:12.659298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:12.659429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:12.660193Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.660279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.660302Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:12.660343Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-07T14:08:12.660395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:08:12.661906Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.662005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.662037Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:12.662065Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-07T14:08:12.662126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:12.662226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-07T14:08:12.669349Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:12.669756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-07T14:08:12.669800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-07T14:08:12.669854Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction 
is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-07T14:08:12.669963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-07T14:08:12.670086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-04-07T14:08:12.672385Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:12.672537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:12.672595Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-04-07T14:08:12.672739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-07T14:08:12.672828Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-07T14:08:12.672875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:12.672942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:08:12.673007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:08:12.673041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-07T14:08:12.673091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-07T14:08:12.673130Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-07T14:08:12.673161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-07T14:08:12.673248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:12.673305Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-07T14:08:12.673341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-07T14:08:12.673371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-07T14:08:12.674276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.674385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 281474976710763 2025-04-07T14:08:12.676175Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:12.676218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:12.676372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:08:12.676474Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:12.676519Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-07T14:08:12.676582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-07T14:08:12.677392Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.677472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.677506Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:12.677550Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T14:08:12.677603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:08:12.678559Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.678637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.678665Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-07T14:08:12.678695Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:08:12.678725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:08:12.678809Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-07T14:08:12.678869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:122:2148] 2025-04-07T14:08:12.681701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.682099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-07T14:08:12.682221Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-07T14:08:12.682277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-07T14:08:12.682314Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-07T14:08:12.682342Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-07T14:08:12.682370Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-07T14:08:12.684073Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:12.687419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:08:12.687495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:750:2691] TestWaitNotification: OK eventTxId 103 >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::MonteCarlo >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TGroupMapperTest::Mirror3dc |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationCountPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 8847, MsgBus: 9928 2025-04-07T14:02:07.468898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571258904251228:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:08.173064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ca5/r3tmp/tmpnUg1zd/pdisk_1.dat 2025-04-07T14:02:08.761040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:02:08.810952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:02:08.811137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:02:08.822066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8847, 
node 1 2025-04-07T14:02:09.177304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:02:09.177328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:02:09.177347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:02:09.177447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9928 TClient is connected to server localhost:9928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:02:10.759107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:02:10.777452Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:02:10.787305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:02:10.911120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:10.911393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:10.911664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:10.911792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:10.911905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:10.912023Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:10.912164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:10.912284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:10.912411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:10.912540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:10.912637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:10.925231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490571271789153643:2293];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:11.028496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:11.028566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:11.033032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:11.033199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:11.033298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:11.033403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:11.033506Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:11.033600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:02:11.033689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:02:11.033787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:02:11.033873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:02:11.033969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490571271789153677:2294];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:02:11.099693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:02:11.099753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:02:11.099963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:02:11.100056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:02:11.100139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:02:11.100222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:02:11.100303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:02:11.100386Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490571271789153641:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... ne=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:02:11.205183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:02:11.205209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:02:11.205327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:02:11.205345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:02:11.205430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:02:11.205476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:02:11.205524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:02:11.205544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:02:11.205572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:02:11.205602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:02:11.205868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:02:11.205951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:02:11.206112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:02:11.206148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:02:11.206275Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:02:11.206299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:02:11.206444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:02:11.206464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:02:11.206548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:02:11.206565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:02:11.260733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; 2025-04-07T14:02:12.367183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571258904251228:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:02:12.367222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2025-04-07T14:02:23.757037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:02:23.757075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:11.565056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572822272352305:4420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:11.565175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:11.568821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572822272352317:4423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:11.573443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:08:11.588214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572822272352319:4424], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } !!! Pushdown query execution time: 1295 !!! 2025-04-07T14:08:12.841710Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744034892000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '505) '('"_id" '"f23277ff-bac5fe7c-b1f2a226-7cb4f7f2") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) 
(lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1053) '('"_id" '"11577ca0-3bfdf754-2658077b-325a58a3")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1156) '('"_id" '"2ec84619-ebaaee3b-783628ef-a9f346cd") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> TGroupMapperTest::Mirror3dc [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:13.676839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:13.676962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:13.677004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:13.677059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:13.677105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:13.677134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:13.677192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:13.677490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:13.744503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:13.744561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:13.756172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:13.756314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:13.756477Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:13.762606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:13.762800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:13.763400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:13.763632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:13.765500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:13.766727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:13.766787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:13.766927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:13.766973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:13.767011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:13.767169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.777601Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:13.918821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:13.919033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.919240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:13.919465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:13.919525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.929550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:13.929689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:13.929858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.929906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-04-07T14:08:13.929940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:13.929988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:13.935305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.935373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:13.935407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:13.941704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.941763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.941801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:13.941846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:13.945250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:13.949894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:13.950082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:13.951038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:13.951190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:13.951245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:13.951487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:13.951527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:13.951696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:13.951812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:13.954090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:08:13.954145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:13.954322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:13.954355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:13.954571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:13.954613Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:13.954705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:13.954738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:13.954779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:13.954848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:13.954885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:13.954925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:13.954995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:13.955044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:13.955076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:13.956921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:13.957021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:13.957056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:13.957092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:13.957160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:13.957262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
HARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:14.970844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-04-07T14:08:14.970934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:14.972526Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2025-04-07T14:08:14.973152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-07T14:08:14.973347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-04-07T14:08:14.973491Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 Forgetting tablet 72075186233409568 2025-04-07T14:08:14.974188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T14:08:14.974446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-07T14:08:14.974587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2025-04-07T14:08:14.975152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T14:08:14.976578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-07T14:08:14.976701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-04-07T14:08:14.977028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-07T14:08:14.977060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-07T14:08:14.977402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:14.977487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:14.977541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 
5000028, at schemeshard: 72057594046678944 2025-04-07T14:08:14.977639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-07T14:08:14.977733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-07T14:08:14.977755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-07T14:08:14.977808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:14.977847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-07T14:08:14.977881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-07T14:08:14.977916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-07T14:08:14.977940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-07T14:08:14.977957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-07T14:08:14.978062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-07T14:08:14.978089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-07T14:08:14.978122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-04-07T14:08:14.978140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-04-07T14:08:14.978664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-07T14:08:14.978696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-07T14:08:14.981066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:08:14.981105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:08:14.981268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:14.981308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:14.981456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-04-07T14:08:14.981594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:14.981632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-07T14:08:14.981670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-04-07T14:08:14.982074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T14:08:14.982159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T14:08:14.982193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-07T14:08:14.982223Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-04-07T14:08:14.982263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-07T14:08:14.982534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:14.982571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-07T14:08:14.982631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:14.983041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T14:08:14.983105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-07T14:08:14.983124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-07T14:08:14.983170Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-04-07T14:08:14.983194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:14.983254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-07T14:08:14.983437Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-04-07T14:08:14.983577Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-04-07T14:08:14.983729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-07T14:08:14.984099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-07T14:08:14.986611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T14:08:14.987458Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:08:14.989303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-07T14:08:14.989410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-07T14:08:14.989509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-04-07T14:08:14.990199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-04-07T14:08:14.990259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-04-07T14:08:14.991012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-04-07T14:08:14.991113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-04-07T14:08:14.991152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1682:3553] TestWaitNotification: OK eventTxId 129 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |92.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore >> TGroupMapperTest::ReassignGroupTest3dc >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> TGroupMapperTest::MapperSequentialCalls ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-07T14:06:10.792387Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744034770792352 2025-04-07T14:06:11.039145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572306746746811:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.039266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.058885Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572305382137028:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.178356Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.194101Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e20/r3tmp/tmpDuDalQ/pdisk_1.dat 2025-04-07T14:06:11.221734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.455565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.455638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.456433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.456525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.459757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:11.459919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.460678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.482474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5131, node 1 2025-04-07T14:06:11.508963Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete 
/Root Strong=0 2025-04-07T14:06:11.508999Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.609154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001e20/r3tmp/yandex8OA8fI.tmp 2025-04-07T14:06:11.609184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001e20/r3tmp/yandex8OA8fI.tmp 2025-04-07T14:06:11.609313Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001e20/r3tmp/yandex8OA8fI.tmp 2025-04-07T14:06:11.609412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:11.784898Z INFO: TTestServer started on Port 22030 GrpcPort 5131 TClient is connected to server localhost:22030 PQClient connected to localhost:5131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:12.066826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:06:14.016604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319631649507:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.016749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.016926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319631649519:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.023612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:06:14.038615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572319631649522:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.420494Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572318267039106:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420822Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTdhYjI5MGYtYzA5NDc0ZGMtYzEzMGY0YmItZjNiNzVjYzM=, ActorId: [2:7490572318267039066:2280], ActorState: ExecuteState, TraceId: 01jr895f1m72g9g31640gvr74n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.420494Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572319631649621:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.425514Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.426908Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjBkY2Q4Y2YtZDgxNmJlYzUtOGI0NGQzNzktNmM2YjkzMjg=, ActorId: [1:7490572319631649505:2302], ActorState: ExecuteState, TraceId: 01jr895ezy1a0hfth0vdq52nrv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.427215Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.464915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.610403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:06:14.727923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5131", true, true, 1000); 2025-04-07T14:06:15.114242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr895frn7ky3jgpjwrms0dye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y4ZDhkZTItOTFlZTc1MjMtNzVlMDc5MmEtNDAyMTg0ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490572323926617315:2967] 2025-04-07T14:06:16.039089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572306746746811:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:16.039132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:06:16.058013Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490572305382137028:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:16.058068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:06:21.056182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:5131 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T14:06:21.154521Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:5131 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" Read ... dn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr8992kmeaefchs5xw8qzcng" } } YdbStatus: UNAVAILABLE ConsumedRu: 335 } 2025-04-07T14:08:12.695121Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2025-04-07T14:08:12.706103Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:14010" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2025-04-07T14:08:12.706171Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Start write session. 
Will connect to endpoint: localhost:14010 2025-04-07T14:08:12.711822Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-07T14:08:12.712759Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:08:12.712804Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-07T14:08:12.715473Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-07T14:08:12.715618Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:48342 2025-04-07T14:08:12.715633Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48342 proto=v1 topic=test-topic durationSec=0 2025-04-07T14:08:12.715643Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:08:12.717505Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-07T14:08:12.717679Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:08:12.717695Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:08:12.717710Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:08:12.717731Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572827370857393:2531] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:08:12.721878Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490572827370857393:2531] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:08:12.898642Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715697. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:08:12.898771Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490572827370857406:2533] TxId: 281474976715697. Ctx: { TraceId: 01jr8992xj1dyabkavh3nqsj03, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZmI1YWMwZjItZGRhMjRmNjQtNWJkMDZkYWMtZDU2Y2FlNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:08:12.899097Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ZmI1YWMwZjItZGRhMjRmNjQtNWJkMDZkYWMtZDU2Y2FlNWQ=, ActorId: [15:7490572827370857394:2533], ActorState: ExecuteState, TraceId: 01jr8992xj1dyabkavh3nqsj03, Create QueryResponse for error on request, msg: 2025-04-07T14:08:12.900314Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7490572827370857393:2531] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=ZmI1YWMwZjItZGRhMjRmNjQtNWJkMDZkYWMtZDU2Y2FlNWQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr8992xj1dyabkavh7b093z2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-07T14:08:12.900450Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=ZmI1YWMwZjItZGRhMjRmNjQtNWJkMDZkYWMtZDU2Y2FlNWQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr8992xj1dyabkavh7b093z2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-07T14:08:12.900849Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-07T14:08:12.902767Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=ZmI1YWMwZjItZGRhMjRmNjQtNWJkMDZkYWMtZDU2Y2FlNWQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr8992xj1dyabkavh7b093z2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-07T14:08:12.902805Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session will restart in 2.000000s 2025-04-07T14:08:12.902918Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: Do CDS request 2025-04-07T14:08:12.902951Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Do schedule cds request after 2000 ms 2025-04-07T14:08:13.199881Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720683. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:13.199987Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7490572824821745441:2446] TxId: 281474976720683. Ctx: { TraceId: 01jr8993661sbpm3d1snkma1c1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=OTM0YzFiYTgtNzQ3ZmFhZTMtOTMxNjRjMjEtYTczODNmMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:13.200286Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=OTM0YzFiYTgtNzQ3ZmFhZTMtOTMxNjRjMjEtYTczODNmMzM=, ActorId: [16:7490572824821745438:2446], ActorState: ExecuteState, TraceId: 01jr8993661sbpm3d1snkma1c1, Create QueryResponse for error on request, msg: 2025-04-07T14:08:13.202388Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr8993678r87z43cdxeg2e94" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T14:08:13.387398Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:13.387506Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490572831665824757:2535] TxId: 281474976715699. Ctx: { TraceId: 01jr8993c8471b8a1njsy62fs3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=N2FmZjQwNDgtYmVmNTViMDctNjBlYjBlOTMtZGJlNzYzNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:13.387828Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=N2FmZjQwNDgtYmVmNTViMDctNjBlYjBlOTMtZGJlNzYzNjA=, ActorId: [15:7490572831665824754:2535], ActorState: ExecuteState, TraceId: 01jr8993c8471b8a1njsy62fs3, Create QueryResponse for error on request, msg: 2025-04-07T14:08:13.389101Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr8993c8471b8a1njt44hb8g" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T14:08:13.704444Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: close. Timeout = 0 ms 2025-04-07T14:08:13.704504Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session will now close 2025-04-07T14:08:13.704553Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: aborting 2025-04-07T14:08:13.705231Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-07T14:08:13.705272Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1de8f2a-6c10ea1a-944a53ef-bf73af98_0] Write session: destroy 2025-04-07T14:08:14.046449Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:14.046575Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490572831665824832:2536] TxId: 281474976715701. Ctx: { TraceId: 01jr8993gw65qjrjpka9w527m6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=Y2Y0MjI1ODItNmMxNmJjOGMtNjY0MjE4MjMtZGZlYTlkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:08:14.046936Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=Y2Y0MjI1ODItNmMxNmJjOGMtNjY0MjE4MjMtZGZlYTlkMA==, ActorId: [15:7490572831665824789:2536], ActorState: ExecuteState, TraceId: 01jr8993gw65qjrjpka9w527m6, Create QueryResponse for error on request, msg: 2025-04-07T14:08:14.048066Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr89940wcd2tb1s36m3tagkk" } } YdbStatus: UNAVAILABLE ConsumedRu: 331 } |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict >> TGroupMapperTest::Block42_2disk >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TGroupMapperTest::NonUniformCluster >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::SanitizeGroupTest3dc |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TMultiversionObjectMap::MonteCarlo >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-04-07T14:06:39.643136Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:39.710953Z node 
1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:39.711009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:39.712952Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:39.713342Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:39.713598Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:39.724435Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:39.779497Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:39.779979Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:39.781595Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:39.781722Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:39.781781Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:39.782064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:39.810815Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:39.810985Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:39.811105Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:39.811147Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:39.811181Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:39.811216Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:39.811484Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:39.811539Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:39.811899Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:39.811996Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:39.812048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:39.812090Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:39.812161Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:39.812194Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:39.812227Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:39.812254Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:39.812293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:39.842578Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:39.842645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2025-04-07T14:06:39.842715Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:39.844393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:39.844434Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:39.844497Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:39.844636Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:39.844710Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:39.844758Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:39.844801Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:39.844838Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:39.844869Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:39.844904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:39.845146Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:39.845202Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:39.845236Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:39.845291Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:39.845344Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:39.845372Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:39.845399Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:39.845421Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:39.845437Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:39.869736Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:39.869796Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:39.869843Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:39.869891Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:39.869952Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:39.870442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient 
[1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:39.870486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:39.870521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:39.870630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:39.870659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:39.870772Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:39.870846Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:39.870881Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:39.870913Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:39.874363Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:39.874423Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:39.874594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:39.874624Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:39.874663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:39.874695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:39.874726Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:39.874762Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:39.874803Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:39.874847Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:39.874879Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:39.874910Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:39.874937Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:39.875109Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:39.875142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:39.875174Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:39.875196Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:39.875230Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:39.875290Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 
9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:39.875313Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:39.875339Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:39.875369Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:39.875426Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:39.875457Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:39.875498Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:39.875541Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:39.875559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:39.875580Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437186 for LoadAndWaitInRS 2025-04-07T14:08:14.906442Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit LoadAndWaitInRS 2025-04-07T14:08:14.906472Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-04-07T14:08:14.906501Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit LoadAndWaitInRS 2025-04-07T14:08:14.906527Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit ExecuteDataTx 2025-04-07T14:08:14.906556Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit ExecuteDataTx 2025-04-07T14:08:14.907425Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437186 with status COMPLETE 2025-04-07T14:08:14.907482Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 3, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 21, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:08:14.907531Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is ExecutedNoMoreRestarts 2025-04-07T14:08:14.907559Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit ExecuteDataTx 2025-04-07T14:08:14.907588Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompleteOperation 2025-04-07T14:08:14.907619Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompleteOperation 2025-04-07T14:08:14.907882Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is DelayComplete 2025-04-07T14:08:14.907916Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit CompleteOperation 2025-04-07T14:08:14.907947Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompletedOperations 2025-04-07T14:08:14.907979Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompletedOperations 2025-04-07T14:08:14.908018Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-04-07T14:08:14.908043Z node 4 :TX_DATASHARD TRACE: Advance 
execution plan for [1000004:403] at 9437186 executing on unit CompletedOperations 2025-04-07T14:08:14.908072Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437186 has finished 2025-04-07T14:08:14.908103Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:14.908130Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-07T14:08:14.908159Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-07T14:08:14.908187Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-07T14:08:14.928700Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:08:14.928763Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit CompleteOperation 2025-04-07T14:08:14.928828Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:97:2132], exec latency: 4 ms, propose latency: 6 ms 2025-04-07T14:08:14.928891Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-07T14:08:14.928930Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:08:14.929154Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:08:14.929183Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2025-04-07T14:08:14.929212Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2025-04-07T14:08:14.929263Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:08:14.929295Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-07T14:08:14.929324Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-07T14:08:14.929350Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-04-07T14:08:14.929387Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:08:14.929436Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-07T14:08:14.929464Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-07T14:08:14.930013Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:439:2389], Recipient [4:334:2307]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-07T14:08:14.930056Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:08:14.930090Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2025-04-07T14:08:14.930441Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:439:2389], Recipient [4:230:2225]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-07T14:08:14.930479Z node 4 :TX_DATASHARD TRACE: StateWork, processing 
event TEvTxProcessing::TEvReadSet 2025-04-07T14:08:14.930512Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2025-04-07T14:08:14.930574Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-07T14:08:14.930613Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184origin=9437186 2025-04-07T14:08:14.930675Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-07T14:08:14.930750Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:439:2389], Recipient [4:334:2307]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-07T14:08:14.930778Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:08:14.930810Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2025-04-07T14:08:14.931120Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:230:2225], Recipient [4:230:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:14.931157Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:14.931200Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:08:14.931231Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:08:14.931265Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-04-07T14:08:14.931296Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-04-07T14:08:14.931328Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-07T14:08:14.931358Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-04-07T14:08:14.931389Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:08:14.931416Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:14.934393Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-04-07T14:08:14.934455Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 1, NSelectRange: 2, NUpdateRow: 3, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 79, SelectRangeBytes: 632, UpdateRowBytes: 21, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:08:14.934516Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:08:14.934546Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:08:14.934572Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-04-07T14:08:14.934601Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-04-07T14:08:14.934846Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is 
DelayComplete 2025-04-07T14:08:14.934875Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-04-07T14:08:14.934901Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-04-07T14:08:14.934930Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-04-07T14:08:14.934961Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-07T14:08:14.934985Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-04-07T14:08:14.935011Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-04-07T14:08:14.935041Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:14.935065Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:08:14.935094Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:08:14.935120Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:08:14.954181Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:08:14.954251Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-04-07T14:08:14.954318Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:97:2132], exec latency: 4 ms, propose latency: 6 ms 2025-04-07T14:08:14.954392Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-07T14:08:14.954434Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:08:14.954793Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:230:2225], Recipient [4:439:2389]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-07T14:08:14.954843Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:08:14.954880Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TGroupMapperTest::NonUniformCluster2 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> 
DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2025-04-07T14:07:05.866253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:05.866532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:05.866752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001895/r3tmp/tmpelfban/pdisk_1.dat 2025-04-07T14:07:06.135096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:06.169225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:06.218385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:06.218514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:06.230070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:06.341224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:06.378741Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:06.379698Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:06.380115Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:06.380341Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:06.411170Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:06.411738Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:06.411832Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:06.413071Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:06.413138Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:06.413184Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:06.413466Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:06.437548Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:06.437720Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:06.437803Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:06.437846Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:06.437883Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:06.437913Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:06.438236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:06.438282Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:06.438424Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:06.438502Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:06.438936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:06.438984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:06.439024Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:06.439082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:06.439144Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:06.439195Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:06.439239Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:06.439271Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:06.439320Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:06.439370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:06.439509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:06.439553Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:06.439636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:06.439842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:06.439878Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:06.439969Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:06.440022Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:06.440081Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:06.440106Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:06.440139Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:06.440369Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:06.440392Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:06.440426Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:06.440459Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:06.440497Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:06.440521Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:06.440549Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:06.440585Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:06.440604Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:06.441629Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:06.441662Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:06.452382Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:06.452450Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:06.452486Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:06.452525Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:06.452599Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:06.632403Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:06.632464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:06.632498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:06.632873Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:06.632905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:06.633018Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:06.633083Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:06.633123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:06.633161Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:06.642776Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:06.642865Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:06.643896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:06.643941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:06.643989Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:06.644049Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:06.644096Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:06.644145Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... leId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\002\000\004\000\000\000\002\000\000\000\004\000\000\000\000\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "index" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 3 Name: "value" Type: 1 } MaxValueSizeBytes: 4 } } 2025-04-07T14:08:24.324688Z node 13 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Int32 : 2, Int32 : 0) 2025-04-07T14:08:24.324810Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 2, Int32 : 0) table: [72057594046644480:2:1] 2025-04-07T14:08:24.325013Z node 13 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T14:08:24.325140Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T14:08:24.325578Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:08:24.325693Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:08:24.325768Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:08:24.325838Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:24.325894Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:24.325963Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2025-04-07T14:08:24.326070Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-07T14:08:24.326133Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:08:24.326161Z node 13 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:24.326186Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:08:24.326212Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:08:24.326276Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2025-04-07T14:08:24.326353Z node 13 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715663 not found 2025-04-07T14:08:24.326415Z node 13 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715663 expected 2:5 found 0:0 2025-04-07T14:08:24.326553Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-07T14:08:24.326652Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:08:24.326680Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:08:24.326706Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:08:24.326732Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:08:24.326872Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:08:24.326925Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:08:24.326981Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:24.327035Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:24.327087Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:08:24.327111Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:24.327149Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-07T14:08:24.327874Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:08:24.327948Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:08:24.328022Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-07T14:08:24.328138Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:24.329048Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZmNjODgzNzItNmI3NDRhMDYtZjFjMmUyZi1jMjZiMTAzNQ==, ActorId: [13:913:2731], ActorState: ExecuteState, TraceId: 01jr899e3p5kc3vyvhfpp0zv9g, Create QueryResponse for error on request, msg: 2025-04-07T14:08:24.330070Z node 13 
:KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr899e3p5kc3vyvhfpp0zv9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZmNjODgzNzItNmI3NDRhMDYtZjFjMmUyZi1jMjZiMTAzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:08:24.330366Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:964:2731], Recipient [13:838:2673]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 964 RawX2: 55834577579 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\002 \005)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-04-07T14:08:24.330398Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:08:24.330499Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:838:2673], Recipient [13:838:2673]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:08:24.330532Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:08:24.330586Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:08:24.330741Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T14:08:24.330812Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:08:24.330856Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-07T14:08:24.330886Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:08:24.330912Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:24.330938Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:24.330978Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/18446744073709551615 ImmediateWriteEdgeReplied# v3501/18446744073709551615 2025-04-07T14:08:24.331024Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-04-07T14:08:24.331050Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-07T14:08:24.331075Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:24.331102Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:08:24.331129Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:08:24.331197Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2025-04-07T14:08:24.331327Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 
Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-07T14:08:24.331410Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:08:24.331473Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-07T14:08:24.331501Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:08:24.331526Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:08:24.331555Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-07T14:08:24.331603Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:08:24.331714Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-04-07T14:08:24.331769Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:08:24.331833Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:24.331884Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:24.331924Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-07T14:08:24.331948Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:24.331975Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-04-07T14:08:24.332041Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:08:24.332115Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-07T14:08:24.332182Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:24.333486Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:54:2101], Recipient [13:838:2673]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 13 Status: STATUS_NOT_FOUND ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2025-04-07T14:07:04.378801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:04.379151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:04.379425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018bc/r3tmp/tmpW6K21U/pdisk_1.dat 2025-04-07T14:07:04.815499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:04.857798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:04.910787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:04.910907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:04.922599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:05.049639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.088586Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:05.089576Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:05.089994Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:05.090212Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:05.133792Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:05.134492Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:05.134611Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:05.137072Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:05.137173Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:05.137225Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:05.139296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:05.167986Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:05.169293Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:05.169433Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:05.169477Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:05.169513Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:05.169562Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.169923Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.169984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.170894Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:05.171007Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:05.172216Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.172288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.172335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:05.172400Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.172455Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:05.172535Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.172629Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:05.172678Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:05.172720Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:05.172771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:05.172924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:05.172963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:05.173056Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:05.175274Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:05.175341Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:05.175454Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:05.175548Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:05.175588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:05.175624Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:05.175658Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.175958Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:05.176005Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:05.176063Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:05.176106Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.176172Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:05.176202Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:05.176235Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:05.176272Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.176297Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:05.177859Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:05.177921Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:05.188607Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:05.188743Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.188789Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.188849Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:05.189498Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:05.369852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.369913Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.369948Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:05.370336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:05.370376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:05.370534Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.370573Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:05.370612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:05.370650Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:05.374584Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:05.374664Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.376413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.376461Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.376504Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.376544Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:05.376583Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.376622Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:08:24.771257Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:08:24.771294Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:08:24.771505Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:845:2678], Recipient [13:845:2678]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:24.771540Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:24.771584Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:08:24.771637Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:08:24.771669Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:24.771724Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-07T14:08:24.771768Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-04-07T14:08:24.771800Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-07T14:08:24.771829Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-04-07T14:08:24.771858Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-04-07T14:08:24.771888Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-04-07T14:08:24.772015Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-07T14:08:24.772052Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-07T14:08:24.772072Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-04-07T14:08:24.772093Z node 13 :TX_DATASHARD TRACE: Add 
[3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:24.772120Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T14:08:24.772151Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-04-07T14:08:24.772176Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-04-07T14:08:24.772202Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-04-07T14:08:24.772229Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-07T14:08:24.772245Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:24.772261Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-07T14:08:24.772280Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-07T14:08:24.772345Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-07T14:08:24.772362Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-07T14:08:24.772397Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-07T14:08:24.772429Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-07T14:08:24.772455Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-07T14:08:24.772479Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-07T14:08:24.772503Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-04-07T14:08:24.772528Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:24.772682Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-04-07T14:08:24.772743Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-04-07T14:08:24.772787Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:08:24.772820Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:08:24.772852Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-07T14:08:24.772875Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:08:24.772900Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-04-07T14:08:24.772932Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:24.772964Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:24.772997Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:08:24.773026Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:08:24.784124Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715665} 2025-04-07T14:08:24.784228Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-07T14:08:24.784332Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:08:24.784415Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:24.784517Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1032:2832], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:24.784593Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:08:24.784926Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715665} 2025-04-07T14:08:24.784961Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-07T14:08:24.784995Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:08:24.785017Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:08:24.785050Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1032:2832], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:24.785084Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:24.786451Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:558:2485], Recipient [13:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-07T14:08:24.786582Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:24.786679Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:24.786815Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:08:24.786885Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:24.786949Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:24.787011Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:24.787077Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-07T14:08:24.787138Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2025-04-07T14:08:24.787172Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:24.787212Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:24.787259Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:24.787429Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:24.787814Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2025-04-07T14:08:24.787890Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:558:2485], 1} after executionsCount# 1 2025-04-07T14:08:24.787961Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:558:2485], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:24.788217Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:558:2485], 1} finished in read 2025-04-07T14:08:24.788337Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:08:24.788368Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:24.788395Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:24.788425Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:24.788489Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:08:24.788512Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:24.788564Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-07T14:08:24.788626Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:24.789335Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:55.642159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.642244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.642285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.642319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.642361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.642387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.642437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.642775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.727311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.727366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.737827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.737960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.738102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.743627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:55.743794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.744449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.744611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.746661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.747739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.747790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.747936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.747992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.748040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.748178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.755515Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.863210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.863440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.863637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.863870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.863939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.865809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.865950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.866098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.866171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.866207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.866242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.867940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.867994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.869495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.869546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.869588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.869647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.872891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.874350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.875459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.876425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.876885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.876933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.877081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.877157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.878980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.879023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.879171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.879211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.879484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.879532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.879629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.879666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.879709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.879761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.879801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:55.879830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.880065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:55.880102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.880132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.881926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.882028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.882076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 
1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.882113Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.882147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.882247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... ier }, at tablet# 72057594046678944 2025-04-07T14:08:25.341768Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 240 -> 240 2025-04-07T14:08:25.345360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.345424Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2025-04-07T14:08:25.345550Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-07T14:08:25.345589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-07T14:08:25.345646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-04-07T14:08:25.345731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:121:2147] message: TxId: 281474976710758 2025-04-07T14:08:25.345809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-07T14:08:25.345856Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-04-07T14:08:25.345891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-04-07T14:08:25.346047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:25.346087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:08:25.353759Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-04-07T14:08:25.353862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-04-07T14:08:25.357801Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-04-07T14:08:25.372947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:08:25.372990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:08:25.374866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:13696" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:25.375298Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.375394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:08:25.375630Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:25.375679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.377127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:25.377165Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:25.377610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:25.377735Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-04-07T14:08:25.377886Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-04-07T14:08:25.377929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-04-07T14:08:25.378191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.378255Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-04-07T14:08:25.378297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-04-07T14:08:25.378328Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-04-07T14:08:25.380196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-04-07T14:08:25.380268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-04-07T14:08:25.380725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.380785Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.380954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 
2025-04-07T14:08:25.383104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-04-07T14:08:25.383643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.383681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:25.383810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-07T14:08:25.384339Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-04-07T14:08:25.384431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-04-07T14:08:25.385232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:08:25.385370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-04-07T14:08:25.390982Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-07T14:08:25.392598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:25.392655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:548:2510] TestWaitNotification: OK eventTxId 102 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestSetClientOffset >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite >> TExportToS3Tests::ShouldCheckQuotas [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple >> Secret::Deactivated |93.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:55.645872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.645931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.645957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.645982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.646012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.646032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.646067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.646332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.698433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.698473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.714023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.714238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.714342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.725934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:55.726115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.726703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.728608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.733382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.740995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.741283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.741404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.741560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.755720Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.851785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.853348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.854184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.855695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.855768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.861795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.862665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.862878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.863032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.863115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.863146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.865781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.865838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.865871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.868173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.868334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose 
, TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.872252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.874393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.875479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.876449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.876887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.876932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.877074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.877143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.885969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.886166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.886446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.886487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.886579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.886614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.886648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.886695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.886724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:55.886748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.886799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:07:55.886831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.886857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.888576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.888706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.888745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.888775Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.888807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.888903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 74976720762 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976720762 2025-04-07T14:08:27.444736Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-07T14:08:27.444778Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-07T14:08:27.444844Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-04-07T14:08:27.444969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:27.445492Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.445563Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.445590Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-07T14:08:27.445617Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-07T14:08:27.445645Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:08:27.446497Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.446576Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.446601Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-07T14:08:27.446626Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-07T14:08:27.446652Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:08:27.446713Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-07T14:08:27.448599Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:27.448927Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-07T14:08:27.448982Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-07T14:08:27.449024Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-07T14:08:27.450282Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-04-07T14:08:27.450382Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-04-07T14:08:27.450662Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.450907Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:27.450989Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 17179871337 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:27.451050Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-07T14:08:27.451148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-07T14:08:27.451218Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 
2025-04-07T14:08:27.451258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-07T14:08:27.451320Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:27.451382Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:08:27.451415Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-04-07T14:08:27.451453Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-07T14:08:27.451490Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-04-07T14:08:27.451521Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-04-07T14:08:27.451581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:08:27.451623Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-04-07T14:08:27.451660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-07T14:08:27.451689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-07T14:08:27.452651Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.453953Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:27.453992Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:27.454111Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:08:27.454220Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:27.454255Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:334:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-04-07T14:08:27.454291Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:334:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-04-07T14:08:27.454953Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.455032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.455065Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
281474976720762 2025-04-07T14:08:27.455105Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-07T14:08:27.455143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:08:27.455548Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.455615Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.455647Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-07T14:08:27.455671Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:08:27.455701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:08:27.455781Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-04-07T14:08:27.455829Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:286:2274] 2025-04-07T14:08:27.457922Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.458238Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-07T14:08:27.458304Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-04-07T14:08:27.458346Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-04-07T14:08:27.458377Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-07T14:08:27.458400Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-04-07T14:08:27.458426Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-04-07T14:08:27.460078Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-07T14:08:27.460150Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:27.460194Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:701:2644] TestWaitNotification: OK eventTxId 102 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command 
err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:07:55.643543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:07:55.643640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.643674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:07:55.643713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:07:55.643755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:07:55.643795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:07:55.643859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:07:55.644151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:55.713337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:07:55.713385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:55.727034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:55.727134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:07:55.727263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:07:55.733390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:07:55.733595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:07:55.734132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.734320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:07:55.735990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.741300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:07:55.741368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.741416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:07:55.741563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-07T14:07:55.752630Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:07:55.865669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:07:55.865866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.866094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:07:55.866286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:07:55.866343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:07:55.868294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.868357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:07:55.868390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:07:55.868417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:07:55.869887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.869932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:07:55.869959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:07:55.871246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.871280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.871312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.871376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.874371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:07:55.875606Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:07:55.875740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:07:55.876537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:07:55.876685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.876883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:07:55.876932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:07:55.877070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.877146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:07:55.879808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:07:55.879867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:07:55.879978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:07:55.880008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:07:55.880194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:07:55.880262Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:07:55.880353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:07:55.880391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.880425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:07:55.880466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:07:55.880499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:07:55.880523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:07:55.880569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 2 2025-04-07T14:07:55.880599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:07:55.880630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:07:55.882170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.882260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:07:55.882294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:07:55.882322Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:07:55.882349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:07:55.882428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... T_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-07T14:08:21.720276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-07T14:08:21.720320Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-07T14:08:21.720416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:08:21.720461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-07T14:08:21.720489Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-07T14:08:21.730841Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:08:24.360396Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0017 2025-04-07T14:08:24.395176Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-04-07T14:08:24.442928Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:08:24.443064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 
2025-04-07T14:08:24.443118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-07T14:08:24.443153Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-07T14:08:24.443302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:08:24.443334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-07T14:08:24.443361Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-07T14:08:24.453859Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:08:27.005925Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0017 2025-04-07T14:08:27.026690Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-04-07T14:08:27.048640Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-07T14:08:27.048841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-07T14:08:27.048897Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-07T14:08:27.048942Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-07T14:08:27.049031Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-07T14:08:27.049063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-07T14:08:27.049092Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-07T14:08:27.059479Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:08:28.508923Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [3:560:2521], attempt# 1 2025-04-07T14:08:28.547138Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# 
[3:559:2520] 2025-04-07T14:08:28.555941Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [3:560:2521], sender# [3:559:2520] 2025-04-07T14:08:28.556042Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [3:559:2520] 2025-04-07T14:08:28.556222Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [3:560:2521], sender# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } 2025-04-07T14:08:28.556443Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [3:560:2521], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:3311 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E312D8E5-81A0-4DC8-8820-71AE0F0509D4 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-07T14:08:28.568817Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [3:560:2521], result# 2025-04-07T14:08:28.569110Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T14:08:28.580508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:08:28.580607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-07T14:08:28.580834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:08:28.580977Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 440 RawX2: 12884904299 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:08:28.581086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:28.581139Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:28.581183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 
72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:08:28.581231Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T14:08:28.581425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:28.584932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:28.585379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-07T14:08:28.585437Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-07T14:08:28.585559Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-07T14:08:28.585605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:28.585671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-07T14:08:28.585926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:122:2148] message: TxId: 281474976710759 2025-04-07T14:08:28.585989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-07T14:08:28.586033Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T14:08:28.586070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-07T14:08:28.586196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:28.588492Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T14:08:28.588587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-07T14:08:28.590746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:28.590801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:580:2538] TestWaitNotification: OK eventTxId 102 >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log}
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns
>> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD]
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD]
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
Test command err: 2025-04-07T14:07:04.377648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:04.378002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:04.378248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018c7/r3tmp/tmpuaIboV/pdisk_1.dat 2025-04-07T14:07:04.820202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:04.859669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:04.910200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:04.910354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:04.921702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:05.049662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.092880Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:05.093835Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:05.094283Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:05.094518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:05.137186Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:05.137897Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:05.137995Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:05.139443Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:05.139529Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:05.139592Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:05.139873Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:05.173962Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:05.174152Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:05.174273Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:05.174312Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:05.174346Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:05.174383Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.174754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.174812Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.174985Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:05.175081Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:05.175501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.175555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.175596Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:05.175655Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.175712Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:05.175760Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.175796Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:05.175846Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:05.175882Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:05.175933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:05.176082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:05.176126Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:05.176209Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:05.176425Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:05.176471Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:05.176566Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:05.176616Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:05.176654Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:05.176709Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:05.176739Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.176993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:05.177039Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:05.177083Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:05.177147Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.177199Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:05.177233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:05.177274Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:05.177308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.177330Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:05.178702Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:05.178753Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:05.189400Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:05.189462Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.189508Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.189547Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:05.189616Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:05.369873Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.369926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.369963Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:05.370357Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:05.370397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:05.370519Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.370562Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:05.370598Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:05.370633Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:05.374898Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:05.374955Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.376437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.376482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.376524Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.376571Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:05.376612Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.376649Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... :2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.536488Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.536884Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709543002, quota bytes left# 18446744073709138191, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.537037Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.537083Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.537122Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.537475Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542903, quota bytes left# 18446744073709133439, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.537631Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.537675Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.537716Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.538086Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542804, quota bytes left# 18446744073709128687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.538250Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.538299Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.538343Z node 14 
:TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.538736Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542705, quota bytes left# 18446744073709123935, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.538907Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.538951Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.538991Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.539365Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542606, quota bytes left# 18446744073709119183, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.539495Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.539541Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.539583Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.539974Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542507, quota bytes left# 18446744073709114431, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.540143Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.540187Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.540229Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.540600Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542408, quota bytes left# 18446744073709109679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.540747Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.540792Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.540831Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.541205Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542309, quota bytes left# 18446744073709104927, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.541366Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], 
Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.541409Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.541449Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.541844Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542210, quota bytes left# 18446744073709100175, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.541969Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.542016Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.542057Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.542416Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542111, quota bytes left# 18446744073709095423, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.542580Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.542623Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.542662Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.543054Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542012, quota bytes left# 18446744073709090671, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.543240Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.543286Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.543329Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.543710Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541913, quota bytes left# 18446744073709085919, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.543871Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.543919Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.543962Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.544372Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota 
rows left# 18446744073709541814, quota bytes left# 18446744073709081167, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.544530Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.544578Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.544619Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.545024Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541715, quota bytes left# 18446744073709076415, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.545200Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.545245Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.545283Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.545665Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541616, quota bytes left# 18446744073709071663, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.545793Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:938:2753], Recipient [14:938:2753]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:29.545836Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 0 2025-04-07T14:08:29.545879Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 0 2025-04-07T14:08:29.546039Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:558:2485], 1} sends rowCount# 1, bytes# 48, quota rows left# 18446744073709541615, quota bytes left# 18446744073709071615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:29.546109Z node 14 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[14:558:2485], 1} finished in ReadContinue |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-04-07T14:07:08.006297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:08.006553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:08.006760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00188a/r3tmp/tmppO5Eg0/pdisk_1.dat 2025-04-07T14:07:08.286119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:08.318271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:08.368253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:08.368362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:08.379499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:08.489003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:08.516260Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:08.517285Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:08.517763Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:08.518004Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:08.547045Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:08.547591Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:08.547682Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:08.548760Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:08.548837Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:08.548877Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:08.549119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:08.570410Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:08.570579Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:08.570676Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:08.570712Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:08.570741Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:08.570771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:08.571121Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:08.571166Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:08.571314Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:08.571414Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:08.571751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:08.571780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:08.571811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:08.571855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:08.571900Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:08.571936Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:08.571963Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:08.571987Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:08.572013Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:08.572069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:08.572161Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:08.572187Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:08.572261Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:08.572435Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:08.572465Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:08.572549Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:08.572591Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:08.572617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:08.572639Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:08.572679Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:08.572879Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:08.572903Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:08.572929Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:08.572959Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:08.572996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:08.573014Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:08.573051Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:08.573088Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:08.573110Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:08.574107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:08.574143Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:08.584874Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:08.584975Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:08.585003Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:08.585038Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:08.585092Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:08.764142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:08.764214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:08.764245Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:08.764557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:08.764584Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:08.764691Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:08.764727Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:08.764767Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:08.764809Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:08.768488Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:08.768554Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:08.769870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:08.769940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:08.770002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:08.770048Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:08.770087Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:08.770137Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:30.915132Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:08:30.915182Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:08:30.915410Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:632:2537], Recipient [14:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:30.915444Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:30.915483Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:08:30.915516Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:08:30.915542Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:08:30.915571Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037888 2025-04-07T14:08:30.915598Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit PlanQueue 2025-04-07T14:08:30.915627Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:08:30.915652Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit PlanQueue 2025-04-07T14:08:30.915678Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037888 to execution unit LoadTxDetails 2025-04-07T14:08:30.915705Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit LoadTxDetails 2025-04-07T14:08:30.915853Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-07T14:08:30.915902Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:08:30.915929Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit LoadTxDetails 2025-04-07T14:08:30.915952Z node 14 :TX_DATASHARD TRACE: Add 
[3000:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:30.915978Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:30.916015Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037888 2025-04-07T14:08:30.916048Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037888 2025-04-07T14:08:30.916081Z node 14 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037888 2025-04-07T14:08:30.916114Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:08:30.916137Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:30.916159Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037888 to execution unit CreateVolatileSnapshot 2025-04-07T14:08:30.916188Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit CreateVolatileSnapshot 2025-04-07T14:08:30.916274Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:08:30.916300Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit CreateVolatileSnapshot 2025-04-07T14:08:30.916336Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037888 to execution unit DropVolatileSnapshot 2025-04-07T14:08:30.916363Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit DropVolatileSnapshot 2025-04-07T14:08:30.916388Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:08:30.916409Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit DropVolatileSnapshot 2025-04-07T14:08:30.916430Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037888 to execution unit CompleteOperation 2025-04-07T14:08:30.916453Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:08:30.916566Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is DelayComplete 2025-04-07T14:08:30.916591Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit CompleteOperation 2025-04-07T14:08:30.916621Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:30.919592Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:30.919669Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:08:30.919700Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:30.919749Z node 14 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037888 has finished 2025-04-07T14:08:30.919789Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:30.919828Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:08:30.919865Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:08:30.919902Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:08:30.945503Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2025-04-07T14:08:30.945628Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-07T14:08:30.945739Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:08:30.945811Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:08:30.945911Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [14:999:2801], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:30.946002Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:30.946390Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2025-04-07T14:08:30.946431Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-07T14:08:30.946477Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:08:30.946505Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:30.946544Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [14:999:2801], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:30.946583Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:08:30.948064Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:558:2485], Recipient [14:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-04-07T14:08:30.948196Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:30.948294Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:30.948434Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:30.948495Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:30.948558Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:30.948609Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:30.948654Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-07T14:08:30.948737Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-04-07T14:08:30.948765Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:30.948788Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:30.948815Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:30.948946Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:30.949379Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-07T14:08:30.949460Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:558:2485], 1} after executionsCount# 1 2025-04-07T14:08:30.949534Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:30.949798Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 1} finished in read 2025-04-07T14:08:30.949890Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:30.949916Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:30.949942Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:30.949970Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:30.950019Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:30.950039Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:30.950073Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-07T14:08:30.950125Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:30.950290Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.1%| [TM] 
{asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] Test command err: 2025-04-07T14:07:09.302795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:09.303072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:09.303294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186a/r3tmp/tmpFz82mT/pdisk_1.dat 2025-04-07T14:07:09.609705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:09.640434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:09.691111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:09.691277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:09.702607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:09.811745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:09.844546Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:09.845467Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:09.845853Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:09.846088Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:09.881150Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:09.881880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:09.881971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:09.883106Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:09.883176Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:09.883209Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:09.883417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:09.902681Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:09.902857Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:09.902977Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:09.903033Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:09.903077Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:09.903107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:09.903465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:09.903519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:09.903708Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:09.903810Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:09.904246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:09.904284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:09.904321Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:09.904383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:09.904444Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:09.904489Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:09.904521Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:09.904583Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:09.904619Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:09.904654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:09.904787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:09.904823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:09.904933Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:09.905158Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:09.905202Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:09.905314Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:09.905383Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:09.905421Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:09.905452Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:09.905480Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:09.905741Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:09.905784Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:09.905831Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:09.905880Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:09.905940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:09.905967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:09.906007Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:09.906043Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:09.906063Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:09.907361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:09.907414Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:09.917933Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:09.918011Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:09.918057Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:09.918093Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:09.918153Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:10.096576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:10.096635Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:10.096686Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:10.097061Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:10.097096Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:10.097205Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:10.097246Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:10.097277Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:10.097306Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:10.101493Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:10.101559Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:10.102476Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:10.102541Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:10.102608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:10.102645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:10.102698Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:10.102740Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... hNWMtNTVkYWMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, datashard 72075186224037888 not finished yet: Executing 2025-04-07T14:08:33.579468Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1112:2872] TxId: 281474976715667. Ctx: { TraceId: 01jr899q2ya6907rhmtdsv8xaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YTI5ZTQ0OWEtZWM4MjQ0NDgtY2FkYTRhNWMtNTVkYWMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-07T14:08:33.579952Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [14:1123:2897], Recipient [14:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:33.579994Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:33.580028Z node 14 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [14:1122:2896], serverId# [14:1123:2897], sessionId# [0:0:0] 2025-04-07T14:08:33.581119Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:558:2485], Recipient [14:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-04-07T14:08:33.581258Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:33.581330Z node 14 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-07T14:08:33.581388Z node 14 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-04-07T14:08:33.581464Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:33.581619Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:33.581686Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit 
CheckRead 2025-04-07T14:08:33.581735Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:33.581774Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:33.581844Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-07T14:08:33.581900Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:33.581931Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:33.581954Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:33.581976Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:33.582082Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-07T14:08:33.582454Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-04-07T14:08:33.582491Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-04-07T14:08:33.582563Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:08:33.616918Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1020:2819], Recipient [14:632:2537]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T14:08:33.617031Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T14:08:33.617094Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-04-07T14:08:33.617212Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-07T14:08:33.617402Z node 14 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1112:2872], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:33.617488Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:08:33.617555Z node 14 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-04-07T14:08:33.617848Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1112:2872] TxId: 281474976715667. Ctx: { TraceId: 01jr899q2ya6907rhmtdsv8xaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YTI5ZTQ0OWEtZWM4MjQ0NDgtY2FkYTRhNWMtNTVkYWMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-07T14:08:33.618021Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1112:2872] TxId: 281474976715667. 
Ctx: { TraceId: 01jr899q2ya6907rhmtdsv8xaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YTI5ZTQ0OWEtZWM4MjQ0NDgtY2FkYTRhNWMtNTVkYWMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:08:33.618114Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1112:2872] TxId: 281474976715667. Ctx: { TraceId: 01jr899q2ya6907rhmtdsv8xaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YTI5ZTQ0OWEtZWM4MjQ0NDgtY2FkYTRhNWMtNTVkYWMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T14:08:33.618255Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:632:2537], Recipient [14:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:33.618323Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:33.619196Z node 14 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [14:558:2485], selfId: [14:50:2097], source: [14:1090:2872] 2025-04-07T14:08:33.619377Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:33.619658Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:08:33.619737Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:08:33.619789Z node 14 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-04-07T14:08:33.619841Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:33.620023Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-07T14:08:33.620477Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-07T14:08:33.620544Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:558:2485], 1} after executionsCount# 2 2025-04-07T14:08:33.620609Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-04-07T14:08:33.620869Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:33.620922Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:33.620973Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:33.621018Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:33.621102Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:08:33.621121Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:33.621149Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2025-04-07T14:08:33.621203Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:33.621251Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:08:33.621300Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:08:33.621350Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:08:33.621958Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:632:2537], Recipient [14:632:2537]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:33.622050Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 2 2025-04-07T14:08:33.622282Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 2 2025-04-07T14:08:33.622446Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:558:2485], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-04-07T14:08:33.623307Z node 14 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=14&id=YTI5ZTQ0OWEtZWM4MjQ0NDgtY2FkYTRhNWMtNTVkYWMxYmQ=, workerId: [14:1090:2872], local sessions count: 0 2025-04-07T14:08:33.623433Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:632:2537], Recipient [14:632:2537]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:08:33.623483Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:558:2485], 1}, firstUnprocessedQuery# 4 2025-04-07T14:08:33.623614Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:558:2485], 1}, FirstUnprocessedQuery# 4 2025-04-07T14:08:33.623727Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:558:2485], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-04-07T14:08:33.623839Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[14:558:2485], 1} finished in ReadContinue 2025-04-07T14:08:33.624070Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:54:2101], Recipient [14:1020:2819]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 14 Status: STATUS_NOT_FOUND >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] >> TGroupMapperTest::NonUniformCluster [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-04-07T14:07:05.093927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:05.094211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:05.094451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018a1/r3tmp/tmpyEf92Z/pdisk_1.dat 2025-04-07T14:07:05.374027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.408466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:05.454823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:05.454914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:05.466094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:05.575696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.602574Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:05.603406Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:05.603753Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:05.603981Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:05.632923Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:05.633430Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:05.633500Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:05.634703Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:05.634778Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:05.634820Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:05.635117Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:05.660517Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:05.660707Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:05.660800Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:05.660835Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:05.660866Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:05.660900Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.661227Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.661268Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.661403Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:05.661476Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:05.661830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.661859Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.661891Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:05.661934Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.661977Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:05.662025Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.662060Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:05.662091Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:05.662119Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:05.662153Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:05.662257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:05.662285Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:05.662344Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:05.662550Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:05.662592Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:05.662666Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:05.662712Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:05.662743Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:05.662767Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:05.662800Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.662995Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:05.663021Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:05.663049Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:05.663081Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.663127Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:05.663150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:05.663173Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:05.663196Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.663212Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:05.664203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:05.664244Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:05.674869Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:05.674936Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.674962Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.675010Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:05.675059Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:05.854262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.854344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.854377Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:05.854763Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:05.854798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:05.854921Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.854961Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:05.854994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:05.855051Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:05.863870Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:05.863957Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.864936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.864986Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.865027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.865072Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:05.865108Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.865145Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:08:35.248488Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:843:2677], Recipient [14:843:2677]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:35.248524Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:35.248561Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:08:35.248599Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:08:35.248632Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:35.250007Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-07T14:08:35.250072Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-07T14:08:35.250119Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:35.250158Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-07T14:08:35.250198Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-07T14:08:35.250239Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-07T14:08:35.250378Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-07T14:08:35.250425Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:35.250452Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2025-04-07T14:08:35.250475Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:35.250500Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2025-04-07T14:08:35.250544Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-07T14:08:35.250582Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-07T14:08:35.250622Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-07T14:08:35.250665Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:35.250689Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:35.250710Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-07T14:08:35.250738Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-07T14:08:35.250847Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-07T14:08:35.250874Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-07T14:08:35.250919Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-07T14:08:35.250960Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-07T14:08:35.250985Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:35.251009Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-07T14:08:35.251034Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-07T14:08:35.251062Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:35.251204Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-07T14:08:35.251252Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-07T14:08:35.251306Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:08:35.251344Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:08:35.251377Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:35.251403Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:08:35.251430Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2025-04-07T14:08:35.251468Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:35.251510Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:35.251549Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:08:35.251591Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:08:35.263561Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2025-04-07T14:08:35.263712Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-07T14:08:35.263833Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:08:35.263908Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:08:35.264018Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2833], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:35.264115Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:35.264309Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2025-04-07T14:08:35.264344Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-07T14:08:35.264391Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:08:35.264421Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:35.264462Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2833], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:35.264495Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:08:35.266402Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:558:2485], Recipient [14:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-07T14:08:35.266578Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:35.266689Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:35.266833Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:35.266910Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:35.266974Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:35.267034Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:35.267084Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-07T14:08:35.267147Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:35.267178Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:35.267203Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:35.267227Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:35.267381Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:35.267838Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:08:35.267923Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-07T14:08:35.268008Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:558:2485], 3} after executionsCount# 1 2025-04-07T14:08:35.268094Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:35.268354Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 3} finished in read 2025-04-07T14:08:35.268452Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:35.268488Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:35.268517Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:35.268546Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:35.268602Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:35.268625Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:35.268658Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T14:08:35.269394Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:35.269664Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::StorageBilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19304, MsgBus: 29190 2025-04-07T14:07:19.060566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572599109766618:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:19.060683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001021/r3tmp/tmp5a0F2N/pdisk_1.dat 2025-04-07T14:07:19.348244Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19304, node 1 2025-04-07T14:07:19.421894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:19.421992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:19.424852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:19.442502Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:19.442523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:19.442531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:19.442641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29190 TClient is connected to server localhost:29190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:19.919661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:19.935658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:20.062720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:20.182426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:20.263218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:21.865888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572607699702920:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:21.866055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:21.898789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.922245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.953664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.976370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.996868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:22.063082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:22.102304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572611994670716:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:22.102389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:22.102520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572611994670721:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:22.105333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:22.113175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572611994670723:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:23.097354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.123621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.152966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.204359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.226065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.328966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.393148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.457325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.485941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.511348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.578565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.600946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:23.626475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.064908Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572599109766618:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:24.065021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:24.112980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:24.145201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.176060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.210199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.243432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.271081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T14:07:24.309948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... 2898502288094:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:29.223706Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:29.245144Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490572898502288096:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:08:30.761793Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:08:30.808599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:08:30.857600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:08:30.920230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:08:30.977348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.163677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.209920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.290961Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.333814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.379244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.441517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.524517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:08:31.599929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.518880Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T14:08:32.577968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.619786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.662067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.707192Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.746103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.827451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.885317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:08:32.961697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.053343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.106783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.186855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.237246Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.280219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.319955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.352446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.423544Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.503774Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.543484Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.589838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.641283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.693235Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.783670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.838056Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.083854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-07T14:08:34.158830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.206526Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.289897Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.328035Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.383015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.432409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.514561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.559728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.698885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2025-04-07T14:08:34.755651Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.805076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T14:08:34.848278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-04-07T14:07:06.733101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:06.733387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:06.733625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00188c/r3tmp/tmpmZSBAB/pdisk_1.dat 2025-04-07T14:07:06.994480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:07.030591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:07.078702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:07.078839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:07.090146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:07.201340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:07.231268Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:07.232021Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:07.232383Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:07.232576Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:07.263046Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:07.263662Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:07.263742Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:07.264937Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:07.265014Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:07.265052Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:07.265288Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:07.289579Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:07.289814Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:07.289924Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:07.289969Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:07.290004Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:07.290040Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:07.290461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.290514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.290728Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:07.290827Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:07.291268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.291312Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.291371Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:07.291432Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:07.291481Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:07.291545Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:07.291586Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:07.291624Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:07.291660Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:07.291706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:07.291863Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:07.291926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:07.292015Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:07.292280Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:07.292334Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:07.292429Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:07.292484Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:07.292519Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:07.292557Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:07.292611Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:07.292931Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:07.292976Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:07.293013Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:07.293047Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:07.293101Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:07.293150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:07.293191Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:07.293243Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:07.293272Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:07.294630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:07.294680Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:07.305425Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:07.305502Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:07.305547Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:07.305654Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:07.305724Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:07.485296Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.485345Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.485372Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:07.485705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:07.485736Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:07.485832Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:07.485872Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:07.485918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:07.485954Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:07.488911Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:07.488982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:07.489875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.489942Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.489998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:07.490035Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:07.490073Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:07.490137Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:08:36.731722Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:843:2677], Recipient [14:843:2677]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:36.731759Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:36.731803Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:08:36.731836Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:08:36.731870Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:36.731909Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-07T14:08:36.731942Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-07T14:08:36.731979Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:36.732011Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-07T14:08:36.732043Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-07T14:08:36.732076Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-07T14:08:36.732190Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-07T14:08:36.732228Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:36.732249Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2025-04-07T14:08:36.732271Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:36.732293Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2025-04-07T14:08:36.732323Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-07T14:08:36.732352Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-07T14:08:36.732388Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-07T14:08:36.732424Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:36.732443Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:36.732463Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-07T14:08:36.732494Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-07T14:08:36.732574Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-07T14:08:36.732602Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-07T14:08:36.732629Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-07T14:08:36.732656Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-07T14:08:36.733295Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:36.733323Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-07T14:08:36.733348Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-07T14:08:36.733376Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:36.733541Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-07T14:08:36.733575Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-07T14:08:36.733615Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:08:36.733656Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:08:36.733691Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-07T14:08:36.733717Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:08:36.733748Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2025-04-07T14:08:36.733782Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:36.733817Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:08:36.733853Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:08:36.733887Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:08:36.744889Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2025-04-07T14:08:36.745002Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-07T14:08:36.745111Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:08:36.745180Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:08:36.745277Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2833], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:36.745358Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:08:36.745663Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2025-04-07T14:08:36.745704Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-07T14:08:36.745745Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:08:36.745772Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:08:36.745812Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2833], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:08:36.745856Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:08:36.747579Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:558:2485], Recipient [14:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-07T14:08:36.747747Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:36.747879Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:36.748029Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:36.748109Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:36.748181Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:36.748245Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:36.748299Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-07T14:08:36.748363Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:36.748394Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:36.748420Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:36.748446Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:36.748623Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:36.749101Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:08:36.749182Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-07T14:08:36.749247Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:558:2485], 10} after executionsCount# 1 2025-04-07T14:08:36.749337Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:36.749566Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:558:2485], 10} finished in read 2025-04-07T14:08:36.749664Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:36.749691Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:36.749713Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:36.749738Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:36.749791Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-07T14:08:36.749817Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:36.749851Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T14:08:36.749903Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:36.750090Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> YdbTableSplit::SplitByLoadWithUpdates >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> YdbTableSplit::MergeByNoLoadAfterSplit >> YdbTableSplit::RenameTablesAndSplit |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation 
[GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:39.606947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:39.607197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.607243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:39.607279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:39.607348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:39.607383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:39.607442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.607759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:39.677338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:39.677413Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:39.687565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:39.687818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:39.687969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:39.692790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:39.692960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:39.693591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.693779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:39.695578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.696692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.696778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:08:39.696906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:39.696953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.697026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:39.697171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.703106Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:39.821958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:39.823258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.824599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:39.830124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:39.830238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.833429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.837345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:39.837578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.837644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:39.837710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:39.837743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:39.839782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.839857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:39.839895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:39.841572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.841614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.841649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.841692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.845226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:39.846785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:39.848967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:39.850202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.850368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:39.850431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.850759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:39.850818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.851004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.851089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:39.853833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.853892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.854086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:39.854399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:39.854536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:39.854570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.854612Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:39.854671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.854711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:39.854780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:39.854841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:39.854901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:39.854935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:39.856867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.856969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.857004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:39.857042Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:39.857089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.857188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
-04-07T14:08:40.245394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.245435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.245480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.245622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:40.248021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-07T14:08:40.248121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-07T14:08:40.248418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:40.248570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.248618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.248936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:08:40.248999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.249108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:08:40.249199Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:601:2527], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:08:40.250963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:40.251006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:08:40.251158Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:40.251188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T14:08:40.251526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.251576Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-07T14:08:40.251608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-07T14:08:40.252411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:08:40.252512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:08:40.252552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:08:40.252588Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-07T14:08:40.252625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-07T14:08:40.252722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:08:40.255058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.255110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:08:40.255202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:08:40.255254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.255306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:08:40.255349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.255389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:08:40.255417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:08:40.255592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:08:40.255852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:08:40.257452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:08:40.257495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:08:40.257912Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-04-07T14:08:40.258018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:08:40.258052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:755:2636] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T14:08:40.260500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:40.260704Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-04-07T14:08:40.260745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-04-07T14:08:40.260889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-07T14:08:40.260936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-07T14:08:40.264231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.264366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-07T14:08:40.266982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:40.267140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-04-07T14:08:40.267184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-04-07T14:08:40.267307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-07T14:08:40.267354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-07T14:08:40.269303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.269451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:39.603323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:39.603415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.603453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:39.603481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:39.603516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:39.603551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:39.603607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.604196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:39.673797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:39.673858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:39.683665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:39.683808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:39.683935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:39.688802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:39.688978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:39.689574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.689768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:39.691449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:39.694562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.694606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:39.694786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.701619Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:39.852500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:39.852790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.852985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:39.853191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:39.853264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.855102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:39.855247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.855290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:39.855333Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:39.855374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:39.856945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.856999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:39.857033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:39.858561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.858601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.858636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.858680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.862150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:39.863741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:39.863914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:39.864838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.864985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:39.865039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.865257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:39.865315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.865461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.865539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:39.867293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.867347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.867525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.867559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:39.867799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.867837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:39.867915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:39.867943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.868001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:39.868043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.868083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:39.868113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:39.868165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:39.868213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:39.868240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:39.869962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.870072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.870105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:39.870136Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:39.870176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.870265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
shard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-07T14:08:40.352623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:08:40.352720Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:605:2531], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:08:40.352849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-07T14:08:40.352896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-07T14:08:40.353003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-07T14:08:40.353029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:696:2597], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-07T14:08:40.354044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-07T14:08:40.354400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-04-07T14:08:40.354536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-07T14:08:40.354650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T14:08:40.354694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:08:40.354825Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-04-07T14:08:40.354901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-04-07T14:08:40.354940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-07T14:08:40.355029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 
72075186233409546 TxId: 106 2025-04-07T14:08:40.355073Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-04-07T14:08:40.355109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 2025-04-07T14:08:40.357014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.357120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.357152Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-07T14:08:40.357237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-07T14:08:40.357274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:08:40.357325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-07T14:08:40.357366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:08:40.357394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-07T14:08:40.357430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-07T14:08:40.357483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:08:40.359200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:08:40.359240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:08:40.359637Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:08:40.359725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:08:40.359759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:836:2717] TestWaitNotification: OK eventTxId 106 2025-04-07T14:08:40.360336Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:40.360526Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 195us result status StatusSuccess 2025-04-07T14:08:40.360825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.361348Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-04-07T14:08:40.361507Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/ServerLess0" took 144us result status StatusSuccess 2025-04-07T14:08:40.361764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2025-04-07T14:08:40.362288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:40.362419Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 136us result status StatusSuccess 2025-04-07T14:08:40.362680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 
5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.363131Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:39.601965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:39.602098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.602140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:39.602189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:39.602917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:39.602972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:39.603052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.604172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:39.672650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:39.672729Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:39.683095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:39.683230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:39.683395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T14:08:39.688848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:39.689016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:39.689630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.689863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:39.691631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:39.694719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.694785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:39.694950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.702236Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:39.829836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:39.830102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.830319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:39.830550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:39.830617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.833531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.836820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:39.837047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.837124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:39.837170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T14:08:39.837205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:39.841424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.841511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:39.841575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:39.845241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.845294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.845334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.845399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.860254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:39.864284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:39.864478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:39.865592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.865737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:39.865788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.865996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:39.866051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.866196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.866262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:39.868240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.868310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.868438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.868474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:39.868690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.868734Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:39.868824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:39.868859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.868915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:39.868969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.869014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:39.869043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:39.869100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:39.869146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:39.869183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:39.870788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.870887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.870921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:39.870952Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:39.870998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.871095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
on FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409547, partId: 0 2025-04-07T14:08:40.330114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409547 2025-04-07T14:08:40.330163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-07T14:08:40.330215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409547 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-07T14:08:40.330980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409548, partId: 0 2025-04-07T14:08:40.331067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-04-07T14:08:40.331099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-07T14:08:40.331130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-04-07T14:08:40.331170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-04-07T14:08:40.336228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.336654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.338489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.338629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.338685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.338730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.338785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.338931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:40.341421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-07T14:08:40.341539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-07T14:08:40.341836Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:40.341973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.342021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.342260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:08:40.342315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-07T14:08:40.342416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:08:40.342529Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:601:2527], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:08:40.345964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:40.346003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:08:40.346118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:40.346143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T14:08:40.346377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.346417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-07T14:08:40.346473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-07T14:08:40.347101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:08:40.347186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:08:40.347218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:08:40.347251Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-07T14:08:40.347290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-07T14:08:40.347385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:08:40.353429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:08:40.353496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:08:40.353607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:08:40.353684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.353740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:08:40.353782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:08:40.353829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:08:40.353864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:08:40.354015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:08:40.354288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:08:40.357184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:08:40.357241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:08:40.357668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:08:40.357783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:08:40.357819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:755:2636] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-07T14:08:40.360379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:40.360536Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-04-07T14:08:40.360584Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-04-07T14:08:40.360737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: 
Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-07T14:08:40.360788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-07T14:08:40.363540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:40.363706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 >> TSchemeShardServerLess::BaseCase |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TPQTest::TestSetClientOffset [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] Test command err: 2025-04-07T14:07:06.456500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:06.456823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:06.457053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001893/r3tmp/tmpz45R5U/pdisk_1.dat 2025-04-07T14:07:06.728025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:06.760799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:06.807920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:06.808055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:06.819235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:06.929384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:06.962041Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:06.962931Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:06.963299Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:06.963507Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:07.003604Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:07.004176Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:07.004285Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:07.005580Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:07.005643Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:07.005691Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:07.005959Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:07.031482Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:07.031661Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:07.031775Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:07.031814Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:07.031844Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:07.031876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:07.032231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.032275Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.032425Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:07.032511Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:07.032875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.032905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.032943Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:07.032991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:07.033043Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:07.033086Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:07.033116Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:07.033145Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:07.033176Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:07.033217Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:07.033317Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:07.033347Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:07.033421Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:07.033636Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:07.033683Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:07.033777Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:07.033831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:07.033862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:07.033891Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:07.033917Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:07.034134Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:07.034163Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:07.034194Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:07.034232Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:07.034277Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:07.034300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:07.034326Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:07.034354Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:07.034377Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:07.035600Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:07.035642Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:07.046122Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:07.046169Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:07.046197Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:07.046230Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:07.046291Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:07.224191Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.224246Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:07.224287Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:07.224591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:07.224627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:07.224744Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:07.224775Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:07.224807Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:07.224849Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:07.227686Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:07.227742Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:07.228453Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.228496Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:07.228532Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:07.228559Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:07.228590Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:07.228625Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:39.911750Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-07T14:08:39.911810Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:08:39.911887Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-04-07T14:08:39.912237Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:54:2101], Recipient [15:1022:2819]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND 2025-04-07T14:08:39.913176Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 6, sender: [15:558:2485], selfId: [15:50:2097], source: [15:1119:2892] 2025-04-07T14:08:39.913287Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:39.913323Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:08:39.913370Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:08:39.913429Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:08:39.913464Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-04-07T14:08:39.913494Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:39.913606Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:39.914016Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-07T14:08:39.914070Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 3} after executionsCount# 2 
2025-04-07T14:08:39.914111Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:39.914276Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} finished in read 2025-04-07T14:08:39.914348Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:08:39.914375Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:39.914403Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:39.914430Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:39.914479Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-07T14:08:39.914503Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:39.914525Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-07T14:08:39.914551Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:08:39.914592Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:08:39.914653Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:08:39.914727Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:08:39.915378Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZDBkOGRkMDktYzljYTdmYjctNmY4M2IzYzgtOWZiNWI2NTU=, workerId: [15:1119:2892], local sessions count: 0 2025-04-07T14:08:39.916865Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-07T14:08:39.917028Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:39.917134Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:39.917269Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-07T14:08:39.917345Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:39.917421Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:39.917478Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:39.917529Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-04-07T14:08:39.917585Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-07T14:08:39.917624Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-04-07T14:08:39.917665Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:39.917692Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:39.917841Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:39.918233Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-07T14:08:39.918326Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 4} after executionsCount# 1 2025-04-07T14:08:39.918419Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:39.918691Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} finished in read 2025-04-07T14:08:39.918788Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-07T14:08:39.918816Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:39.918842Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:39.918869Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:39.918920Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-07T14:08:39.918941Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:39.918989Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-04-07T14:08:39.919048Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:08:39.920062Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-07T14:08:39.920199Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:08:39.920303Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-04-07T14:08:39.920436Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-07T14:08:39.920499Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:08:39.920566Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:39.920630Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:08:39.920935Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2025-04-07T14:08:39.921007Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-07T14:08:39.921040Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:39.921072Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:08:39.921100Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:08:39.921290Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-07T14:08:39.921712Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-07T14:08:39.921794Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 5} after executionsCount# 1 2025-04-07T14:08:39.921877Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:08:39.922112Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} finished in read 2025-04-07T14:08:39.922211Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-07T14:08:39.922239Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:08:39.922270Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:08:39.922297Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:08:39.922348Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-07T14:08:39.922370Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:08:39.922408Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-04-07T14:08:39.922479Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 >> ResultFormatter::List >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestSetClientOffset [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2025-04-07T14:04:51.788889Z node 1 :PERSQUEUE 
NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.788964Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:175:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.822746Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:51.847328Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-07T14:04:51.848244Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:182:2195] 2025-04-07T14:04:51.851138Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:51.857246Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:183:2196] 2025-04-07T14:04:51.859082Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.870414Z node 1 :PERSQUEUE INFO: new Cookie owner1|bdab0c24-2996cbcd-c3a995d6-9b47819c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-07T14:04:51.870970Z node 1 :PERSQUEUE INFO: new Cookie owner2|687575eb-5e4318f9-b2268d1-9f2b81c2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:51.885628Z node 1 :PERSQUEUE INFO: new Cookie owner1|4becb4d2-e03b4448-536a9b51-36ffc40_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: 
[2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2025-04-07T14:04:52.389696Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.389778Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:175:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.403004Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.403737Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-07T14:04:52.404204Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:182:2195] 2025-04-07T14:04:52.406159Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:04:52.407974Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:183:2196] 2025-04-07T14:04:52.409682Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.415508Z node 2 :PERSQUEUE INFO: new Cookie owner1|312342fc-2bd06947-53941ae3-e587ea30_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-07T14:04:52.416012Z node 2 :PERSQUEUE INFO: new Cookie owner2|d830a627-fdf7af78-a50e9756-7338ad6a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
2025-04-07T14:04:52.435348Z node 2 :PERSQUEUE INFO: new Cookie owner1|fdcb4c87-1c3a2a11-f3c7f493-61b5aa06_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2025-04-07T14:04:52.760179Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.760239Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:175:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.775865Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:52.776555Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-07T14:04:52.777052Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:182:2195] 2025-04-07T14:04:52.779034Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:04:52.780444Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:183:2196] 2025-04-07T14:04:52.782042Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:183:2196] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.787756Z node 3 :PERSQUEUE INFO: new Cookie owner1|99dd7ebb-87939521-120d7b93-60275858_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-07T14:04:52.788110Z node 3 :PERSQUEUE INFO: new Cookie 
owner2|f0bee2c2-6e371af2-fd00609c-ad1ad5a9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:04:52.802165Z node 3 :PERSQUEUE INFO: new Cookie owner1|930d3702-9dcbeb32-74651a04-f0e6c602_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2025-04-07T14:04:53.216210Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:04:53.216290Z node 4 :PERSQUEUE I ... ON_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:08:40.461864Z node 147 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [147:105:2137]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [147:105:2137] sender: [147:211:2057] recipient: [147:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [147:105:2137] sender: [147:214:2057] recipient: [147:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [147:105:2137] sender: [147:215:2057] recipient: [147:213:2218] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [147:216:2219] sender: [147:217:2057] recipient: [147:213:2218] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COMPACTION_BROKER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:08:40.502816Z node 147 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:08:40.502900Z node 147 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-07T14:08:40.503732Z node 147 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [147:267:2262] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:08:40.506693Z node 147 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [147:268:2263] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:08:40.516462Z node 147 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-07T14:08:40.516561Z node 147 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [147:267:2262] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:08:40.517076Z node 147 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:08:40.517119Z node 147 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [147:268:2263] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR !Reboot 72057594037927937 (actor [147:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [147:105:2137]) tablet resolver refreshed! new actor is[147:216:2219] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [147:216:2219] sender: [147:300:2057] recipient: [147:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [148:101:2057] recipient: [148:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [148:101:2057] recipient: [148:99:2133] Leader for TabletID 72057594037927937 is [148:105:2137] sender: [148:106:2057] recipient: [148:99:2133] 2025-04-07T14:08:41.533690Z node 148 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:08:41.533768Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [148:147:2057] recipient: [148:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [148:147:2057] recipient: [148:145:2168] Leader for TabletID 72057594037927938 is [148:151:2172] sender: [148:152:2057] recipient: [148:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [148:105:2137] sender: [148:175:2057] recipient: [148:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:08:41.556907Z node 148 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2025-04-07T14:08:41.557865Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 148 actor [148:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 148 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 148 ReadRuleGenerations: 148 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 148 Important: false } Consumers { Name: "user1" Generation: 148 Important: false } 2025-04-07T14:08:41.558869Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [148:182:2195] 2025-04-07T14:08:41.561665Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [148:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-07T14:08:41.564706Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [148:183:2196] 2025-04-07T14:08:41.566478Z node 148 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [148:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:08:41.576821Z node 148 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [149:101:2057] recipient: [149:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [149:101:2057] recipient: [149:99:2133] Leader for TabletID 72057594037927937 is [149:105:2137] sender: [149:106:2057] recipient: [149:99:2133] 2025-04-07T14:08:41.958033Z node 149 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:08:41.958120Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [149:147:2057] recipient: [149:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [149:147:2057] recipient: [149:145:2168] Leader for TabletID 72057594037927938 is [149:151:2172] sender: [149:152:2057] recipient: [149:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [149:105:2137] sender: [149:175:2057] recipient: [149:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR 2025-04-07T14:08:41.980464Z node 149 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:08:41.981359Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 149 actor [149:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 149 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 149 ReadRuleGenerations: 149 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 149 Important: false } Consumers { Name: "user1" Generation: 149 Important: false } 2025-04-07T14:08:41.982279Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [149:182:2195] 2025-04-07T14:08:41.985015Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [149:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-07T14:08:41.987824Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [149:183:2196] 2025-04-07T14:08:41.989754Z node 149 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [149:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-07T14:08:42.005260Z node 149 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> TSchemeShardServerLess::BaseCase [GOOD] >> ResultFormatter::Pg [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:41.958009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:41.958116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:41.958154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:41.958214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:41.958262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:41.958290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:41.958352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:41.958601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:42.023712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:42.023775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:42.036130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:42.036293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:42.036462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:42.042901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:42.043146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:42.043783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:42.044014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:42.047533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:42.048833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:42.048922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:42.049065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:42.049158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:08:42.049209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:42.049352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.061503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:42.164436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:42.164638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.164985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:42.165171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:42.165219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.167129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:42.167236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:42.167376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.167419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:42.167466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:42.167493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:42.169432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.169503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:42.169540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:42.171628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.171679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.171712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:42.171751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:42.174293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:42.175775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:42.175930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:42.176719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:42.176847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:42.176909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:42.177083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:42.177115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:42.177238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:42.177292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:42.178739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:42.178791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:42.178911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:42.178941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:42.179106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.179142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:42.179211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:42.179236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:42.179286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:42.179328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:42.179355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2025-04-07T14:08:42.179376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:42.179419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:42.179466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:42.179491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:42.180859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:42.180930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:42.180951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:42.180976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:42.181012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:42.181083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... SCHEMESHARD DEBUG: Free shard 72057594046678944:7 hive 72075186233409546 at ss 72057594046678944 2025-04-07T14:08:42.842277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72075186233409546 at ss 72057594046678944 2025-04-07T14:08:42.842873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:08:42.842920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-07T14:08:42.843023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-07T14:08:42.843088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:08:42.843133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-07T14:08:42.843172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:08:42.843209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-07T14:08:42.843247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-07T14:08:42.843385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:08:42.844351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T14:08:42.844929Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 Forgetting tablet 72075186234409546 2025-04-07T14:08:42.845175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at 
schemeshard: 72057594046678944 2025-04-07T14:08:42.845451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:08:42.845935Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-04-07T14:08:42.846596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:42.849557Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 2025-04-07T14:08:42.850794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-07T14:08:42.851068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409548 Forgetting tablet 72075186234409547 2025-04-07T14:08:42.852626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T14:08:42.853033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:08:42.853255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:08:42.855219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:42.855275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:08:42.855379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:08:42.855752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:42.855807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:08:42.855875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:42.863323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:08:42.863417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-04-07T14:08:42.863533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-07T14:08:42.863555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-04-07T14:08:42.863604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:08:42.863646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-04-07T14:08:42.865608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, 
done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:08:42.865754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:08:42.866033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:08:42.866074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:08:42.866514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:08:42.866606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:08:42.866640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:918:2781] TestWaitNotification: OK eventTxId 106 2025-04-07T14:08:42.867237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:42.867460Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 219us result status StatusPathDoesNotExist 2025-04-07T14:08:42.867623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:08:42.868194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:42.868340Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 162us result status StatusPathDoesNotExist 2025-04-07T14:08:42.868481Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:08:42.869221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:42.869393Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2025-04-07T14:08:42.869725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409546 is deleted wait until 72075186234409547 is deleted wait until 72075186234409548 is deleted wait until 72075186234409549 is deleted 2025-04-07T14:08:42.873436Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409546 2025-04-07T14:08:42.873554Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409547 2025-04-07T14:08:42.873606Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409548 2025-04-07T14:08:42.873673Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 Deleted tabletId 72075186234409546 Deleted tabletId 72075186234409547 Deleted tabletId 72075186234409548 Deleted tabletId 72075186234409549 >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |93.2%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> TGroupMapperTest::Block42_1disk [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> Secret::Deactivated [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2025-04-07T14:08:32.211250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021b3/r3tmp/tmpuRrjXj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3255, node 1 TClient is connected to server localhost:17635 2025-04-07T14:08:33.242897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.292858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:33.300095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:33.300151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:33.300181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:33.300479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:08:33.348617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:33.348785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:33.364678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T14:08:46.623897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:656:2547], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.624065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> TSequence::CreateSequenceParallel >> TSequence::CreateSequence |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> LocalTableWriter::SupportedTypes [GOOD] >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-04-07T14:08:48.091299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572982839669908:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:48.091356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016c6/r3tmp/tmpmBWrg3/pdisk_1.dat 2025-04-07T14:08:48.608363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:48.635996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:48.636101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:48.638291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23219 TServer::EnableGrpc on GrpcPort 6858, node 1 2025-04-07T14:08:49.071589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:49.071617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:49.071629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:49.071759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:49.797062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:49.850115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034929980 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "uint32_value" Type: "... 
(TRUNCATED) 2025-04-07T14:08:50.034893Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handshake: worker# [1:7490572987134637798:2290] 2025-04-07T14:08:50.035858Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:08:50.036446Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:08:50.036476Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Send handshake: worker# [1:7490572987134637798:2290] 2025-04-07T14:08:50.037370Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 5 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 44b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 9 Data: 66b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 11 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 12 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 13 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 14 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 15 Data: 58b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 16 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 17 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 18 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 19 Data: 76b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 20 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 21 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 22 Data: 61b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 23 Data: 51b CreateTime: 
1970-01-01T00:00:00Z },{ Offset: 24 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 25 Data: 46b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 26 Data: 47b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 27 Data: 50b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 28 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 29 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 30 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 31 Data: 64b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-07T14:08:50.038223Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-04-07T14:08:50.039430Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490572991429605188:2350] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-07T14:08:50.039478Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:08:50.039797Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490572991429605188:2350] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange 
Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-04-07T14:08:50.138021Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7490572991429605188:2350] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-07T14:08:50.138103Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-07T14:08:50.138175Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7490572991429605185:2350] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |93.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> KqpQueryPerf::Update+QueryService >> KqpQueryPerf::Replace+QueryService >> KqpQueryPerf::IndexReplace-QueryService >> KqpQueryPerf::IndexUpsert+QueryService >> KqpQueryPerf::Upsert-QueryService >> KqpQueryPerf::MultiDeleteFromTable+QueryService >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpWorkload::KV >> TGroupMapperTest::Block42_2disk [GOOD] >> TableCreator::CreateTables [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_2disk [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> KqpQueryPerf::Upsert+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-04-07T14:08:50.391679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572989430384035:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:50.392217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ee8/r3tmp/tmpOfAxQj/pdisk_1.dat 2025-04-07T14:08:50.834862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:50.843930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:50.844385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:50.846181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7630 TServer::EnableGrpc on GrpcPort 63104, node 1 2025-04-07T14:08:51.277190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:51.277212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:51.277236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:51.277312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T14:08:51.712408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:51.753187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T14:08:51.754804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 |93.3%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::MultiRead-QueryService >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> KqpQueryPerf::RangeLimitRead+QueryService >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:49.984907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:49.985022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:49.985070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:49.985107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:49.986696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:49.986751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:49.986840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:49.988995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:50.071593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:50.071676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:50.086936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:50.087112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:50.087267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:50.096117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:50.096314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:50.098608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:50.098920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:50.104484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-07T14:08:50.113155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:50.113266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:50.113418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:50.113490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:50.113615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:50.113817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.129050Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:50.238399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:50.239458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.240295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:50.245956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:50.246080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.249308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:50.250079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:50.250359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.250426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:50.250529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:50.250582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:50.253005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.253064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:50.253102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:50.261805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.261886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.261944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.262015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.266144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:50.268600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:50.270435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:50.271612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:50.271779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:50.271830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.272119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:50.272174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.272367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:50.272440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:50.274952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:50.275001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:50.275187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:50.275232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:50.275475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.275524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-07T14:08:50.275658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:50.275696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.275739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:50.275780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.275814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:50.275842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:50.275918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:50.275964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:50.275996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:50.277904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:50.278018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:50.278060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:50.278114Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:50.278168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:50.278281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
46678944, cookie: 102 2025-04-07T14:08:58.015321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:08:58.015350Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:08:58.015383Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:08:58.015418Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:08:58.015506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-07T14:08:58.015535Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.016551Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.017072Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.017110Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-04-07T14:08:58.017232Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:334:2314] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-07T14:08:58.017609Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.017650Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.017713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:08:58.017762Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:58.018069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:08:58.018227Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.018265Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-07T14:08:58.018313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-07T14:08:58.018364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-07T14:08:58.018927Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.018962Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-04-07T14:08:58.019032Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:338:2317] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-07T14:08:58.025367Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.025432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.025509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.025558Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:58.025852Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:08:58.025965Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.026001Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-07T14:08:58.026032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-07T14:08:58.026074Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-07T14:08:58.026156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:416:2371] message: TxId: 102 2025-04-07T14:08:58.026211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-07T14:08:58.026266Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:08:58.026309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:08:58.026427Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:08:58.026474Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-07T14:08:58.026495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-07T14:08:58.026541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:08:58.026575Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-07T14:08:58.026595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-07T14:08:58.026636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:08:58.026662Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-07T14:08:58.026682Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-07T14:08:58.026724Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T14:08:58.027360Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-07T14:08:58.027408Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-07T14:08:58.027482Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:08:58.027535Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 
2025-04-07T14:08:58.027626Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:08:58.028553Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.028599Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.028736Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.028768Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.028840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.028866Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.028903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.028958Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.029010Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.029032Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.036274Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:08:58.036330Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.036427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.040161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.040318Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:416:2371] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-04-07T14:08:58.040501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:08:58.040564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:510:2464] 2025-04-07T14:08:58.040804Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:08:58.040938Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:512:2466], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:08:58.040979Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:08:58.041008Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-07T14:08:58.041448Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:587:2541], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:08:58.041504Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:08:58.041621Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:08:58.041862Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 241us result status StatusPathDoesNotExist 2025-04-07T14:08:58.042016Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63373, MsgBus: 32215 2025-04-07T14:07:16.401046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572587793439725:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:16.401117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001026/r3tmp/tmp3eG9ft/pdisk_1.dat 2025-04-07T14:07:16.689011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63373, node 1 2025-04-07T14:07:16.783173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:16.783279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:16.786693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:16.786723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:16.786730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:16.786826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:16.787916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32215 TClient is connected to server localhost:32215 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:17.270284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:17.281820Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:17.300298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:17.436719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:17.564762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:17.617863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:19.015921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572600678343347:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.016040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.250278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.279509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.303313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.372597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.401955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.435885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:19.485036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572600678343845:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.485107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.487275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572600678343850:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:19.490831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:19.500183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572600678343852:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:20.617081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.644918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.669009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.690501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.714013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.815856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.842216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.867514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.927956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.954032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:20.977622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.002201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.067433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.401463Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572587793439725:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:21.401525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:21.542644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:21.566042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.629524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.655735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.691220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:21.717418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... 72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:08:42.468339Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:08:42.468373Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:08:42.468530Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:08:42.468560Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:08:42.468650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:08:42.469259Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:08:42.469432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:08:42.469460Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:08:42.469543Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:08:42.469565Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:08:42.473917Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:08:42.473982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:08:42.474072Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:08:42.474100Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:08:42.474272Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:08:42.474299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:08:42.474417Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:08:42.474451Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:08:42.474512Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:08:42.474540Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:08:42.474580Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:08:42.474610Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:08:42.477035Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:08:42.477123Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:08:42.477331Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:08:42.477366Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:08:42.477500Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:08:42.477535Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:08:42.477720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:08:42.477754Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:08:42.477907Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:08:42.477936Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:08:42.478721Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:08:42.478765Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:08:42.478865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:08:42.478903Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:08:42.479076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:08:42.479109Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:08:42.479212Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:08:42.479248Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:08:42.479313Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:08:42.479343Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:08:42.479384Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:08:42.479424Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:08:42.479821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:08:42.479870Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:08:42.480091Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:08:42.480133Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:08:42.480281Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:08:42.480314Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:08:42.480517Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:08:42.480560Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:08:42.480750Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:08:42.480793Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TVersions::Wreck0Reverse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:49.984644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:49.984797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:49.984840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:49.984877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:49.986216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:49.986277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:49.986363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:49.989042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:50.065670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:50.065746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:50.092263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:50.092420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:50.092589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:50.118946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:50.119172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:50.119857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:50.120094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:50.122297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:50.123547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:50.123605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:50.123756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:50.123804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:50.123872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:50.124057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.136459Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:50.264364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:50.264541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:08:50.264733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:50.264927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:50.264992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.267994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:50.268170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:50.268380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.268433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:50.268466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:08:50.268525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:50.270442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.270490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:50.270517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:50.272076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.272125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.272178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.272246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.275645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:50.279734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:50.279928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:50.280978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-07T14:08:50.281117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:50.281168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.281430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:50.281483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:50.281676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:50.281778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:50.283764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:50.283805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:50.283965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:50.284007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:50.284228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:50.284286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:50.284396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:50.284454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.284542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:50.284584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:50.284618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:50.284645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:50.284740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:50.284781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:50.284812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:50.286734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:50.286843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:50.286881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:50.286923Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:50.286981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:50.287072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 114 2025-04-07T14:08:58.842493Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 114, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-07T14:08:58.842518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-07T14:08:58.842614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-04-07T14:08:58.842651Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.843917Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:973:2922], Recipient [7:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1349 } } 2025-04-07T14:08:58.843958Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-07T14:08:58.844027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1349 } } 2025-04-07T14:08:58.844054Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-07T14:08:58.844170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1349 } } 2025-04-07T14:08:58.844262Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1349 } } 2025-04-07T14:08:58.844303Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.845892Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1031:2973], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:58.845931Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:58.845959Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T14:08:58.846602Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:973:2922], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 973 RawX2: 30064773994 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-07T14:08:58.846645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-07T14:08:58.846758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 973 RawX2: 30064773994 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-07T14:08:58.846821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-07T14:08:58.846992Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 973 RawX2: 30064773994 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-07T14:08:58.847056Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:08:58.847164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 973 RawX2: 30064773994 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-07T14:08:58.847256Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:58.847296Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.847337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:08:58.847393Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-04-07T14:08:58.847579Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.850727Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.850852Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-07T14:08:58.850905Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.852977Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-07T14:08:58.853024Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.853123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.853145Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.853279Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.853324Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.853381Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-04-07T14:08:58.853501Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:973:2922] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-04-07T14:08:58.853848Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.853885Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:08:58.853955Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.854004Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-04-07T14:08:58.854143Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.854180Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-07T14:08:58.854223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-07T14:08:58.854278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-04-07T14:08:58.854352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:393:2358] message: TxId: 114 2025-04-07T14:08:58.854409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-07T14:08:58.854453Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-04-07T14:08:58.854489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-04-07T14:08:58.854625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T14:08:58.856808Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:08:58.856920Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:393:2358] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-04-07T14:08:58.857123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-04-07T14:08:58.857169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1003:2945] 2025-04-07T14:08:58.857377Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1005:2947], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:08:58.857412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:08:58.857446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-04-07T14:08:58.858582Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1040:2982], Recipient [7:123:2149]: 
{TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-04-07T14:08:58.858648Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T14:08:58.863510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:58.863819Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-04-07T14:08:58.864273Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-04-07T14:08:58.864524Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:08:58.875037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:58.875244Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-04-07T14:08:58.875306Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-04-07T14:06:17.655561Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:17.740167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.740229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.741942Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:17.742304Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:17.742568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:17.751947Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:17.794474Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:17.794941Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:17.796369Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:17.796459Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:17.796507Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:17.796870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:17.821285Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
2025-04-07T14:06:17.821413Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:17.821484Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:17.821509Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:17.821532Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:17.821553Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.821671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.821701Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.822018Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:17.822082Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:17.822126Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.822164Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:17.822218Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:17.822250Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:17.822273Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:17.822294Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:17.822315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:17.870904Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.870978Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.871714Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:17.878073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:17.878152Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:17.878237Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:17.878389Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:17.878464Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:17.878523Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:17.878574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2025-04-07T14:06:17.878612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:17.878648Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:17.878678Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.878929Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:17.879002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:17.879049Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:17.879086Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.879140Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:17.879164Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:17.879190Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:17.879228Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.879250Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:17.907677Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:06:17.907773Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:17.907821Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:17.907869Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:17.907939Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:17.908415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.908463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:17.908504Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:17.908650Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:17.908712Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:17.908819Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:17.908863Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:17.908898Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:17.908936Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:17.912505Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 
} 2025-04-07T14:06:17.912567Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:17.912763Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.912799Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:17.912842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:17.912882Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:17.912913Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:17.912953Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:17.912986Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:17.913020Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:17.913056Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:17.913089Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:17.913118Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:17.913284Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:17.913324Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:17.913453Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:17.913482Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:17.913504Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:17.913570Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:17.913603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:17.913638Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:17.913670Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:17.913715Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:17.913744Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:17.913778Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:17.913834Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:17.913861Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:17.913882Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
propose latency: 56 ms, status: COMPLETE 2025-04-07T14:08:47.197387Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-04-07T14:08:47.197418Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-04-07T14:08:47.197448Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-04-07T14:08:47.197480Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-04-07T14:08:47.197533Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-04-07T14:08:47.197560Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-04-07T14:08:47.197585Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-04-07T14:08:47.230489Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:08:47.230555Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-04-07T14:08:47.230606Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:08:50.934650Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2025-04-07T14:08:50.934714Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T14:08:50.935020Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:629:2608], Recipient [3:229:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:50.935054Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:08:50.935090Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:628:2607], serverId# [3:629:2608], sessionId# [0:0:0] 2025-04-07T14:08:50.935280Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:229:2224]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-04-07T14:08:50.935313Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:08:50.935384Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:50.936320Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-04-07T14:08:50.955567Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-07T14:08:50.955708Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-04-07T14:08:50.955751Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:08:50.955788Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:08:50.955846Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-07T14:08:50.955912Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-04-07T14:08:50.955948Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-07T14:08:50.955971Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:08:50.955994Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-04-07T14:08:50.956017Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:50.960823Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:50.961027Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:50.961074Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.032511Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.032610Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.033348Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.036584Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:51.036770Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.036817Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.186791Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.186851Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.187547Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.191486Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:51.191679Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.191728Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.376442Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.376521Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.378056Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.404942Z node 3 :TX_DATASHARD TRACE: Operation 
[0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-04-07T14:08:51.405226Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.405273Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.405729Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.405774Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.406458Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.674383Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-04-07T14:08:51.675371Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.675435Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.854847Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.854940Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.855687Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.861144Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:51.861337Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.861389Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.893999Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.894074Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.894806Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.896573Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:51.896750Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.896800Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:51.917546Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:51.917617Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:51.918290Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:51.923333Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-07T14:08:51.923522Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-07T14:08:51.923589Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-07T14:08:52.339766Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:08:52.339864Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-07T14:08:52.340862Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-07T14:08:53.359113Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-04-07T14:08:53.359208Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-07T14:08:53.359269Z node 3 :TX_DATASHARD 
TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-07T14:08:53.359302Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-04-07T14:08:53.359329Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-04-07T14:08:53.359365Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-04-07T14:08:53.359418Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 60 ms, propose latency: 60 ms, status: COMPLETE 2025-04-07T14:08:53.359545Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-04-07T14:08:53.359569Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-04-07T14:08:53.359593Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-04-07T14:08:53.359628Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-04-07T14:08:53.359678Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-07T14:08:53.359697Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-04-07T14:08:53.359717Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-04-07T14:08:53.386204Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:08:53.386281Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-04-07T14:08:53.386346Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpQueryPerf::Replace+QueryService [GOOD] >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::Upsert-QueryService [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::Update+QueryService [GOOD] |93.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] |93.3%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::Upsert+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.2937 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:24.088032Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.015 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.015 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:5748446/8388608, Passive:131, MemLimit:0 Counters: Active:5748446/8388608, Passive:131, MemLimit:104857600 Counters: Active:5748446/6291456, Passive:131, MemLimit:6291456 00000.576 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.585 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 123 reqs hit {67 5638578b} miss {243 21422336b} 00000.585 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.585 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308} 00000.586 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314} 00000.607 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.607 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.608 II| FAKE_ENV: All BS storage groups are stopped 00000.608 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.611 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:24.709196Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.010 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.010 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:5748446/8388608, Passive:131, MemLimit:0 00000.411 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 104857600 ReplacementPolicy: ThreeLeveledLRU Counters: Active:5748446/8388608, Passive:131, MemLimit:0 00000.411 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 2097152 ReplacementPolicy: ThreeLeveledLRU Counters: Active:5748446/2097152, Passive:131, MemLimit:0 00000.412 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 0 ReplacementPolicy: ThreeLeveledLRU Counters: Active:1028127/0, Passive:131, MemLimit:1048576 00000.412 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.430 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 123 reqs hit {67 5638578b} miss {243 21422336b} 
00000.430 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.430 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308} 00000.430 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314} 00000.448 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.448 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.448 II| FAKE_ENV: All BS storage groups are stopped 00000.448 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.451 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:25.168802Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:8311925/8388608, Passive:131, MemLimit:0 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:8311925/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312827/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 00000.861 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.867 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57705781b} 00000.868 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.868 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.868 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.878 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.879 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.879 II| FAKE_ENV: All BS storage groups are stopped 00000.879 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.881 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.062493Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:8313507/8388608, Passive:0, MemLimit:0 Counters: Active:8313958/8388608, Passive:0, MemLimit:8388608 Counters: Active:8312605/8388608, Passive:0, MemLimit:8388608 Counters: Active:8313376/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 00000.731 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.737 II| 
TABLET_SAUSAGECACHE: Page collection cache gone, serviced 449 reqs hit {0 0b} miss {452 39564499b} 00000.738 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.738 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.738 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.752 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.752 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.752 II| FAKE_ENV: All BS storage groups are stopped 00000.752 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.755 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:26.829733Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ClockPro 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro ...compacting ...waiting until compacted Counters: Active:8313376/8388608, Passive:131, MemLimit:0 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 00000.868 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.875 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57297397b} 00000.883 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.883 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.883 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.884 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.884 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.884 II| FAKE_ENV: All BS storage groups are stopped 00000.885 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.887 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:27.730513Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.010 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.010 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted 00000.165 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO 00000.165 II| TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.165 II| 
TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.169 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.170 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 12 reqs hit {0 0b} miss {13 616620b} 00000.170 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.170 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.170 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10305919b, 107} 00000.172 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.172 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.172 II| FAKE_ENV: All BS storage groups are stopped 00000.172 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.172 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:27.908148Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:8313507/8388608, Passive:0, MemLimit:0 Counters: Active:8313507/8388608, Passive:0, MemLimit:0 00000.160 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.166 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 21 reqs hit {0 0b} miss {21 1947111b} 00000.166 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.166 II| FAKE_ENV: DS.0 gone, left {10109b, 104}, put {10129b, 105} 00000.167 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.167 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.167 II| FAKE_ENV: DS.1 gone, left {10304624b, 106}, put {10304624b, 106} 00000.179 II| FAKE_ENV: All BS storage groups are stopped 00000.179 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.181 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:28.095350Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:8309079/8388608, Passive:0, MemLimit:0 Counters: Active:8309079/8388608, Passive:0, MemLimit:0 00000.149 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.154 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 19 reqs hit {0 0b} miss {19 1946417b} 00000.160 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.160 II| FAKE_ENV: DS.0 gone, left {10109b, 104}, put {10129b, 105} 00000.160 II| FAKE_ENV: DS.1 gone, left {10298442b, 106}, put {10298442b, 106} 00000.162 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.162 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.162 II| FAKE_ENV: All BS storage groups are stopped 00000.162 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.164 II| FAKE_ENV: 
Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:28.268577Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 0 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:0/0, Passive:131, MemLimit:0 Counters: Active:0/0, Passive:131, MemLimit:0 00000.223 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.228 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 400 reqs hit {0 0b} miss {400 10324782b} 00000.228 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.229 II| FAKE_ENV: DS.0 gone, left {10109b, 104}, put {10129b, 105} 00000.229 II| FAKE_ENV: DS.1 gone, left {10304624b, 106}, put {10304624b, 106} 00000.231 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.231 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.231 II| FAKE_ENV: All BS storage groups are stopped 00000.231 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.237 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-07T14:06:28.513223Z 00000.006 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 0 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:0/0, Passive:2772, MemLimit:0 Counters: Active:0/0, Passive:2772, MemLimit:0 00000.173 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.179 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 100 reqs hit {0 0b} miss {100 10244300b} 00000.179 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.179 II| FAKE_ENV: DS.0 gone, left {10109b, 104}, put {10129b, 105} 00000.180 II| FAKE_ENV: DS.1 gone, left {10298442b, 106}, put {10298442b, 106} 00000.188 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.188 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.188 II| FAKE_ENV: All BS storage groups are stopped 00000.188 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.190 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::AggregateToScalar-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20734, MsgBus: 61059 2025-04-07T14:08:53.353735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573000907510261:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.353801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dfc/r3tmp/tmpFoefqC/pdisk_1.dat 2025-04-07T14:08:53.859864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.859998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.867860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:53.920471Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20734, node 1 2025-04-07T14:08:54.145312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.145338Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.145349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.145438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61059 TClient is connected to server localhost:61059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.237541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.261372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.417617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.634537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.721533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.208676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573018087381140:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.208760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.586382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.635810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.691816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.725098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.810519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.921182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.022066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022382348943:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.022148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.022374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022382348948:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.025970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.041668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:08:58.041933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022382348950:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.360757Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573000907510261:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.360812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16998, MsgBus: 10548 2025-04-07T14:08:53.425693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573000835381285:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.425743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e0e/r3tmp/tmpNCADqx/pdisk_1.dat 2025-04-07T14:08:54.059532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:54.073323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:54.073409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:54.082204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16998, node 1 2025-04-07T14:08:54.237012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.237041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.237050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.237142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10548 TClient is connected to server localhost:10548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.034676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.062017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:55.076433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.283313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.511228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.585517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.561503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573018015252173:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.621028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.922176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.001443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.082762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.127692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.199542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.293567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.357220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022310219977:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.357301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.357662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022310219982:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.361751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.370807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022310219984:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.428301Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573000835381285:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.428358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7011, MsgBus: 23522 2025-04-07T14:08:53.376696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573004409868467:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.377155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ded/r3tmp/tmpHLF5Ah/pdisk_1.dat 2025-04-07T14:08:53.894171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.894255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.897710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:53.930604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7011, node 1 2025-04-07T14:08:54.107382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.107401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.107411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.107501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23522 TClient is connected to server localhost:23522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.382261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.408811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:55.428191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.574675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.847306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.980211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.781562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573021589739349:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.781734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.077003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.110329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.141176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.176545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.246800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.286992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.379336Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573004409868467:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.380691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:08:58.383064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573025884707146:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.383123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.383290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573025884707151:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.387225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.417657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573025884707153:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27135, MsgBus: 61242 2025-04-07T14:08:53.325549Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573000635921547:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.325609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e00/r3tmp/tmpAGcWsv/pdisk_1.dat 2025-04-07T14:08:53.999796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:54.044055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:54.044160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:54.057924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27135, node 1 2025-04-07T14:08:54.177137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.177161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.177180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.177268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61242 TClient is connected to server localhost:61242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.100122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.147836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.318681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.530234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.640849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.419642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573017815792453:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.419748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.691361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.761721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.847654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.919354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.960355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.040898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.095579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022110760256:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.095727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.095981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022110760261:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.099154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.112956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022110760263:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.332847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573000635921547:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.387859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24086, MsgBus: 25618 2025-04-07T14:08:53.343034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573001100453424:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.353022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df7/r3tmp/tmplAFsNM/pdisk_1.dat 2025-04-07T14:08:53.850537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.850673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.853792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:53.940091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24086, node 1 2025-04-07T14:08:54.105962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.105998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.106008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.106137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25618 TClient is connected to server localhost:25618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.115853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.144878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:55.151952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.370900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.565984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.687659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.760784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573018280324234:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.772648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.831910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.888028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.921149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.959118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.004873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.056112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.147420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022575292028:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.147493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.147831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022575292033:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.152370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.163849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022575292035:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.369397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573001100453424:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.369700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27316, MsgBus: 17804 2025-04-07T14:08:53.340228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573001014964632:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.340611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e1e/r3tmp/tmp2pbjGj/pdisk_1.dat 2025-04-07T14:08:53.957241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.957326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.958735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:53.985635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27316, node 1 2025-04-07T14:08:54.189989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.190007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.190029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.190116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17804 TClient is connected to server localhost:17804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.237765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.256966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:55.274838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:08:55.489616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:08:55.674166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.763948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.341013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573018194835378:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.341122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.661446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.713938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.752561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.805367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.843893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.903113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.007530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022489803172:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.007613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.007873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022489803177:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.011183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.032313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022489803179:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.339216Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573001014964632:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.339289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24287, MsgBus: 11650 2025-04-07T14:08:53.319973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573002444717153:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.320031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e28/r3tmp/tmpS8f1Oi/pdisk_1.dat 2025-04-07T14:08:53.899053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:53.916037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.916172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.920502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24287, node 1 2025-04-07T14:08:54.102911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.102934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.102949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.103030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11650 TClient is connected to server localhost:11650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.060921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.101754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:55.121926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.296437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.488041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.561471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.034029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573019624587851:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.034138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.582450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.632001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.691632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.747800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.806599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.872326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.003314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573019624588351:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.003461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.003735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573023919555652:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.010621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.020807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573023919555654:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.385273Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573002444717153:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.385420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpdateOn+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13276, MsgBus: 61472 2025-04-07T14:08:54.989202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573005962009976:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:54.989897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001de8/r3tmp/tmpALVgdh/pdisk_1.dat 2025-04-07T14:08:55.488450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:55.493236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:55.493343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:55.505130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13276, node 1 2025-04-07T14:08:55.686352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:55.686365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:55.686389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:55.686444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61472 TClient is connected to server localhost:61472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:56.324606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:56.364936Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:56.381326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:56.563517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:56.712104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:56.783927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:58.554487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573023141880715:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.554584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.829070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.905279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.960034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.008070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.046743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.084063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.178366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573027436848518:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:59.178427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:59.181867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573027436848523:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:59.191742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:59.203794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:08:59.203966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573027436848525:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:00.013446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573005962009976:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:00.013760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27729, MsgBus: 17355 2025-04-07T14:08:53.397716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573000575049670:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.397780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df8/r3tmp/tmpINClTy/pdisk_1.dat 2025-04-07T14:08:53.988813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:53.989045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.989155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.997050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27729, node 1 2025-04-07T14:08:54.109264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.109283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.109293Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.109384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17355 TClient is connected to server localhost:17355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.013648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.054241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.247989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.526015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.662798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.849333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573017754920553:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.865792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.933080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.978826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.016288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.049580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.085816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.182293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.268285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022049888348:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.268346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.268692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573022049888353:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.276446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.289225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573022049888355:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.398502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573000575049670:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.398564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] >> KqpQueryPerf::MultiRead-QueryService [GOOD] |93.3%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplits Test command err: Trying to start YDB, gRPC: 6886, MsgBus: 10377 2025-04-07T14:00:06.651005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570738033762785:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:06.651147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fde/r3tmp/tmpGCJFl2/pdisk_1.dat 2025-04-07T14:00:07.062237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6886, node 1 2025-04-07T14:00:07.078519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:07.078590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:07.099343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:00:07.145708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:07.145739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:07.145753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:07.145838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10377 TClient is connected to server localhost:10377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:00:07.723147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:00:07.733449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:08.213572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:00:11.649739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570738033762785:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:11.649794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:00:13.389402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:13.389664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:13.389897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:13.390024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:13.390126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:13.390232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:13.390341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:13.390459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:13.390559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-04-07T14:00:13.401871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:13.402100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:13.402229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570768098537601:2306];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:13.413024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:13.413092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:13.413288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:13.413388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:13.413482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:13.413590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:13.413683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:00:13.413776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:00:13.413875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:00:13.413993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:00:13.414095Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:00:13.414190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7490570768098537658:2312];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:00:13.449866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:00:13.449947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:00:13.450156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:00:13.450260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:00:13.450356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:00:13.450451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7490570768098537660:2314];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:00:13.450550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:749057076809 ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736063:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:37.675719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736064:0, at schemeshard: 72057594046644480 2025-04-07T14:08:38.560370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736065:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:38.668158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736066:0, at schemeshard: 72057594046644480 2025-04-07T14:08:38.783266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736067:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:08:40.291501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736068:0, at schemeshard: 72057594046644480 2025-04-07T14:08:40.404727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736069:0, at schemeshard: 72057594046644480 2025-04-07T14:08:40.523323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736070:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:42.411523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736071:0, at schemeshard: 72057594046644480 2025-04-07T14:08:42.522475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736072:0, at schemeshard: 72057594046644480 2025-04-07T14:08:42.621826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736073:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:43.479180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736074:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.601590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736075:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.711835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736076:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:45.362646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736077:0, at schemeshard: 72057594046644480 2025-04-07T14:08:45.475493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736078:0, at schemeshard: 72057594046644480 2025-04-07T14:08:45.600229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736079:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:08:46.527277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736080:0, at schemeshard: 72057594046644480 2025-04-07T14:08:46.635900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736081:0, at schemeshard: 72057594046644480 2025-04-07T14:08:46.740172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736082:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:48.585489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736083:0, at schemeshard: 72057594046644480 2025-04-07T14:08:48.708316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736084:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:48.808619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736085:0, at schemeshard: 72057594046644480 2025-04-07T14:08:49.570445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736086:0, at schemeshard: 72057594046644480 2025-04-07T14:08:49.681918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736087:0, at schemeshard: 72057594046644480 2025-04-07T14:08:49.783594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736088:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:51.541048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736089:0, at schemeshard: 72057594046644480 2025-04-07T14:08:51.649927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736090:0, at schemeshard: 72057594046644480 2025-04-07T14:08:51.785447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736091:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:52.616412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736092:0, at schemeshard: 72057594046644480 2025-04-07T14:08:52.746623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736093:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:08:52.865509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736094:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:08:54.559918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736095:0, at schemeshard: 72057594046644480 2025-04-07T14:08:54.675037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736096:0, at schemeshard: 72057594046644480 2025-04-07T14:08:54.810750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736097:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () VERIFY failed (2025-04-07T14:08:54.889419Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2025-04-07T14:08:55.664844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736098:0, at schemeshard: 72057594046644480 2025-04-07T14:08:55.839942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736099:0, at schemeshard: 72057594046644480 2025-04-07T14:08:55.934373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736100:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.618307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736101:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.776506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736102:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.907309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736103:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x17ABAEA9 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:591: Execute_ @ 0x17AC22D2 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 2025-04-07T14:08:59.335591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736104:0, at schemeshard: 72057594046644480 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FD8C0D26D8F 18. ??:0: ?? @ 0x7FD8C0D26E3F 19. ??:0: ?? @ 0x15851028 >> KqpQueryPerf::IndexReplace-QueryService [GOOD] |93.3%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::IndexInsert+QueryService >> KqpQueryPerf::Insert+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15926, MsgBus: 6402 2025-04-07T14:08:53.384890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573001764009125:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.384954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e43/r3tmp/tmprwvj1I/pdisk_1.dat 2025-04-07T14:08:54.061914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:54.069876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:54.070024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:54.074536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15926, node 1 2025-04-07T14:08:54.232373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.232399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.232411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.232513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6402 TClient is connected to server localhost:6402 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.011335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.053791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.278150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.488917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.574765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.199350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573018943879807:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.199459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.585863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.664297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.720345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.780860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.852980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.931393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.009681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573023238847606:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.009751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.009947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573023238847611:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.013120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.029451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573023238847613:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.402551Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573001764009125:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.402854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:08:59.180298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.230636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.277771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::IndexUpdateOn-QueryService >> KqpQueryPerf::KvRead-QueryService >> KqpQueryPerf::Delete-QueryService >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6350, MsgBus: 27350 2025-04-07T14:08:56.837884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573014772438508:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:56.837934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001de7/r3tmp/tmpRoKogF/pdisk_1.dat 2025-04-07T14:08:57.268432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:57.272994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:57.273137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:57.277842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6350, node 1 2025-04-07T14:08:57.376734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:57.376759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:57.376793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:57.377010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27350 TClient is connected to server localhost:27350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:58.069034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:58.086241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:58.106815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:58.234295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:58.401079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:58.464344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:00.388656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573031952309167:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:00.401276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:00.428413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.504741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.539583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.639626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.715910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.759212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:00.833782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573031952309669:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:00.833895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:00.834105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573031952309675:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:00.837501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:00.851543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573031952309677:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:01.838107Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573014772438508:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:01.838181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::RangeRead-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7129, MsgBus: 16237 2025-04-07T14:08:53.339467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573002871186825:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.339602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df4/r3tmp/tmp2jTgMU/pdisk_1.dat 2025-04-07T14:08:53.893938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.894040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.902180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:53.917014Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7129, node 1 2025-04-07T14:08:54.109603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.109618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.109623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.109678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16237 TClient is connected to server localhost:16237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.020153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:55.058392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.238179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.571869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:55.679065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:57.560978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573020051057596:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.567942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.636910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.713949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.766080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.814287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.849389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:08:57.923306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.010136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573024346025394:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.010209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.010542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573024346025400:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.014263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.023294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573024346025402:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:08:58.349630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573002871186825:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.349753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:08:59.390785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.447050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:08:59.527499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService >> KqpQueryPerf::ComputeLength-QueryService >> KqpQueryPerf::Delete+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28148, MsgBus: 5961 2025-04-07T14:08:57.502973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573020277498858:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:57.503029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001de3/r3tmp/tmpTfk2Z7/pdisk_1.dat 2025-04-07T14:08:58.109446Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:58.138268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:58.138377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:58.149349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28148, node 1 2025-04-07T14:08:58.349138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:58.349157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:58.349164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:58.349240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5961 TClient is connected to server localhost:5961 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:59.040050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:59.057357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:59.225200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:59.369797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:59.468814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:01.356330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573037457369531:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:01.356436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:01.691001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:01.766377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:01.829732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:01.871912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:01.947028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:02.015183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:02.072774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573041752337329:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:02.072838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:02.073083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573041752337334:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:02.077136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:02.095257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573041752337336:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:02.508777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573020277498858:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:02.508857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService >> KqpWorkload::STOCK >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::MultiDeleteFromTable-QueryService >> KqpQueryPerf::DeleteOn+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-04-07T14:07:05.023591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:05.023898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:05.024172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018a8/r3tmp/tmpzkwWfL/pdisk_1.dat 2025-04-07T14:07:05.279946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.310667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:05.358010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:05.358105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:05.369215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:05.479944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.506523Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:05.507210Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:05.507667Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:05.507833Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:05.536551Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:05.537164Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:05.537255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:05.538634Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:05.538742Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:05.538797Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:05.539083Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:05.561386Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:05.561572Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:05.561678Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:05.561720Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:05.561752Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:05.561782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.562132Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.562184Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.562357Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:05.562460Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:05.562865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.562902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.562948Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:05.563004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.563059Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:05.563113Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.563145Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:05.563171Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:05.563198Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:05.563240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:05.563354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:05.563388Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:05.563475Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:05.563684Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:05.563730Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:05.563826Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:05.563873Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:05.563921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:05.563957Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:05.563984Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.564222Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:05.564267Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:05.564299Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:05.564332Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.564368Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:05.564386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:05.564407Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:05.564426Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.564440Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:05.565334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:05.565384Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:05.575875Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:05.575926Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.575953Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.575985Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:05.576025Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:05.754390Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.754442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.754471Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:05.754819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:05.754851Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:05.754954Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.754988Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:05.755020Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:05.755048Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:05.758719Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:05.758778Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.759590Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.759629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.759666Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.759699Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:05.759731Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.759762Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 037889 2025-04-07T14:09:04.853602Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-07T14:09:04.853635Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-07T14:09:04.853670Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-07T14:09:04.853700Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-07T14:09:04.853733Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-07T14:09:04.853764Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-07T14:09:04.853907Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-07T14:09:04.853949Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-07T14:09:04.853978Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-07T14:09:04.854036Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-07T14:09:04.854075Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-07T14:09:04.854126Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-07T14:09:04.854171Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-07T14:09:04.854211Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-07T14:09:04.854267Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-07T14:09:04.854295Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-07T14:09:04.854324Z node 15 
:TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-07T14:09:04.854356Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-07T14:09:04.854517Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-07T14:09:04.854559Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-07T14:09:04.854604Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-07T14:09:04.854653Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-07T14:09:04.854681Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-07T14:09:04.854708Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-07T14:09:04.854738Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-07T14:09:04.854777Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:09:04.854957Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-07T14:09:04.854998Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-07T14:09:04.855038Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-07T14:09:04.855074Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-07T14:09:04.855113Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-07T14:09:04.855140Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-07T14:09:04.855170Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-07T14:09:04.855210Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:09:04.855250Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:09:04.855287Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:09:04.855328Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:09:04.877387Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2025-04-07T14:09:04.877540Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-07T14:09:04.877691Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:09:04.877765Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:09:04.877883Z node 15 
:TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:09:04.877988Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:09:04.878414Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2025-04-07T14:09:04.878459Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-07T14:09:04.878511Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:09:04.878546Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:09:04.878594Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:09:04.878643Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:09:04.880705Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-04-07T14:09:04.880890Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:09:04.881059Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-07T14:09:04.881267Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:09:04.881351Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:09:04.881425Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:09:04.881483Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:09:04.881544Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-07T14:09:04.881608Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:09:04.881644Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:09:04.881670Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:09:04.881700Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:09:04.881874Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-04-07T14:09:04.881966Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-07T14:09:04.882255Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-04-07T14:09:04.882292Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:09:04.882322Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:09:04.882353Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:09:04.882409Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:09:04.882434Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:09:04.882475Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-07T14:09:04.882548Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:09:04.882715Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:09:04.884154Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [15:1020:2819], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-04-07T14:09:04.884293Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [15:1020:2819], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-04-07T14:09:04.884744Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:09:04.884803Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:09:04.884922Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:09:04.885006Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:09:04.885076Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:09:04.885145Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:09:04.885202Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:09:04.885285Z node 15 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:09:04.885391Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-04-07T14:08:39.960715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572943711777629:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:39.964555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a2a/r3tmp/tmpYXDXa0/pdisk_1.dat 2025-04-07T14:08:40.529771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:40.538308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:40.538410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-07T14:08:40.543466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22972, node 1 2025-04-07T14:08:40.712546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:40.712579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:40.712588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:40.712680Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:41.189497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.194916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:41.204900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.206904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:08:41.207093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:08:41.207125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:08:41.208688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:08:41.208721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:08:41.210652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:41.210809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.214936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034921258, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:41.214991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:08:41.215390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:08:41.217250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:41.217413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:41.217451Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:08:41.217544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:08:41.217578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:08:41.217618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:08:41.220951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:08:41.220997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:08:41.222184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:08:41.222272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:13747 2025-04-07T14:08:43.046370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572960891647802:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.046470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.493768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.494318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:08:43.494893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.494927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T14:08:43.497777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:08:43.499266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.499297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.499313Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:08:43.499523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.499541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.499550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:08:43.501548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:08:43.515406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:08:43.515523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:08:43.517740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:08:43.593179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:08:43.593241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:08:43.593381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T14:08:43.599544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.605693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034923645, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:43.605747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034923645 2025-04-07T14:08:43.605859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:08:43.609057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.609316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.609381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.610588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.610622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.610636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: ... node 1 :KQP_EXECUTER ERROR: TxId: 281474976715249. Ctx: { TraceId: 01jr89amtcaxcx9wrmjhjf9bwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0N2FmNmUtZDJkYjJjMDgtNjBmZGZjZWUtOWQ0OWIyMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.843593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715250. Ctx: { TraceId: 01jr89amtt0h8gyhksdnakba72, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlmODJkNDctNDE0ZDMxOTQtMjhkNzkzZDctNzQzMzg2ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.845850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715251. Ctx: { TraceId: 01jr89amtt92tjdpks2fbena4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjZWMzYjMtNDQxOTg2NjAtYTM4NDZmYmUtM2I4OWJjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.847122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715254. Ctx: { TraceId: 01jr89amtw6hj0rqmzzvhs02dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNiOGFhOWEtNzA4NmQzODktOWQ2Zjk1ZGUtZjYzYjgwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.847370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715253. Ctx: { TraceId: 01jr89amtw70pza5h8psd1w2wd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNmYmY4YS02NGRhMjBlYy1kZmY3NWRlNy05MDQ4ODc1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.847640Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715255. Ctx: { TraceId: 01jr89amtzb1btrqg97ydg99kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFlOTRmY2UtYzI1YzEwNjgtYjQ4OTNhMDgtYWEwYjI4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:09:03.847834Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715256. Ctx: { TraceId: 01jr89amttfs6nq03x5r99vmng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU0YjRjYTUtYmM1M2Y3M2ItOGI2ZTJiYTItM2Y0NTJlZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.848905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715252. Ctx: { TraceId: 01jr89amttfd5cycm591zz4nh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk5NWY2OTgtZmM3OTEzMDMtMzJlZDEwMWQtYzQzZDIzN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.849591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715257. Ctx: { TraceId: 01jr89amtzc1yez3btzdebzhkk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0N2FmNmUtZDJkYjJjMDgtNjBmZGZjZWUtOWQ0OWIyMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.853580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715258. Ctx: { TraceId: 01jr89amtzdhpjc6hrch0vgx3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE2YmFhNzAtZWRkOWQ3NzItNzZjYjUzMWItZmFkMWNmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.854099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715259. Ctx: { TraceId: 01jr89amv5btr1t2ak95bs0dgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ZDQ2MWQtNGY1OTM5N2MtMmQzNjhiMTMtMzIyNzY4YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923645 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T14:09:03.910209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715260. Ctx: { TraceId: 01jr89amw59871zf82ygerfq70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0N2FmNmUtZDJkYjJjMDgtNjBmZGZjZWUtOWQ0OWIyMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.910733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715261. Ctx: { TraceId: 01jr89amw52kwrg2wakza5vqwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE2YmFhNzAtZWRkOWQ3NzItNzZjYjUzMWItZmFkMWNmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.911177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715262. Ctx: { TraceId: 01jr89amw51n7k6s6e6060d7r0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNiOGFhOWEtNzA4NmQzODktOWQ2Zjk1ZGUtZjYzYjgwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:09:03.912451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715263. Ctx: { TraceId: 01jr89amw53wmmq2wp67jgzm4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNmYmY4YS02NGRhMjBlYy1kZmY3NWRlNy05MDQ4ODc1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.912930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715264. Ctx: { TraceId: 01jr89amw53v71t3fqdbgryvvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlmODJkNDctNDE0ZDMxOTQtMjhkNzkzZDctNzQzMzg2ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.914041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715265. Ctx: { TraceId: 01jr89amw5f9q42mfrymry845q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjZWMzYjMtNDQxOTg2NjAtYTM4NDZmYmUtM2I4OWJjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.921638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715266. Ctx: { TraceId: 01jr89amw57fth4srhq6qsq6b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFlOTRmY2UtYzI1YzEwNjgtYjQ4OTNhMDgtYWEwYjI4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:03.922214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715267. Ctx: { TraceId: 01jr89amwxcge77gbnkjs6kd76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ZDQ2MWQtNGY1OTM5N2MtMmQzNjhiMTMtMzIyNzY4YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923645 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) 2025-04-07T14:09:04.162173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 104656 rowCount 1381 cpuUsage 0 2025-04-07T14:09:04.165945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 182039 rowCount 2837 cpuUsage 0 2025-04-07T14:09:04.173652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 125950 rowCount 1737 cpuUsage 0 2025-04-07T14:09:04.262726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-07T14:09:04.262901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 1737, DataSize 125950 2025-04-07T14:09:04.263000Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 2], datashard# 72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 1737, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 584.038946s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-07T14:09:04.263138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 2837, DataSize 182039 2025-04-07T14:09:04.264955Z node 1 :TX_DATASHARD INFO: Started background compaction# 2 of 72075186224037890 tableId# 2 localTid# 1001, requested from [1:7490572948006745363:2279], partsCount# 2, memtableSize# 40120, memtableWaste# 15688, memtableRows# 357 2025-04-07T14:09:04.265111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T14:09:04.299157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 107745 rowCount 1737 cpuUsage 0 2025-04-07T14:09:04.354195Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 2], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 91 ms, with status# 0, next wakeup in# 583.947763s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 2 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-07T14:09:04.365932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 108377 rowCount 1737 cpuUsage 0 2025-04-07T14:09:04.404797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T14:09:04.404936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 1737, DataSize 108377 2025-04-07T14:09:04.412756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 
shards >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19951, MsgBus: 25021 2025-04-07T14:09:01.869839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573038461529983:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:01.870205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dc8/r3tmp/tmpEZKOBn/pdisk_1.dat 2025-04-07T14:09:02.404913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:02.407113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:02.407197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:02.409578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19951, node 1 2025-04-07T14:09:02.528170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:02.528210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:02.528222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:02.528323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25021 TClient is connected to server localhost:25021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:03.262628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:03.297482Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:03.306306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:03.512515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:03.771264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:03.865377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.969167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573055641400773:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:05.969309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.025815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.101433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.176928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.208040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.238748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.322843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.391602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573059936368571:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.391685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.391857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573059936368576:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.395091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:06.410743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573059936368578:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:06.860845Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573038461529983:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:06.860915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> KqpQueryPerf::KvRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16906, MsgBus: 28782 2025-04-07T14:09:02.469323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573040563380954:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:02.469385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dc7/r3tmp/tmp7xwzgW/pdisk_1.dat 2025-04-07T14:09:02.977239Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:03.009045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:03.009163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16906, node 1 2025-04-07T14:09:03.015704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:03.123085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:03.123116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:03.123123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:03.123199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28782 TClient is connected to server localhost:28782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:03.844405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:03.861911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:03.876837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.031457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.196823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.294473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.323333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573057743251845:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.323443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.589625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.672874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.702888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.806245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.837010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.878158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.942180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573057743252345:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.942245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.942849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573057743252350:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.946471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:06.972880Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:09:06.973059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573057743252352:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:07.480228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573040563380954:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:07.489984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7627, MsgBus: 24436 2025-04-07T14:09:02.053223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573037489023927:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:02.053687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dd8/r3tmp/tmpYSMGSu/pdisk_1.dat 2025-04-07T14:09:02.648468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:02.648563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:02.678226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:02.684562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7627, node 1 2025-04-07T14:09:02.843177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:02.843202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:02.843209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:02.843313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24436 TClient is connected to server localhost:24436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:03.671045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:09:03.715807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:03.924923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:09:04.144375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:04.221842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.472154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573058963861971:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.483941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.551875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.609560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.637840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.691824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.735329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.814234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.873240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573058963862464:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.873318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.873637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573058963862469:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.876426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:06.884946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573058963862471:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:07.000771Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573037489023927:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:07.000846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert+QueryService [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::Delete-QueryService [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31882, MsgBus: 12175 2025-04-07T14:09:04.329100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573050366857074:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.329143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d30/r3tmp/tmp5hBdyL/pdisk_1.dat 2025-04-07T14:09:04.827669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:04.844348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.844441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.846163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31882, node 1 2025-04-07T14:09:04.953148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:04.953173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:04.953181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:04.953255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12175 TClient is connected to server localhost:12175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:05.595491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.630480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.761819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:09:06.032075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.137227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.923193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573063251760678:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:07.923308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.168411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.197994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.247117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.276656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.316077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.387050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.470034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067546728472:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.470083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.470239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067546728477:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.473860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.488948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573067546728479:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.331904Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573050366857074:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.377386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::UpdateOn+QueryService [GOOD] >> KqpQueryPerf::Delete+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21953, MsgBus: 8254 2025-04-07T14:09:04.339139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573049888179563:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.339243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d25/r3tmp/tmpcugkOz/pdisk_1.dat 2025-04-07T14:09:04.750304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.750418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.752585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21953, node 1 2025-04-07T14:09:04.917068Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:04.917181Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:04.921793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:04.921809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:04.921816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:04.921889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:04.941935Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8254 TClient is connected to server localhost:8254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:05.712083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.743997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.915520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.134694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.243559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.917094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573062773083013:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:07.917205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.205279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.267280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.298441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.337474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.411543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.449102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.598762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067068050818:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.598849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.599161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067068050823:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.604491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.621345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573067068050825:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.340779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573049888179563:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.340853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexInsert-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29089, MsgBus: 5924 2025-04-07T14:09:04.560998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573050320278171:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.561324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d14/r3tmp/tmpSf16oL/pdisk_1.dat 2025-04-07T14:09:05.120094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:05.124342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:05.129809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29089, node 1 2025-04-07T14:09:05.156599Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:05.157030Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:05.164236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:05.258804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.258832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.258842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.258933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5924 TClient is connected to server localhost:5924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:05.928085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.963859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:09:06.136593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.312596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.373236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.686487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067500148904:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.711736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.745604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.788919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.825839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.855159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.916427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.988158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.054667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573071795116698:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.054745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.054908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573071795116703:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.058201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:09.066785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573071795116705:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.558612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573050320278171:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.558668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28809, MsgBus: 20423 2025-04-07T14:09:04.385109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573049847472916:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.385164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d21/r3tmp/tmph9WaCJ/pdisk_1.dat 2025-04-07T14:09:04.965528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:04.987401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.987520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.991339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28809, node 1 2025-04-07T14:09:05.174347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.174371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.174380Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.174458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20423 TClient is connected to server localhost:20423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:05.827708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:05.857122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:05.868154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.003327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.204410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.296187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.367999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067027343588:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.368866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.605370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.640268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.669682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.758339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.797710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.870530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.948533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067027344094:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.948606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.948926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067027344099:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.952526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.964620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573067027344101:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.383603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573049847472916:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.383674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14040, MsgBus: 2024 2025-04-07T14:09:04.481865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573049682313718:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.481917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d15/r3tmp/tmpmzkkpt/pdisk_1.dat 2025-04-07T14:09:04.960038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:04.995541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.995686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.996932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14040, node 1 2025-04-07T14:09:05.160322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.160341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.160353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.160457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2024 TClient is connected to server localhost:2024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:05.866430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:05.882626Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:05.905885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.095476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.311525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.400113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.505234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573066862184400:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.519611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.544465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.580037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.619246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.656222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.697128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.807445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.908822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573066862184902:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.908919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.912291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573066862184907:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.916506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.930754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573066862184909:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.485459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573049682313718:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.485505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23814, MsgBus: 28774 2025-04-07T14:09:02.779639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573039599778401:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:02.779677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001dbb/r3tmp/tmpCSi0Eg/pdisk_1.dat 2025-04-07T14:09:03.362105Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:03.366638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:03.367822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:03.369633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23814, node 1 2025-04-07T14:09:03.452637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:03.452675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:03.452682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:03.452772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28774 TClient is connected to server localhost:28774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:04.031532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:04.061430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:04.083425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.234467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.379159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:04.462720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.302879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573056779649308:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.348173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.539663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.570316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.654513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.695663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.736765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.828905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:06.916223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573056779649817:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.916300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.916547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573056779649822:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:06.920321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:06.945535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573056779649824:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:07.785696Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573039599778401:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:07.785782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:08.056428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.102384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.185023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29441, MsgBus: 12293 2025-04-07T14:09:04.833335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573049026389921:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.833590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d10/r3tmp/tmp9N1w0Y/pdisk_1.dat 2025-04-07T14:09:05.356187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:05.375759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:05.377170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:05.401324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29441, node 1 2025-04-07T14:09:05.533963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.533989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.533997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.534091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12293 TClient is connected to server localhost:12293 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:06.267663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.301265Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:06.310589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.475471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.700471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.796791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.604011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573066206260659:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.604080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.870577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.940506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.006060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.065334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.095003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.153212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.241841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070501228454:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.241909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.242275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070501228459:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.246170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:09.258407Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:09:09.258718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573070501228461:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.883667Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573049026389921:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.883801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::UpdateOn-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31813, MsgBus: 2158 2025-04-07T14:09:05.545976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573053211218830:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:05.546019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cfd/r3tmp/tmpf3IAgA/pdisk_1.dat 2025-04-07T14:09:06.081919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:06.082015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:06.082965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:06.132962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31813, node 1 2025-04-07T14:09:06.345080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:06.345099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:06.345109Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:06.345180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2158 TClient is connected to server localhost:2158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:07.063602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:07.100896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.251194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.399941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.505496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.296936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070391089523:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.297043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.550443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.626422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.655431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.684945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.759610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.797631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.855182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070391090029:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.855254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.855606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070391090034:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.858559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:09.867727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573070391090036:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:10.546879Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573053211218830:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:10.546942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexInsert+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24680, MsgBus: 31108 2025-04-07T14:09:05.093892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573054734372854:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:05.094132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d04/r3tmp/tmp0hZyM3/pdisk_1.dat 2025-04-07T14:09:05.649802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:05.670015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:05.670145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:05.672007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24680, node 1 2025-04-07T14:09:05.848722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.848745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.848751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.848830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31108 TClient is connected to server localhost:31108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:06.639908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:06.690442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.878885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.085291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.166669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.949830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067619276446:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.949919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.235243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.274605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.320577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.354890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.400399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.474695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.575533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573071914244247:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.575611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.579388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573071914244252:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.583370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:09.600313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573071914244254:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:10.094608Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573054734372854:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:10.094678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::ComputeLength+QueryService [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13160, MsgBus: 14514 2025-04-07T14:09:04.332651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573050459017066:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:04.332796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d2a/r3tmp/tmpsHH624/pdisk_1.dat 2025-04-07T14:09:04.875441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:04.887637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.887738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.894868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13160, node 1 2025-04-07T14:09:05.101204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:05.101228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:05.101234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:05.101330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14514 TClient is connected to server localhost:14514 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:05.787528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:05.805180Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:05.809937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.953388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.128814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:06.196517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.030703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573063343920520:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.038699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.086526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.133564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.164505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.207861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.238590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.280507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.354271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067638888311:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.354413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.360264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067638888317:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.364477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.377171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:09:08.378731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573067638888319:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:09.324931Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573050459017066:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:09.325014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:09.577234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.615779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.659115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::DeleteOn-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20585, MsgBus: 7675 2025-04-07T14:09:03.837964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573046120390549:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:03.838021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001d7d/r3tmp/tmpgnDLuA/pdisk_1.dat 2025-04-07T14:09:04.358131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:04.358261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:04.360166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20585, node 1 2025-04-07T14:09:04.390057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:04.577128Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:04.577150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:04.577159Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:04.577238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7675 TClient is connected to server localhost:7675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:05.279843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.324818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:05.339972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.490305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.663863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:05.773297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.993704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573063300261215:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:07.994041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.037957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.088352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.159063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.200177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.229506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.294956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.361092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067595229012:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.361201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.361486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573067595229017:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:08.364980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:08.380440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573067595229019:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:08.840754Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573046120390549:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:08.840822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:09.660043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.750063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.837419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::DeleteOn+QueryService [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] >> KqpQueryPerf::Update-QueryService >> KqpQueryPerf::IndexDeleteOn-QueryService >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::IndexReplace+QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20744, MsgBus: 29045 2025-04-07T14:09:07.089224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573061730944688:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:07.090415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ce8/r3tmp/tmpSmqpM1/pdisk_1.dat 2025-04-07T14:09:07.573887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:07.574007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:07.579485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:07.609642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20744, node 1 2025-04-07T14:09:07.626827Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:07.626985Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:07.817128Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:07.817148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:07.817155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:07.817237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29045 TClient is connected to server localhost:29045 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:08.564627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.593270Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:08.612165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.775689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.964610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.069829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:10.865159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573074615848151:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.885296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.214724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.245997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.337845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.373753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.406635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.442392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.508037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573078910815947:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.508224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.508520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573078910815952:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.511727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:11.520579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573078910815954:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:12.111086Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573061730944688:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:12.111232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> KqpQueryPerf::RangeRead+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29954, MsgBus: 62188 2025-04-07T14:09:07.237720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573060558124388:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ce3/r3tmp/tmpNYMECq/pdisk_1.dat 2025-04-07T14:09:07.552712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:09:07.799776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:07.801119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:07.801252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:07.804470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29954, node 1 2025-04-07T14:09:07.924486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:07.924508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:07.924514Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:07.924593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62188 TClient is connected to server localhost:62188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:08.591672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.633754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:08.644261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:09:08.809783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:09.105759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.208329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:10.990855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573073443027823:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.990981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.314314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.355984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.411135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.500478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.575225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.620362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.678415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573077737995619:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.678496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.678724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573077737995624:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.683998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:11.702427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573077737995626:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:12.192799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573060558124388:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:12.211206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2675, MsgBus: 24230 2025-04-07T14:09:08.039170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573067515937434:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:08.044984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cdc/r3tmp/tmpfutsbN/pdisk_1.dat 2025-04-07T14:09:08.586619Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:08.604275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:08.604396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:08.606360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2675, node 1 2025-04-07T14:09:08.713219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:08.713251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:08.713259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:08.713355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24230 TClient is connected to server localhost:24230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:09.431002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:09.445869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:09.455159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.613646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.811436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:09.900421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:11.716352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573080400840891:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.716494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:12.005522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.042082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.074625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.155472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.225245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.281831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:12.373873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573084695808692:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:12.373998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:12.374377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573084695808698:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:12.378767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:12.392026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573084695808700:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:13.040791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573067515937434:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:13.040866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Replace-QueryService >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 62252, MsgBus: 63842 2025-04-07T14:09:06.334378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573058460544331:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:06.334597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cf7/r3tmp/tmpgVsCsy/pdisk_1.dat 2025-04-07T14:09:06.768191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:06.811099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:06.811270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62252, node 1 2025-04-07T14:09:06.814592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:06.933836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:06.933855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:06.933862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:06.933937Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63842 TClient is connected to server localhost:63842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:07.723894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:07.771908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:07.925502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.108099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:08.193305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:10.084993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573075640415055:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.085108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.493233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.522301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.576051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.609488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.688926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.737713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.847411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573075640415561:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.847506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.847775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573075640415566:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.860402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:10.903520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573075640415568:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:11.335126Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573058460544331:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:11.335214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:11.893294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.931593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.970515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13673, MsgBus: 61273 2025-04-07T14:07:08.614564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572551805650743:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:08.614651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103f/r3tmp/tmp2lnAHm/pdisk_1.dat 2025-04-07T14:07:08.911408Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13673, node 1 2025-04-07T14:07:08.965669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:08.965689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:08.965695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:08.965775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:08.970637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:08.970761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:08.972390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61273 TClient is connected to server localhost:61273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:09.407735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.429983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.523600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.655963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.714526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:11.178406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572564690554363:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.178502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.348214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.369302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.391945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.416846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.444978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.477779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.556441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572564690554862:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.556541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.556551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572564690554867:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.559352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:11.567397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572564690554869:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:12.490063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.511514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.532526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.557515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.594950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.697074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.721872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.745299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.769860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.790137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.852332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.913544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.939610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.390584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:13.465335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.530448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.554311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.581599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.608339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.614769Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572551805650743:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:13.614835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:13.636503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:03.255773Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:03.255790Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:03.255828Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:03.255837Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:03.255903Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:03.255916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:03.255935Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:03.255949Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:03.255975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:03.255988Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:03.256022Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:03.256024Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:03.256376Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:03.256428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:03.256630Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:03.257536Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:03.257747Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:03.257792Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:03.257998Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:03.258044Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:03.258156Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:03.258184Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:03.258811Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:03.258849Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:03.258946Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:03.258976Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:03.259145Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:03.259175Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:03.259270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:03.259323Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:03.259398Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:03.259428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:03.259469Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:03.259520Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:03.259875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:03.259945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:03.260160Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:03.260205Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:03.260353Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:03.260384Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:03.260413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 
2025-04-07T14:09:03.260493Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:03.260582Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:03.260615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:03.260779Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:03.260818Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:03.261318Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:03.261361Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:03.308634Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:03.308758Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:03.308980Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:03.309024Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:03.309132Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:03.309175Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6692, MsgBus: 15458 2025-04-07T14:09:10.350946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573073450343752:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:10.351602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ccf/r3tmp/tmp0IsZk6/pdisk_1.dat 2025-04-07T14:09:10.902437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:10.904013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:10.904093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:10.912079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6692, node 1 2025-04-07T14:09:11.005178Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:11.005214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:11.005231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:11.005326Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15458 TClient is connected to server localhost:15458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:11.595790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:11.623424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:11.829304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:12.019236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:12.096911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:14.224640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573090630214633:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.235362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.272184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.316401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.374219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.402767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.475655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.524092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.607818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573090630215137:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.607886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.608098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573090630215142:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.612113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:14.636490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573090630215144:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:15.350672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573073450343752:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:15.350784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22361, MsgBus: 20343 2025-04-07T14:07:12.133718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572566954241880:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:12.133838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001032/r3tmp/tmpXxJaDx/pdisk_1.dat 2025-04-07T14:07:12.379827Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22361, node 1 2025-04-07T14:07:12.384986Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:07:12.385127Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:07:12.416122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:12.416146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:12.416157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:12.416240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:12.460386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:12.460510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:12.462228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20343 TClient is connected to server localhost:20343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:07:12.827579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:12.849301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:12.951646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:13.073514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:13.150903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:14.573437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572575544178196:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:14.573570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:14.784337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.815819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.839446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.866956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.911952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.940256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.981646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572575544178688:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:14.981706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:14.982068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572575544178693:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:14.985273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:14.994428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572575544178695:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:15.999124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.023211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.043120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.066703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.087567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.189559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.253016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.287811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.317249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.342135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.419219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.490521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:16.515105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.024764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:17.053315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.079285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.106221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.129959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:17.133906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572566954241880:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:17.133965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:17.157006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:06.942142Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:06.942173Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:06.942246Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:06.942279Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:06.942411Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:06.942444Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:06.942628Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:06.942650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:06.942655Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:06.942688Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:06.942755Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:06.942778Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:06.942785Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:06.942819Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:06.942995Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:06.943029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:06.948798Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:06.948873Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:06.948957Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:06.948991Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:06.949040Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:06.949072Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:06.949490Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:06.949536Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:06.949738Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:06.949770Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:06.949920Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:06.949953Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:06.950146Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:06.950194Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:06.950324Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:06.950354Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:06.971931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:06.972009Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:06.972118Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:06.972168Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:06.972364Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:06.972399Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:06.972496Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:06.972533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:06.972604Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:06.972637Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:06.976806Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:06.976886Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:06.977310Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:06.977360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:06.977584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:06.977622Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:06.977785Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:06.977823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:06.978026Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:06.978059Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:06.978177Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:06.978205Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] >> KqpQueryPerf::UpdateOn-QueryService [GOOD] >> TSchemeShardServerLess::StorageBilling [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 12663, MsgBus: 4864 2025-04-07T14:09:10.265402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573074876815241:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:10.265459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cda/r3tmp/tmppDYa9u/pdisk_1.dat 2025-04-07T14:09:10.838285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:10.884346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:10.884451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:10.892166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12663, node 1 2025-04-07T14:09:10.995339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:10.995357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:10.995366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:10.995457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4864 TClient is connected to server localhost:4864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:11.559756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:11.574484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:11.586050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:11.754897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:11.952784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:12.019276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:13.901743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573087761718844:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:13.901835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.093805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.118632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.150332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.189732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.217376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.258013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:14.343270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573092056686638:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.343324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573092056686643:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.343400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:14.347367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:14.358944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573092056686645:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:15.266514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573074876815241:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:15.266570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:15.549680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:15.626745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:15.669120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> KqpQueryPerf::DeleteOn-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:08:39.603056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:08:39.603148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.603197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:08:39.603226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:08:39.603258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:08:39.603285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:08:39.603337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:08:39.604176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:08:39.666881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:08:39.666928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:39.676519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:08:39.676621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:08:39.676763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:08:39.682075Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:08:39.682280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:39.683807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.684032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:08:39.688225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.694505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:08:39.694551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.694663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:08:39.694765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.700961Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:08:39.831424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:08:39.831624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.831825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:08:39.831987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:08:39.832033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.834484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.834615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:08:39.834893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.834946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:08:39.835025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2025-04-07T14:08:39.835054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:08:39.839846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.839912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:08:39.839944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:08:39.843727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.843775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.843809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.843868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.847478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:08:39.850706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:08:39.850905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:08:39.851826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:08:39.851988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:08:39.852036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.852229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:08:39.852268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:08:39.852422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.852480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:08:39.854378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-07T14:08:39.854566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:08:39.854781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:08:39.854819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:08:39.854891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:08:39.854919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.854967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:08:39.855014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:08:39.855041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:08:39.855090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:08:39.855144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:08:39.855202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:08:39.855233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:08:39.857069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.857179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:08:39.857207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:08:39.857238Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:08:39.857282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:08:39.857383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
HARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409549 2025-04-07T14:09:19.051473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:09:19.051505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-07T14:09:19.051616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-04-07T14:09:19.051738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-07T14:09:19.051873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-07T14:09:19.051935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-07T14:09:19.055644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-07T14:09:19.058618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-07T14:09:19.058861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-07T14:09:19.058893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-07T14:09:19.059016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-04-07T14:09:19.059172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-07T14:09:19.059201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:649:2564], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-04-07T14:09:19.059230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:649:2564], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-04-07T14:09:19.059601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-07T14:09:19.059638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-04-07T14:09:19.059706Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-07T14:09:19.059737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-04-07T14:09:19.059776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-07T14:09:19.060549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-07T14:09:19.060647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-07T14:09:19.060704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-07T14:09:19.060740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-04-07T14:09:19.060778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-04-07T14:09:19.062107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-07T14:09:19.062206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-07T14:09:19.062242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-07T14:09:19.062273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:09:19.062303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-04-07T14:09:19.062401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-07T14:09:19.069717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-07T14:09:19.069785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-04-07T14:09:19.070029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-07T14:09:19.070129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-07T14:09:19.070154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:09:19.070189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-07T14:09:19.070266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:799:2678] message: TxId: 107 2025-04-07T14:09:19.070298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:09:19.070325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T14:09:19.070347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T14:09:19.070406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-04-07T14:09:19.071218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-04-07T14:09:19.072147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186233409549, cookie: 107 2025-04-07T14:09:19.073070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:09:19.073130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2187:4036] TestWaitNotification: OK eventTxId 107 2025-04-07T14:09:19.094012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 763 RawX2: 4294969951 } TabletId: 72075186233409552 State: 4 2025-04-07T14:09:19.094138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-04-07T14:09:19.096116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-04-07T14:09:19.096707Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-04-07T14:09:19.098779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-04-07T14:09:19.099001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-04-07T14:09:19.099486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-04-07T14:09:19.099537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-04-07T14:09:19.099579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-07T14:09:19.102448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-04-07T14:09:19.102515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-04-07T14:09:19.102754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-04-07T14:09:19.228557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-07T14:09:19.228646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-07T14:09:19.228716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-07T14:09:19.228801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-07T14:09:19.228828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-07T14:09:19.228856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-07T14:09:19.228889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-07T14:09:19.228919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-07T14:09:19.228942Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-07T14:09:19.299624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:09:19.299979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.028000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-04-07T14:09:19.302285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-04-07T14:09:19.302433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson >> KqpQueryPerf::IndexInsert-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21817, MsgBus: 24925 2025-04-07T14:09:12.902724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573085567594293:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:12.902770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c71/r3tmp/tmpBLDRoR/pdisk_1.dat 2025-04-07T14:09:13.349260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:13.349587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:13.349698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:13.352519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21817, node 1 2025-04-07T14:09:13.509263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:13.509295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:13.509306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:13.509410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24925 TClient is connected to server localhost:24925 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:14.290659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.310745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:14.320391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.472845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.679844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.760367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.652279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573102747465189:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.652404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.933959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.971849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.003963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.049634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.131584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.218113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.287099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573107042432990:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.287204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.287348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573107042432995:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.290795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:17.302952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573107042432997:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:17.908179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573085567594293:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:17.908249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Update-QueryService [GOOD] |93.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31271, MsgBus: 25408 2025-04-07T14:09:12.765513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573085798164062:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:12.974671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c93/r3tmp/tmpyzc1Vw/pdisk_1.dat 2025-04-07T14:09:13.239618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:13.239724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:13.241471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:13.261163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31271, node 1 2025-04-07T14:09:13.389117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:13.389138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:13.389145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:13.389232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25408 TClient is connected to server localhost:25408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:14.087429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.108102Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:14.120546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.308258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.485673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.562996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.626096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573102978034751:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.626244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.970645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.009858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.054418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.137920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.205166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.275981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.337251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573107273002547:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.337356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.338585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573107273002552:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:17.342570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:17.357693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573107273002554:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:17.733411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573085798164062:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:17.733497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1973, MsgBus: 28008 2025-04-07T14:07:10.386216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572562067871475:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:10.386992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001036/r3tmp/tmpP5pjC9/pdisk_1.dat 2025-04-07T14:07:10.695135Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1973, node 1 2025-04-07T14:07:10.727318Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:10.727348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:10.727359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:10.727474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:10.742683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:10.742811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:10.744791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28008 TClient is connected to server localhost:28008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:11.132869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:11.147620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:11.246707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:11.383892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:11.461167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:12.790385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572570657807781:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:12.790504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:13.034301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.056736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.086185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.109338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.135874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.170288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.247205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572574952775575:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:13.247287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:13.247438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572574952775581:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:13.250404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:13.284794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572574952775583:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:14.227297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.248511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.315606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.342076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.369232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.480580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.510201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.537894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.563085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.588399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.616069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.645371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:14.669542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.133289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:15.163026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.194818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.219612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.245009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.268270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.297007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.329007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T14:07:15.35847 ... 72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:07.979337Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:07.979376Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:07.979573Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:07.979605Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:07.979742Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:07.979776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:07.979945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:07.979980Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:07.981332Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:07.981368Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:07.981833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:07.981874Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:07.981962Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:07.981995Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:07.982156Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:07.982188Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:07.982276Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:07.982312Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:07.982381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:07.982408Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:07.982450Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:07.982482Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:07.982807Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:07.982846Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:07.983023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:07.983050Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:07.983188Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:07.983215Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:07.983386Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:07.983417Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:07.983537Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:07.983603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:07.984108Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:07.984144Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:07.984225Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:07.984252Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:07.984403Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:07.984431Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:07.984522Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:07.984552Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:07.984620Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:07.984649Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:07.989729Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:07.989786Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:07.990193Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:07.990240Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:07.990415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:07.990450Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:07.990574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:07.990610Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:07.990764Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:07.990797Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:07.990897Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:07.990929Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TestYmqHttpProxy::TestSendMessage >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> TestKinesisHttpProxy::MissingAction >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 62911, MsgBus: 23735 2025-04-07T14:09:14.234801Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573092264773286:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.234854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c37/r3tmp/tmp8uKlRn/pdisk_1.dat 2025-04-07T14:09:14.706520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:14.745846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:14.745956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:14.748726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62911, node 1 2025-04-07T14:09:14.915908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:14.915936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:14.915945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:14.916189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23735 TClient is connected to server localhost:23735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:15.718855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:15.747101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:15.755617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:15.867767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:16.043753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.119206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.241424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573105149676909:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.258020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.290653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.340523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.372602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.426420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.454616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.504804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.582925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573109444644702:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.583034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.583260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573109444644707:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.587199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:18.599785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573109444644709:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:19.236341Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573092264773286:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.236409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Replace-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 11945, MsgBus: 26006 2025-04-07T14:09:12.381188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573083963707795:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:12.381235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c9e/r3tmp/tmpEzB7UN/pdisk_1.dat 2025-04-07T14:09:12.823995Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:12.849242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:12.849371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11945, node 1 2025-04-07T14:09:12.851090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:12.933125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:12.933146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:12.933154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:12.933245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26006 TClient is connected to server localhost:26006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:13.604813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:13.646242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:13.792023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.006106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:14.094852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.109532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573096848611198:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.109663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.160770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.207825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.241339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.326062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.400918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.499774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.552490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573101143579004:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.552560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.552825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573101143579009:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:16.555604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:16.565358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573101143579011:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:17.381580Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573083963707795:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:17.381636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:17.648856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.724931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.762192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3779, MsgBus: 62564 2025-04-07T14:09:14.555919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573091861912932:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.555951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001c00/r3tmp/tmpfGFMej/pdisk_1.dat 2025-04-07T14:09:15.089717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.089825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.100809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3779, node 1 2025-04-07T14:09:15.138704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:15.167476Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:15.168836Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:15.358363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.358383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.358390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.358487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62564 TClient is connected to server localhost:62564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.085588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.115349Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:16.124637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.295382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.492351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.585008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.235145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573109041783844:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.235491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.442614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.479028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.511099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.562667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.635274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.708770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.763767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573109041784344:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.763826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.766207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573109041784349:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.770369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:18.781613Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:09:18.781870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573109041784351:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:19.557017Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573091861912932:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.557094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15550, MsgBus: 24614 2025-04-07T14:09:14.867464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573091021704813:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.871402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b92/r3tmp/tmpSBRziH/pdisk_1.dat 2025-04-07T14:09:15.389179Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:15.393011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.393133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.395593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15550, node 1 2025-04-07T14:09:15.582194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.582223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.582231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.583213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24614 TClient is connected to server localhost:24614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.275487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:16.305401Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:16.314962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.507421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.661761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.732389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.450369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108201575687:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.450467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.694573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.726733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.764437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.807817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.834827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.917579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.978111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108201576186:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.978176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.978642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108201576191:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.982469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:19.000377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573108201576193:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:19.869434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573091021704813:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.869601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10098, MsgBus: 25781 2025-04-07T14:09:14.997998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573093834408153:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.998046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b90/r3tmp/tmpX9aaKK/pdisk_1.dat 2025-04-07T14:09:15.492703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:15.515227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.515340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.516982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10098, node 1 2025-04-07T14:09:15.620579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.620599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.620605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.620742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25781 TClient is connected to server localhost:25781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.405417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:16.431985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.590447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.790579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.886758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.287541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111014279050:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.287648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.516743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.544417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.573122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.644795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.690395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.771189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.826007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111014279553:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.826056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.826163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111014279558:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.831191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:18.840612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573111014279560:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:19.998531Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573093834408153:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.998626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21089, MsgBus: 25852 2025-04-07T14:09:15.342221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573095085247002:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:15.342444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b89/r3tmp/tmpoJMkYp/pdisk_1.dat 2025-04-07T14:09:15.931262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.931379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.954159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:15.975924Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21089, node 1 2025-04-07T14:09:16.186555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:16.186580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:16.186588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:16.186685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25852 TClient is connected to server localhost:25852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.906666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:16.928523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.090053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.254663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.360907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.926649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573107970150459:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.926757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.190027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.236945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.281787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.311186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.344199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.383438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.436353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573112265118248:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.436426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.436606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573112265118253:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.439992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:19.450421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573112265118255:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:20.360467Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573095085247002:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:20.369335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2025, MsgBus: 4509 2025-04-07T14:09:15.810041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573099093257691:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:15.810112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b84/r3tmp/tmpbPTQ4Y/pdisk_1.dat 2025-04-07T14:09:16.270341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:16.295240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:16.295367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:16.324279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2025, node 1 2025-04-07T14:09:16.417955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:16.417974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:16.417990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:16.418062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4509 TClient is connected to server localhost:4509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:17.181928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:17.201552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:17.209949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.376222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.545945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.632947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:19.190430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573116273128445:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.190507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.449812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.479309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.507275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.537770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.607996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.660130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.750376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573116273128942:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.750449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.750680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573116273128947:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.754785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:19.766414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573116273128949:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:20.810893Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573099093257691:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:20.810973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14364, MsgBus: 63262 2025-04-07T14:09:14.800410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573094614320774:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.800603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b94/r3tmp/tmpttgTuH/pdisk_1.dat 2025-04-07T14:09:15.319746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:15.325080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.325192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.327343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14364, node 1 2025-04-07T14:09:15.495064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.495084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.495091Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.495175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63262 TClient is connected to server localhost:63262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.146051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:16.176471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.330089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.525091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.624735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.494986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111794191518:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.495087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.709440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.738624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.814937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.845332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.879808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.948506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.996971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111794192020:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.997074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.997318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573111794192025:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.001159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:19.011538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573111794192027:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:09:19.804286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573094614320774:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.804377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TraverseDatashard::TraverseTwoTables |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables >> TraverseDatashard::TraverseOneTableServerless >> AnalyzeDatashard::AnalyzeOneTable >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> KqpQueryPerf::IndexReplace+QueryService [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2110, MsgBus: 3591 2025-04-07T14:09:14.552864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573091747518233:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:14.552984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba1/r3tmp/tmpKLto8N/pdisk_1.dat 2025-04-07T14:09:15.007400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.007498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.012588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:15.041253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2110, node 1 2025-04-07T14:09:15.131472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.131488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.131496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.131598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3591 TClient is connected to server localhost:3591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:15.868171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:09:15.895620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.084899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:16.268192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.324340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.053666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108927388965:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.053818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.287571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.361209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.398066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.472035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.504077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.548301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.591883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108927389467:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.591958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.592116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108927389472:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.594951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:18.607755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573108927389474:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:19.566071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573091747518233:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:19.567245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:19.829856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.885323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.920530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15841, MsgBus: 64269 2025-04-07T14:09:15.040536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573095386972240:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:15.040588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b8e/r3tmp/tmpjuTIvh/pdisk_1.dat 2025-04-07T14:09:15.645104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:15.645207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:15.651126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:15.669038Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15841, node 1 2025-04-07T14:09:15.852828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:15.852854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:15.852876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:15.852979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64269 TClient is connected to server localhost:64269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:16.600523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.631683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:16.802506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.011774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:17.077074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.549874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573108271875736:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.549984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:18.749173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.776476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.820035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.843303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.868604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.916799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:19.004864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573112566843530:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.004946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.005173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573112566843535:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:19.010055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:19.023598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573112566843537:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:20.040688Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573095386972240:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:20.040769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:20.172489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.258460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.299409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27529, MsgBus: 3507 2025-04-07T14:09:17.018702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573107568211874:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:17.018866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b81/r3tmp/tmp4EUAcA/pdisk_1.dat 2025-04-07T14:09:17.540918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:17.541048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:17.543368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:17.565374Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27529, node 1 2025-04-07T14:09:17.698477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:17.698507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:17.698518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:17.698613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3507 TClient is connected to server localhost:3507 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:18.262733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.275089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:18.291198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.431247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.613889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:18.694936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:20.444275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573120453115333:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:20.444415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:20.696944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.724610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.764779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.809290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.851389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.896601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.995447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573120453115834:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:20.995547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:20.995740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573120453115839:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:20.998949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:09:21.007726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573120453115841:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:09:22.027843Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573107568211874:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:22.028290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AnalyzeColumnshard::AnalyzeStatus >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution >> TraverseColumnShard::TraverseServerlessColumnTable |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> AnalyzeColumnshard::AnalyzeDeadline >> AnalyzeColumnshard::Analyze |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestYmqHttpProxy::TestReceiveMessage >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> AnalyzeColumnshard::AnalyzeMultiOperationId >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-04-07T14:08:39.910176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572942090528863:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:39.910262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a22/r3tmp/tmp8IdsAo/pdisk_1.dat 2025-04-07T14:08:40.437438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:40.437533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:40.445950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:08:40.485232Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23665, node 1 2025-04-07T14:08:40.517452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:08:40.526541Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:08:40.526573Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:08:40.703940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:40.703962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:40.703969Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:40.704066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:08:40.712647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:40.713384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:40.713606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:08:40.716852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:40.717342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:40.717362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:40.717404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:08:40.717470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:41.201878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.207059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:41.208206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.209815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:08:41.209982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:08:41.210009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:08:41.214904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:08:41.214932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:08:41.215476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:41.220870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.230560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034921272, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:41.230596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:08:41.230778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:08:41.233763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:41.233992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:41.234069Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:08:41.234174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:08:41.234233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:08:41.234281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:08:41.236390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:08:41.236449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:08:41.236472Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:08:41.236550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:4078 2025-04-07T14:08:43.141547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572959270398966:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.141687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.493819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.494290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:08:43.494796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.494831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.501572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T14:08:43.501773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.501957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.502009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:08:43.502869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.502903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.502925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:08:43.503147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.503164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.503173Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:08:43.504369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:08:43.516146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:08:43.516265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:08:43.521629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:08:43.584860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:08:43.584887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:08:43.584972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T14:08:43.586961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCre ... : , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAyNGFjNjktNjYyNzhhMjgtYzQwMWM1NGItZGQ4MTcwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.282028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731911. Ctx: { TraceId: 01jr89bcpp9c7yj4k9f0epvbbh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5NzI2ZTUtNDM5MzI5YTEtMjJhZTU5MDAtZTEwMDdiNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.282496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731912. Ctx: { TraceId: 01jr89bcpn1c0w1tbjqc49nmgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E3ZTNmMzgtODdhMzZjMWItZjY3ZDUyYzktYjA4YzE3N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.282500Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731913. Ctx: { TraceId: 01jr89bcpn6n844r7tbg3n5y2y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI0OTc2NGQtZTM5MTM5MTctMzdkMWFhZjktOWI1YzJiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.283549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731914. Ctx: { TraceId: 01jr89bcpn4hx36db2ex3zyfem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZkODY4NTItMmQxOWI3NmMtYWNlNmI0NDQtNjUzNTFlZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.283754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731915. Ctx: { TraceId: 01jr89bcpn06c20339gcjhcd9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDlkOTJjNi03M2YwMDM3Ny1kODc0MjQ5OC1lZjYxMjgzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.289025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731916. Ctx: { TraceId: 01jr89bcpwd4axwa2gs6bbgnma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IyZmUzZTMtMWI5OWMyOS04ZjY4YjZlZC1hNjY2ZWMwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.289099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731917. Ctx: { TraceId: 01jr89bcpv2zfm197h9ac5sr9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWVkMzhiZDctYTUwYjliODUtYzFlOGQ4MTUtZDBjM2NlNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.290039Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731918. Ctx: { TraceId: 01jr89bcpvddq8nsm1ja5898jd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE5YzU2M2YtMTk2OTc2MS00OGQ2YzVjZC00YWRjYjVmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.291509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731919. Ctx: { TraceId: 01jr89bcpx06b34x9g4w3eyzzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQwMjc5OTAtYmU1NWRmOGItN2Q5N2VkYzMtMTYwZjc1YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.294149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731920. 
Ctx: { TraceId: 01jr89bcq4e8bz8s2kyn0sp6t8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAyNGFjNjktNjYyNzhhMjgtYzQwMWM1NGItZGQ4MTcwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: 2025-04-07T14:09:28.294701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731921. Ctx: { TraceId: 01jr89bcq59r880j5vmhe4e790, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI0OTc2NGQtZTM5MTM5MTctMzdkMWFhZjktOWI1YzJiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.294725Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731922. Ctx: { TraceId: 01jr89bcq5bhqn79fx4nzgamhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5NzI2ZTUtNDM5MzI5YTEtMjJhZTU5MDAtZTEwMDdiNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.295184Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731923. Ctx: { TraceId: 01jr89bcq5e48c8jk522h7tj6p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E3ZTNmMzgtODdhMzZjMWItZjY3ZDUyYzktYjA4YzE3N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T14:09:28.298856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731924. Ctx: { TraceId: 01jr89bcq93myb8g8x2zs5ydsd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQwMjc5OTAtYmU1NWRmOGItN2Q5N2VkYzMtMTYwZjc1YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.298884Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731925. Ctx: { TraceId: 01jr89bcq91dnch20j85bke9jg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IyZmUzZTMtMWI5OWMyOS04ZjY4YjZlZC1hNjY2ZWMwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.299292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731926. Ctx: { TraceId: 01jr89bcq94ah6eycjw1mtgyjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWVkMzhiZDctYTUwYjliODUtYzFlOGQ4MTUtZDBjM2NlNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.307340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731929. Ctx: { TraceId: 01jr89bcqb4gafddkwf573fm5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZkODY4NTItMmQxOWI3NmMtYWNlNmI0NDQtNjUzNTFlZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.310355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731927. 
Ctx: { TraceId: 01jr89bcqb297p4kd21dzmta71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE5YzU2M2YtMTk2OTc2MS00OGQ2YzVjZC00YWRjYjVmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.310958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731928. Ctx: { TraceId: 01jr89bcqbdbe5x24t1ecfgs6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDlkOTJjNi03M2YwMDM3Ny1kODc0MjQ5OC1lZjYxMjgzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.312162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731931. Ctx: { TraceId: 01jr89bcqh3wefmmrcbzjcxvdn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5NzI2ZTUtNDM5MzI5YTEtMjJhZTU5MDAtZTEwMDdiNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.312578Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731930. Ctx: { TraceId: 01jr89bcqbcjp9rjz18y2yb9b9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAyNGFjNjktNjYyNzhhMjgtYzQwMWM1NGItZGQ4MTcwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:28.318874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731932. Ctx: { TraceId: 01jr89bcqg5wjqfbd75bca9x4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI0OTc2NGQtZTM5MTM5MTctMzdkMWFhZjktOWI1YzJiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) 2025-04-07T14:09:28.947345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 22444 rowCount 274 cpuUsage 0 2025-04-07T14:09:28.947424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037892 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 19319 rowCount 323 cpuUsage 0 2025-04-07T14:09:28.949297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 10803 rowCount 183 cpuUsage 0 2025-04-07T14:09:28.949903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 16329 rowCount 274 cpuUsage 0 2025-04-07T14:09:28.963860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037893 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 13281 rowCount 220 cpuUsage 0 2025-04-07T14:09:29.046608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 4 2025-04-07T14:09:29.046808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 2: RowCount 274, DataSize 16329 2025-04-07T14:09:29.046969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037892 followerId=0, pathId 2: RowCount 323, DataSize 19319 2025-04-07T14:09:29.047076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 183, DataSize 10803 2025-04-07T14:09:29.047144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037893 followerId=0, pathId 2: RowCount 220, DataSize 13281 2025-04-07T14:09:29.047589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 4 shards |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream >> TestYmqHttpProxy::TestSendMessageFifoQueue >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] >> TraverseDatashard::TraverseTwoTables [GOOD] >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsThenMerges Test command err: Trying to start YDB, gRPC: 10685, MsgBus: 27025 2025-04-07T14:00:57.737049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570959000609343:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:00:57.737104Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f7b/r3tmp/tmprei1ln/pdisk_1.dat 2025-04-07T14:00:58.226234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:00:58.237035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:00:58.237148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:00:58.239311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10685, node 1 2025-04-07T14:00:58.333267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:00:58.333299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:00:58.333315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:00:58.333419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27025 TClient is connected to server localhost:27025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:00:59.053732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:00:59.073457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:00:59.543389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:02.737001Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490570959000609343:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:02.737128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:01:05.588105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.588296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.588501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.588579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.588638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.596830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.597057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.597191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.597292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.597407Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.597529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.597618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7490570993360352730:2313];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.606854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.606908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.607124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.607219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.607313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.607414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.607497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.607584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.607667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.607759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.607862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.607949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7490570993360352728:2311];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.655346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.655403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.655609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.655733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.655839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.655962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993360352752:2335];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.656056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7490570993 ... 11323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736074:0, at schemeshard: 72057594046644480 2025-04-07T14:09:15.340826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736075:0, at schemeshard: 72057594046644480 2025-04-07T14:09:15.478412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736076:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:09:17.183133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736077:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.316833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736078:0, at schemeshard: 72057594046644480 2025-04-07T14:09:17.423130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736079:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:18.245238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736080:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.366919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736081:0, at schemeshard: 72057594046644480 2025-04-07T14:09:18.480118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736082:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:20.223798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736083:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.368317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736084:0, at schemeshard: 72057594046644480 2025-04-07T14:09:20.481932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736085:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:21.204321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736086:0, at schemeshard: 72057594046644480 2025-04-07T14:09:21.315970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736087:0, at schemeshard: 72057594046644480 2025-04-07T14:09:21.434248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736088:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:09:23.071300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736089:0, at schemeshard: 72057594046644480 2025-04-07T14:09:23.193792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736090:0, at schemeshard: 72057594046644480 2025-04-07T14:09:23.314886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736091:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:24.267431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736092:0, at schemeshard: 72057594046644480 2025-04-07T14:09:24.417039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736093:0, at schemeshard: 72057594046644480 2025-04-07T14:09:24.532970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736094:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:26.255137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736095:0, at schemeshard: 72057594046644480 2025-04-07T14:09:26.402401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736096:0, at schemeshard: 72057594046644480 2025-04-07T14:09:26.523111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736097:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:28.000725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736098:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.123529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736099:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.249463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736100:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-04-07T14:09:29.074244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736101:0, at schemeshard: 72057594046644480 2025-04-07T14:09:29.221811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736102:0, at schemeshard: 72057594046644480 2025-04-07T14:09:29.340790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736103:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-04-07T14:09:30.314208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736104:0, at schemeshard: 72057594046644480 2025-04-07T14:09:30.468913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736105:0, at schemeshard: 72057594046644480 2025-04-07T14:09:30.583461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736106:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () VERIFY failed (2025-04-07T14:09:31.859461Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2025-04-07T14:09:32.258767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736107:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.395892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736108:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.523840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736109:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.199693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736110:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.319932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736111:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.421632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736112:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.267064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736113:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.384090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736114:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.504087Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736115:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x180DC7E2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x180CBE3A 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1AF00E25 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x17ABAEA9 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:607: Execute_ @ 0x17AC302F 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FEF74566D8F 18. ??:0: ?? @ 0x7FEF74566E3F 19. ??:0: ?? @ 0x15851028 >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-04-07T14:09:26.286977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.287529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.287723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f4/r3tmp/tmpNEuwdf/pdisk_1.dat 2025-04-07T14:09:26.796228Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10615, node 1 2025-04-07T14:09:27.324304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.324361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.324396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.327096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.329435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.454476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.454655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.471232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12828 2025-04-07T14:09:28.169821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.657562Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.703065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.703178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.748305Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.752857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.990797Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:32.057873Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:32.057968Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.088261Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.088428Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.088640Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.088721Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.088782Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.088835Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.088890Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.088945Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.089356Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.305213Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.305323Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.318249Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.337295Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.338399Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.339586Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.371206Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.371272Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.371344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.375383Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.375510Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.390336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.412745Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.412907Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.429719Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.450615Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.541899Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.820766Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.987252Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.825292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.825411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.840062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.266753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.266899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.268240Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:34.268414Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:34.268489Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.272807Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.273526Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T14:09:34.273872Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:34.274087Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:34.274146Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:34.274322Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:34.274387Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T14:09:34.311858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.311942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.312248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.318536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:09:34.543602Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:09:34.543665Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:09:34.620008Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T14:09:34.904020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:09:35.092412Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2586:3168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.092599Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:35.092635Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2586:3168], StatRequests.size() = 1 2025-04-07T14:09:35.283463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89bjgp5k2g00e9vbs000b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBiNDk3Yi1hMjAxZjMzYS03ZGU2NmUxLWFkMDcwNWMy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:35.386991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037889 2025-04-07T14:09:35.808617Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2919:3233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.808861Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:09:35.808908Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2919:3233], StatRequests.size() = 1 2025-04-07T14:09:35.830484Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:2928:3242]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.830645Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-07T14:09:35.830672Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:2928:3242], StatRequests.size() = 1 2025-04-07T14:09:35.883167Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89bm1ncs2043c42c8bwnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM4MmM2OTQtOGEzMjQ5Ni1jNmIwZjhmZC01Yjk4Nzk4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:09:35.952401Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2974:3209]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.955165Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:35.955224Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:09:35.955622Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:35.955674Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:09:35.955725Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:09:35.970914Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T14:09:35.972452Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-07T14:09:35.972916Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:2999:3222]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.976191Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:35.976259Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:09:35.976558Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:35.976613Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:09:35.976690Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:09:35.979940Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-07T14:09:35.980225Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-04-07T14:09:26.483432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:399:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.483785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.483924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f0/r3tmp/tmpGCs3IN/pdisk_1.dat 2025-04-07T14:09:26.871436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4155, node 1 2025-04-07T14:09:27.320546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.320603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.320628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.321566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.328883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.442406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.442547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.459061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18344 2025-04-07T14:09:28.150359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.427897Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.477546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.477664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.537893Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.543320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.757532Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.803120Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.803211Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:31.834418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:31.836263Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:31.836473Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:31.836529Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:31.836586Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:31.836643Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:31.836729Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:31.836793Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:31.837473Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.061820Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.061929Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.067908Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1771:2559] 2025-04-07T14:09:32.071592Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1788:2569] 2025-04-07T14:09:32.071857Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1788:2569], schemeshard id = 72075186224037889 2025-04-07T14:09:32.078366Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:09:32.108427Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.108492Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.108553Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:09:32.112567Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.112698Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.136099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.150250Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.150398Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.165056Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.182162Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.207501Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.537757Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.690455Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:09:33.361910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.954430Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:34.134146Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:09:34.134230Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:34.134335Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2492:2904], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:34.135601Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2493:2905] 2025-04-07T14:09:34.135762Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2493:2905], schemeshard id = 72075186224037899 2025-04-07T14:09:35.191037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2624:3193], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.191211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.209124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:09:35.517097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2919:3242], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.517267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.529463Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2924:3246]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.529662Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:35.529853Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T14:09:35.529921Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2927:3249] 2025-04-07T14:09:35.529989Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2927:3249] 2025-04-07T14:09:35.530544Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2928:3138] 2025-04-07T14:09:35.530903Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2927:3249], server id = [2:2928:3138], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:35.531100Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2928:3138], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:35.531178Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:35.531373Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:35.531473Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2924:3246], StatRequests.size() = 1 2025-04-07T14:09:35.551168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2932:3253], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.551402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.551986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2937:3258], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.557451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-07T14:09:35.681062Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:09:35.681157Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:09:35.738449Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2927:3249], schemeshard count = 1 2025-04-07T14:09:36.161530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2939:3260], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-07T14:09:36.358058Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3076:3338]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:36.358204Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:36.358233Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3076:3338], StatRequests.size() = 1 2025-04-07T14:09:36.423761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89bkrveqe7yjhdxj936kx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA5MzU4ZjctOTAwZjM0ZTAtNDgzZTg4MDAtNDQxODM0N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:36.490627Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3114:3190]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:36.493614Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:36.493687Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:09:36.494220Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:36.494275Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:09:36.494324Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:09:36.500727Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T14:09:36.501048Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11955, MsgBus: 31987 2025-04-07T14:07:23.870022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572615355917222:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:23.870385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00101c/r3tmp/tmpek1WhE/pdisk_1.dat 2025-04-07T14:07:24.157751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11955, node 1 2025-04-07T14:07:24.214742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:24.214902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:24.216571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:24.226073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:24.226102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:24.226111Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-07T14:07:24.226219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31987 TClient is connected to server localhost:31987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:24.662024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:24.681605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:24.807842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:24.935822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:25.008249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:26.641069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572628240820667:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:26.641147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:26.828724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.858675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.881438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.905721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.932313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.957324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:26.997645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572628240821159:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:26.997714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:26.997836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572628240821164:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:27.000803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:27.008265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572628240821166:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:07:27.938470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:27.970533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.040862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.063807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.092314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.206809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.240115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.307443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.363532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.397533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.461915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.527893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.553749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:28.869288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572615355917222:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:28.869373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:29.049970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:29.088636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.114717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.147681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.175399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.207099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-07T14:07:29.235259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part ... 72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:27.763184Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:27.763232Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:27.763390Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:27.763422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:27.763548Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:27.763582Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:27.763744Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:27.763777Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:27.763868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:27.763893Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:27.766989Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:27.767036Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:27.767141Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:27.767171Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:27.767332Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:27.767362Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:27.767473Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:27.767530Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:27.767600Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:27.767631Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:27.767674Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:27.767707Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:27.768956Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:27.769016Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:27.769182Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:27.769217Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:27.769342Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:27.769373Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:27.769546Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:27.769578Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:27.769684Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:27.769718Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:27.777212Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:27.777278Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:27.777380Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:27.777415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:27.777576Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:27.777611Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:27.777704Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:27.777740Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:27.777814Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:27.777848Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:27.777891Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:27.777925Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:27.778271Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:27.778316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:27.778483Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:27.778515Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:27.778659Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:27.778694Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:27.778880Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:27.778916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:27.779024Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:27.779056Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-04-07T14:09:27.875575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.876070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.876271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e3/r3tmp/tmpDeGgob/pdisk_1.dat 2025-04-07T14:09:28.235547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24553, node 1 2025-04-07T14:09:28.452034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.452082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.452111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.452755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.454743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.559966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.560359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.575086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18618 2025-04-07T14:09:29.290530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.740536Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.792594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.792748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.834463Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.838991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.046809Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:33.105074Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:33.105167Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:33.135736Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:33.135910Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:33.136114Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:33.136181Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:33.136241Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:33.136306Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:33.136352Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:33.136410Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:33.136854Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:33.352274Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.352391Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.362312Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:33.379938Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:33.380779Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:33.381794Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:33.407083Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:33.407151Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:33.407220Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:33.409240Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.409346Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.422720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:33.435588Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:33.435757Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:33.448790Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:33.468597Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.512359Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.814233Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:34.001587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:34.879671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.879829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.899314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:35.006080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:35.006302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:35.006626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:35.006799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:35.006946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:35.007088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:35.007231Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:35.007343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:35.007472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:35.007572Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:35.007676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:35.007777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:35.045850Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:35.045968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:35.046120Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:35.046212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:35.046459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:35.046538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:35.046669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:35.046718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:35.046807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:35.046884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:35.046952Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:35.046992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:35.047612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:35.047690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:35.047920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:35.047984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:35.048175Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:35.048218Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:35.048429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:35.048781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:35.048995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:35.049059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:36.007904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2430:3067], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.008119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.012305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2025-04-07T14:09:36.730929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2520:3104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.731068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.734329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000019s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-04-07T14:09:26.774138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.774529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.774698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ec/r3tmp/tmpB4YI3A/pdisk_1.dat 2025-04-07T14:09:27.144302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14031, node 1 2025-04-07T14:09:27.354927Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.354985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.355015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.355621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.357437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.470886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.471043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.498633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10754 2025-04-07T14:09:28.236251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.207763Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.256565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.256707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.302710Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.307379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.571518Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.622268Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.622361Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:31.662252Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:31.662463Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:31.662707Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:31.662776Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:31.662840Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:31.662892Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:31.662997Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:31.663057Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:31.663449Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:31.919785Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:31.919897Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:31.928608Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:31.956302Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:31.964786Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:31.966252Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:09:32.002464Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.002522Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.002595Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:09:32.006198Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.006316Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.024277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.041742Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.041913Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.058664Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.078983Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.126829Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.478868Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.645768Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:09:33.339579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.964743Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:34.119651Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:09:34.119717Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:34.119832Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2487:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:34.126044Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2489:2906] 2025-04-07T14:09:34.127071Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2489:2906], schemeshard id = 72075186224037899 2025-04-07T14:09:34.972644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:09:35.568091Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:35.850898Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-07T14:09:35.850986Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:09:35.851089Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2972:3108], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:09:35.852604Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2974:3110] 2025-04-07T14:09:35.852987Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2974:3110], schemeshard id = 72075186224037905 2025-04-07T14:09:36.977975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3101:3354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.978135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.993804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-07T14:09:37.341749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3392:3401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:37.354996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:37.356557Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3397:3405]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:37.356790Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:37.356967Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T14:09:37.357043Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3400:3408] 2025-04-07T14:09:37.357127Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3400:3408] 2025-04-07T14:09:37.357715Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3401:3340] 2025-04-07T14:09:37.358011Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3400:3408], server id = [2:3401:3340], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:37.358232Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3401:3340], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:37.358308Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:37.358503Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:37.358589Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3397:3405], StatRequests.size() = 1 2025-04-07T14:09:37.377133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3405:3412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:37.377403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:37.377976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3410:3417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:37.388977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-07T14:09:37.693658Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:09:37.693727Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:09:37.754847Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3400:3408], schemeshard count = 1 2025-04-07T14:09:38.047469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3412:3419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-07T14:09:38.199373Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3559:3510]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:38.199542Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:38.199573Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3559:3510], StatRequests.size() = 1 2025-04-07T14:09:38.276607Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr89bnht1f8w8hwjzdnn0wk2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjNmNTJlOTgtZDRmOWM5YjgtYWQ1ZGYzZWQtNTdmZTQyM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:38.379474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-04-07T14:09:38.758390Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3892:3573]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:38.758701Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:09:38.759145Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-04-07T14:09:38.759201Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:38.759576Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:38.759641Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3892:3573], StatRequests.size() = 1 2025-04-07T14:09:38.784552Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3901:3582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:38.784858Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-07T14:09:38.784903Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3901:3582], StatRequests.size() = 1 2025-04-07T14:09:38.840527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr89bpxt8cy9acw6ytzcsdf5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0ZTI5YmYtOWMzODA2YjgtOTg0NTNkODUtOTI1NDllNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:09:38.908146Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3945:3598]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:38.911949Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:38.912016Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:09:38.912683Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:38.912746Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:09:38.912815Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:09:38.920548Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T14:09:38.920842Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-07T14:09:38.921443Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3970:3611]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:38.929416Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:38.929495Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:09:38.929934Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:38.929989Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:09:38.930058Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:09:38.936898Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-07T14:09:38.937182Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TraverseColumnShard::TraverseColumnTableRebootColumnshard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-04-07T14:06:08.699861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.699921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.762879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.576599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:09.576694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.624807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:10.356008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:10.356052Z node 3 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:06:10.391573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:12.755991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:12.756050Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.794239Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:14.955057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:14.955122Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.991996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:15.802533Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:15.802594Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:15.857770Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:16.249705Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-04-07T14:06:16.249781Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-04-07T14:06:16.249943Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-04-07T14:06:16.250265Z node 7 :TX_PROXY WARN: actor# [7:340:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-07T14:06:16.827103Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:16.827179Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:16.873859Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:17.887815Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.887866Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.928726Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:18.867069Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:18.867123Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:18.915095Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:19.828083Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-07T14:06:19.932690Z node 16 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:19.933073Z node 16 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't 
open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:19.933512Z node 16 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6938352849482177336 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:19.974225Z node 15 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:19.974651Z node 15 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:19.974790Z node 15 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6251372777471750827 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:20.003279Z node 17 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:20.003653Z node 17 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:20.003801Z node 17 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2246495064201998818 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:06:20.056268Z node 12 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:06:20.056650Z node 12 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:06:20.056811Z node 12 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/00117d/r3tmp/tmpuBSIxt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 721715765513549350 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 ... 
00-s[16/16]o)]} 2025-04-07T14:08:47.314265Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:08:47.314518Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:08:51.334785Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:08:51.335119Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:08:55.575966Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:08:55.576336Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:00.071433Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:00.072114Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:00.222045Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool 
'/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:00.222490Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:04.446372Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:04.446817Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:08.795271Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:08.795602Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:13.117056Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:13.117279Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:17.241255Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options 
PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:17.241505Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:21.525775Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:21.526051Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:25.877426Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:25.877741Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:30.040463Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:30.051998Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:34.248245Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:34.248510Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:38.416302Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2025-04-07T14:09:38.416567Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29237, MsgBus: 31411 2025-04-07T14:07:31.555066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572650714586621:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:31.556400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001011/r3tmp/tmp9YKl4B/pdisk_1.dat 2025-04-07T14:07:31.907409Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29237, node 1 2025-04-07T14:07:31.954177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:31.954274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:31.955623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:31.962734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:31.962765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:31.962773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:31.962856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31411 TClient is connected to server 
localhost:31411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:32.466469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.481418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:32.499445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.647305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.789338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:32.842751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:34.539053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572663599490002:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:34.555912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:34.594079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.621119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.654088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.729853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.754872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.782710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:34.822564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572663599490500:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:34.822647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:34.822821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572663599490505:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:34.825934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:34.839060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572663599490507:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:35.699583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.722256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.745969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.767499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.787204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.880796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.900828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.920656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:35.947434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.021327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.043717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.070958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.094686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.531691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:36.553265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572650714586621:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:36.554131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:36.572261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.602723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.623764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.692297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:36.767786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... vent=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:32.175752Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:32.175789Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:32.175947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:32.175983Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:32.176230Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:32.176267Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:32.176394Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:32.176421Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:32.181725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:32.181800Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:32.181909Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:32.181943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:32.182129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:32.182162Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:32.182268Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:32.182304Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:32.182376Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:32.182405Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:32.182447Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:32.182480Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:32.182850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:32.182897Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:32.183099Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:32.183132Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:32.183288Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:32.183323Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:32.183541Z 
node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:32.183575Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:32.183691Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:32.183734Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:09:32.206427Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:32.206510Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:32.206618Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:32.206653Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:32.206832Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:32.206865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:32.206965Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:09:32.207003Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:09:32.207080Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:09:32.207115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:09:32.207166Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:09:32.207198Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:09:32.207595Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:09:32.207677Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:09:32.207901Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:09:32.207947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:09:32.208110Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:09:32.208145Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:09:32.208338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:09:32.208384Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:09:32.208504Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:09:32.208546Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TestKinesisHttpProxy::TestConsumersEmptyNames |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> AnalyzeColumnshard::AnalyzeTwoColumnTables |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-04-07T14:08:40.057826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572947427522596:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:40.057953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1a/r3tmp/tmpmaefXk/pdisk_1.dat 2025-04-07T14:08:40.513060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:40.518314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:40.518399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:40.525152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19986, node 1 2025-04-07T14:08:40.703219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:40.703241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:40.703248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:40.703312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:41.257521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.263362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:41.265276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.269727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:08:41.269942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:08:41.269973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:08:41.273577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:08:41.273620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:08:41.276878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:41.281117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.289779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034921335, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:41.289830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:08:41.290221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:08:41.297583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:41.297780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:41.297830Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:08:41.297920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:08:41.297954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:08:41.298009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:08:41.302081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:08:41.302136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:08:41.302157Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:08:41.302241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 Triggering split by load TClient is connected to server localhost:4930 2025-04-07T14:08:43.187974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572960312425352:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.188145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.491907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.493140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:08:43.493651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.493672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T14:08:43.497423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:08:43.499161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.499216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.499235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:08:43.499442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.499488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.499500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:08:43.499868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:08:43.508397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:08:43.508488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:08:43.516241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:08:43.578896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:08:43.578921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:08:43.578967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T14:08:43.580358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.584621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034923631, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:43.584658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034923631 2025-04-07T14:08:43.584779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:08:43.589296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.591365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.591416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.592406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.592431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.592443Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPu ... ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923631 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) 2025-04-07T14:09:38.774170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.2272 2025-04-07T14:09:38.797204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.2898 2025-04-07T14:09:38.874590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-07T14:09:38.874712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:09:38.874894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:38.875227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:38.875676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T14:09:38.875731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:09:38.877466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-07T14:09:38.883728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:09:38.883839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:09:38.885869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:38.894962Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7490573196535724837:12060] 2025-04-07T14:09:38.900854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T14:09:38.908954Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-07T14:09:38.909039Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-07T14:09:38.909153Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:09:38.912845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-04-07T14:09:38.912881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-04-07T14:09:38.914116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:38.927904Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-07T14:09:38.927978Z node 1 :TX_DATASHARD INFO: Send registration request to time cast 
Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:09:38.928021Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-07T14:09:38.928042Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-07T14:09:38.928277Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:09:38.931870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-04-07T14:09:38.932075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-04-07T14:09:38.932287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-04-07T14:09:38.933535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:09:38.934103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:38.934359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:38.934425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:38.934774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:09:38.934827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:09:38.934842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-07T14:09:38.939684Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-07T14:09:38.941113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-07T14:09:38.942503Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-07T14:09:38.942596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-07T14:09:38.942648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:09:38.942686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:09:38.944769Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:09:38.944893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-04-07T14:09:38.945059Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:09:38.945336Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:09:38.945525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:09:38.948705Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-07T14:09:38.949189Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-07T14:09:38.949230Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-07T14:09:38.949674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-07T14:09:38.965952Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-07T14:09:38.966368Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-07T14:09:38.966407Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-07T14:09:38.966697Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-07T14:09:39.033360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T14:09:39.033519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-07T14:09:39.033714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T14:09:40.527626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-07T14:09:40.527682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-07T14:09:40.527723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923631 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers >> TestYmqHttpProxy::TestSetQueueAttributes |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TestYmqHttpProxy::TestListQueues |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> StatisticsSaveLoad::Simple >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestYmqHttpProxy::TestSendMessageBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-04-07T14:08:39.920825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572942256263896:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:39.920970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a23/r3tmp/tmpzfpq7Z/pdisk_1.dat 2025-04-07T14:08:40.421862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:40.434046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:40.434132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:40.451884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24960, node 1 2025-04-07T14:08:40.717335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:40.717364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:40.717375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:40.717534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15346 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:41.188916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.215227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:41.216498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.219761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:08:41.219949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:08:41.219982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:08:41.222711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:08:41.222726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:08:41.224441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:41.231037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034921272, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:41.231075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:08:41.231334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:08:41.235966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:41.236127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:41.236171Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:08:41.236261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:08:41.236294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:08:41.236352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:08:41.239232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:08:41.239292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:08:41.239307Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:08:41.239384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:08:41.239608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:15346 2025-04-07T14:08:43.244976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572959436133881:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.245106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.491907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.492855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:08:43.493422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.493460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T14:08:43.497356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.497588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:08:43.498649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.498679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.498695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:08:43.498873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.498887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.498895Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:08:43.500961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:08:43.507228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:08:43.507401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:08:43.509251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:08:43.573197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:08:43.573226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:08:43.573303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2025-04-07T14:08:43.581576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.600000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034923638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:43.600056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744034923638 2025-04-07T14:08:43.600175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:08:43.606128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:43.606454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:43.606524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:08:43.611426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:08:43.611474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:08:43.611489Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057 ... t, use /Root 2025-04-07T14:09:53.195621Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727589. Ctx: { TraceId: 01jr89c5155sm2q7senzc9j6yj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFiNWEwM2QtYTlmMmQyMjktYTlkYzRkOWYtYWQ4ZDI0MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.198045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727590. Ctx: { TraceId: 01jr89c5155agp1cka36k8vsnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlNTFlZTktMzRkMjA2YzktOGJhNjE4N2YtYjI3YjdiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.198796Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727591. Ctx: { TraceId: 01jr89c5182whv6vzj19kgypty, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFjNjhlYzMtNmY2NDJjN2MtYTk1NTBmNDctYjZjNmFkNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.200797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727592. Ctx: { TraceId: 01jr89c51930m4zxwc8vx8818k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1ZGI1NGEtNDAxODdiMDAtOWRhMzkzMWYtMjAzZTAwMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.203852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727593. Ctx: { TraceId: 01jr89c51hdgt1ev42kym81nh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3NjBiNzgtMTk0ZGJjYzgtZmYxNmQ5YzktNWVmOWE3NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.204802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727594. 
Ctx: { TraceId: 01jr89c51j3b8eyd0khy837h5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE1ZDZiZjgtNjlkZTFmNzktZDJjZDAyY2EtOTdhNTYxMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.205105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727596. Ctx: { TraceId: 01jr89c51jc3ad1wzgx7az41rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxM2Y0NDEtZjllYmY0NzctYzE3NTE1MTAtMjg2MjU4MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.205373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727595. Ctx: { TraceId: 01jr89c51j4hsk56rgd5v7q7t1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjMThiNmEtODg2YmYxZTEtMTQxMDUyYWYtOTA1NzBhYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.207356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727597. Ctx: { TraceId: 01jr89c51jfp08kqqge3fkevr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVmZTQzM2QtZTc3ZmYzNzItN2ZlOWMxM2EtZGNiN2JlMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.207890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727598. Ctx: { TraceId: 01jr89c51j4kg8fqmcmejcswph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTllOTI1NjgtNmZjYjg4MDQtZmM4OWI4NGItZjYxZGMxOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.212199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727600. Ctx: { TraceId: 01jr89c51rfpzvawwy6gd37xkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlNTFlZTktMzRkMjA2YzktOGJhNjE4N2YtYjI3YjdiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.213420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727599. Ctx: { TraceId: 01jr89c51r5yecj2epg9xnmz9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFiNWEwM2QtYTlmMmQyMjktYTlkYzRkOWYtYWQ4ZDI0MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.214571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727602. Ctx: { TraceId: 01jr89c51s2b0dpdpbqbw4kqnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFjNjhlYzMtNmY2NDJjN2MtYTk1NTBmNDctYjZjNmFkNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.214860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727601. Ctx: { TraceId: 01jr89c51s7wm68m7g7fv27x6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1ZGI1NGEtNDAxODdiMDAtOWRhMzkzMWYtMjAzZTAwMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.220345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727603. Ctx: { TraceId: 01jr89c51x9qycxxgb7w7hapa7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYxM2Y0NDEtZjllYmY0NzctYzE3NTE1MTAtMjg2MjU4MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.220939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727606. 
Ctx: { TraceId: 01jr89c52192z8tb4rrawnwmas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjMThiNmEtODg2YmYxZTEtMTQxMDUyYWYtOTA1NzBhYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.221526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727604. Ctx: { TraceId: 01jr89c521adr6a7a3s9wkrpb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE1ZDZiZjgtNjlkZTFmNzktZDJjZDAyY2EtOTdhNTYxMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.222260Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727605. Ctx: { TraceId: 01jr89c521feftcmnwf86z82v9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3NjBiNzgtMTk0ZGJjYzgtZmYxNmQ5YzktNWVmOWE3NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.222807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727607. Ctx: { TraceId: 01jr89c5243b99fa0sq3jq8ztx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTllOTI1NjgtNmZjYjg4MDQtZmM4OWI4NGItZjYxZGMxOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: 2025-04-07T14:09:53.227685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727608. Ctx: { TraceId: 01jr89c52449eg210qpnc0xak4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVmZTQzM2QtZTc3ZmYzNzItN2ZlOWMxM2EtZGNiN2JlMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.228169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727609. Ctx: { TraceId: 01jr89c5276x0vrc5yp2wt45m5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1ZGI1NGEtNDAxODdiMDAtOWRhMzkzMWYtMjAzZTAwMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T14:09:53.230230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727611. Ctx: { TraceId: 01jr89c52643chm5ammb1ka05v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFiNWEwM2QtYTlmMmQyMjktYTlkYzRkOWYtYWQ4ZDI0MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.230305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727610. Ctx: { TraceId: 01jr89c525f36pwc60ggmse9t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlNTFlZTktMzRkMjA2YzktOGJhNjE4N2YtYjI3YjdiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.233673Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727612. 
Ctx: { TraceId: 01jr89c52eevdnbetkfyg7xgs8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjMThiNmEtODg2YmYxZTEtMTQxMDUyYWYtOTA1NzBhYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.234371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727613. Ctx: { TraceId: 01jr89c52e0174kh101t89f4r4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTllOTI1NjgtNmZjYjg4MDQtZmM4OWI4NGItZjYxZGMxOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:53.251396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976727614. Ctx: { TraceId: 01jr89c52n48t8c070jdh989ej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFjNjhlYzMtNmY2NDJjN2MtYTk1NTBmNDctYjZjNmFkNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744034923638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2025-04-07T14:09:53.721798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 66263 rowCount 1080 cpuUsage 0 2025-04-07T14:09:53.724754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 56347 rowCount 920 cpuUsage 0 2025-04-07T14:09:53.821845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-07T14:09:53.821993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 1080, DataSize 66263 2025-04-07T14:09:53.822102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 920, DataSize 56347 2025-04-07T14:09:53.822371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> TestYmqHttpProxy::TestListQueues [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> StatisticsSaveLoad::Delete [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2025-04-07T14:09:51.431535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:51.431942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:51.432100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00154c/r3tmp/tmpqUFacO/pdisk_1.dat 2025-04-07T14:09:51.882774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27531, node 1 2025-04-07T14:09:52.415175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:52.415236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:52.415266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:52.416002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:52.432997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:52.549699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:52.550197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:52.563904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11185 2025-04-07T14:09:53.240945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:56.208412Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:56.254369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:56.254488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:56.295925Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:56.301385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:56.505095Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:56.565083Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:56.565207Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:56.597236Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:56.597429Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:56.597645Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:56.597719Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:56.597772Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:56.597848Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:56.597906Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:56.597961Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:56.598389Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:56.816836Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:56.816955Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:56.825147Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:56.834967Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:56.836807Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:56.837947Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:56.863500Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:56.863563Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:56.863636Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:56.865770Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:56.865875Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:56.881777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:56.894847Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:56.895051Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:56.909419Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:56.928344Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:56.970430Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:57.303375Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:57.460114Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:58.049233Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:58.049716Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:09:58.080922Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:09:58.089445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2158:3029], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:58.089542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2175:3035], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:58.089615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:58.097306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2025-04-07T14:09:58.148367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2179:3038], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:09:58.769818Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2289:3079]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:58.770067Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:58.770179Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2291:3081] 2025-04-07T14:09:58.770833Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2291:3081] 2025-04-07T14:09:58.771535Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2292:2804] 2025-04-07T14:09:58.771853Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2291:3081], server id = [2:2292:2804], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:58.772022Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2292:2804], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:58.772091Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:58.772306Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:58.772412Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2289:3079], StatRequests.size() = 1 2025-04-07T14:09:59.171331Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjBlZDc4MzUtMjM5ZDhiY2QtNWQyMWE3NDUtZGRiYWM3YjM=, TxId: 2025-04-07T14:09:59.171422Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YjBlZDc4MzUtMjM5ZDhiY2QtNWQyMWE3NDUtZGRiYWM3YjM=, TxId: 2025-04-07T14:09:59.172293Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:09:59.174700Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:09:59.192649Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2320:3102]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:59.192890Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:59.192934Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2320:3102], StatRequests.size() = 1 2025-04-07T14:09:59.346585Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=N2JlNTI5YTktNTYwOTNhYmYtYzQ3YmUxZjQtYzg0YTMzNTc=, TxId: 2025-04-07T14:09:59.346669Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=N2JlNTI5YTktNTYwOTNhYmYtYzQ3YmUxZjQtYzg0YTMzNTc=, TxId: 2025-04-07T14:09:59.347893Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:09:59.350085Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-07T14:09:59.379723Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2352:3118]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:59.379943Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:09:59.379990Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2352:3118], StatRequests.size() = 1 2025-04-07T14:09:59.504895Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YzRkZmYyNjktNWRiNTZiNjAtMmU5MjJjODctNzI2MTU5ZWQ=, TxId: 01jr89cb5wfx7b3dcr467nxdvt 2025-04-07T14:09:59.505776Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=YzRkZmYyNjktNWRiNTZiNjAtMmU5MjJjODctNzI2MTU5ZWQ=, TxId: 01jr89cb5wfx7b3dcr467nxdvt >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TestKinesisHttpProxy::TestCounters [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-04-07T14:06:24.122220Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:06:24.190067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:24.190110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:24.193994Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:06:24.194305Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:06:24.194506Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:24.201954Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:06:24.241896Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:24.242262Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:06:24.243581Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:06:24.243652Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:06:24.243693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:06:24.243994Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:06:24.270175Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:06:24.270333Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:06:24.270431Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:06:24.270470Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:06:24.270499Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:06:24.270530Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.270736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.270785Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.271079Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:06:24.271148Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:06:24.271187Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.271219Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:06:24.271270Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:06:24.271300Z node 1 
:TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:06:24.271328Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:06:24.271355Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:06:24.271385Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:06:24.333372Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.333447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.333498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:06:24.336047Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:06:24.336108Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:06:24.336204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:06:24.336512Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:06:24.336579Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:06:24.336626Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:06:24.336685Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.336720Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:06:24.336764Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:06:24.336797Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.337059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:06:24.337105Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:06:24.337141Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:06:24.337179Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.337255Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:06:24.337287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:06:24.337337Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:06:24.337369Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.337391Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:06:24.363501Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 
2025-04-07T14:06:24.363572Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:06:24.363615Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:06:24.363663Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:06:24.363721Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:06:24.364226Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.364269Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:06:24.364306Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:06:24.364442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:06:24.364470Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:06:24.364581Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:06:24.364622Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.364679Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:06:24.364711Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:06:24.368175Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:06:24.368248Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:06:24.368427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.368474Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:06:24.368511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:06:24.368546Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:06:24.368574Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:06:24.368631Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:06:24.368676Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:06:24.368730Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.368766Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:06:24.368798Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:06:24.368826Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:06:24.368967Z node 1 
:TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:06:24.368994Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.369027Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:06:24.369048Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:06:24.369078Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369129Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:06:24.369156Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:06:24.369185Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:06:24.369216Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:06:24.369265Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:06:24.369293Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:06:24.369321Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:06:24.369376Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:06:24.369395Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:06:24.369416Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
d 521 2025-04-07T14:09:59.931125Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-04-07T14:09:59.931159Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.931185Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-04-07T14:09:59.931302Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-04-07T14:09:59.931337Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.931378Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-04-07T14:09:59.931506Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-07T14:09:59.931537Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.931563Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-07T14:09:59.931751Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-04-07T14:09:59.931783Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.931807Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-04-07T14:09:59.931937Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-04-07T14:09:59.931968Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.931992Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-04-07T14:09:59.932102Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-04-07T14:09:59.932136Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.932160Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-04-07T14:09:59.932342Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-04-07T14:09:59.932377Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.932424Z node 16 
:TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-04-07T14:09:59.932526Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-04-07T14:09:59.932556Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.932581Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-04-07T14:09:59.932778Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-04-07T14:09:59.932811Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.932837Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-04-07T14:09:59.932981Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-04-07T14:09:59.933015Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.933039Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-04-07T14:09:59.933153Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-07T14:09:59.933186Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.933221Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-07T14:09:59.933378Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-04-07T14:09:59.933411Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.933437Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-04-07T14:09:59.933592Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-04-07T14:09:59.933628Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.933655Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-04-07T14:09:59.933807Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-04-07T14:09:59.933840Z node 16 :TX_DATASHARD TRACE: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.933866Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-04-07T14:09:59.934041Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-04-07T14:09:59.934074Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.934101Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-04-07T14:09:59.934253Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-04-07T14:09:59.934292Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.934318Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-04-07T14:09:59.934491Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-04-07T14:09:59.934529Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.934557Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-04-07T14:09:59.934657Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-04-07T14:09:59.934689Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.934713Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-04-07T14:09:59.934920Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-07T14:09:59.934957Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.934983Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-04-07T14:09:59.957719Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:09:59.957788Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-04-07T14:09:59.957851Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 1 ms, propose latency: 2 ms 2025-04-07T14:09:59.957922Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-07T14:09:59.957987Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-07T14:09:59.958276Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:09:59.958310Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-04-07T14:09:59.958354Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:09:59.958392Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:09:59.958713Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:230:2225], Recipient [16:334:2307]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-07T14:09:59.958761Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-07T14:09:59.958796Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 26 28 30 30 31 28 29 17 29 22 25 25 30 30 31 26 23 15 26 25 28 15 31 31 26 23 23 24 15 - 19 - actual 26 28 30 30 31 28 29 17 29 22 25 25 30 30 31 26 23 15 26 25 28 15 31 31 26 23 23 24 15 - 19 - interm 26 28 30 30 24 28 29 17 29 22 25 25 30 30 30 26 23 15 26 25 28 15 24 24 26 23 23 24 15 - 19 - |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 62002, MsgBus: 21289 2025-04-07T14:07:44.056096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572707149720361:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:44.057045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d5c/r3tmp/tmpJOYGEk/pdisk_1.dat 2025-04-07T14:07:44.455275Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62002, node 1 2025-04-07T14:07:44.463910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:44.464114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:44.465819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:44.640155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:44.640172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:44.640177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:44.640236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21289 TClient is connected to server localhost:21289 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:45.263591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:45.279236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:45.287361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:45.418246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:45.580045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:45.667332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:47.291621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572720034623823:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.291729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.556347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.587067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.620513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.652008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.691678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.763899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:47.869299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572720034624323:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.869366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.869532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572720034624328:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:47.873744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:47.884906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572720034624330:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:48.934326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:49.054870Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572707149720361:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:49.054928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:49.824795Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr898ch96fs7ht6kycjrd9bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzZWUxYjUtZWQ3ODY2YmMtZDc5NmU1NjYtNWRmNTZlNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.879176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr898ch95xxt8q3bp25a8c0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRiZjNlYTMtNGMwOTQzYzYtYWQ2NzdhMDQtYzMzYThmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.932458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr898ch96fs7ht6kycjrd9bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzZWUxYjUtZWQ3ODY2YmMtZDc5NmU1NjYtNWRmNTZlNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.934715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr898chha83kgt2cydds8vhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVlNTU3MDYtMmFkY2ZlZjktMjVkNzY3MTgtOGM5MWZmZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.944685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr898ckna7x17tnq1nf1xjqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIyNjBhMWUtNzA2NDlkMmEtNTVlZWU5Y2YtZmFjNTM4MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.945363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr898ckz6egh4e6c5a2m1q2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzExMzZiM2QtNzA3OGQ2NGEtNDIwYTU5OTUtZDgzNzRjMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.945766Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr898cm95awc4g1mzfm6h5n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWEzMjRkZmYtOTc2MTg0MjktYmE0ODRhM2YtNDRmYmJmMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.946361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr898cm97n3wwfw95fssbncj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk1NjI0ZmYtNzU1Y2U5YmUtMjk4MWM0MTMtNzBiNmYzYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.947506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. 
Ctx: { TraceId: 01jr898cm9eh3prtqt10x7kfjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U2OGE0YzEtNzQ4MjM2YzktNjY4Nzc4M2UtMTYxYWNjYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.950739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr898ch96fs7ht6kycjrd9bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzZWUxYjUtZWQ3ODY2YmMtZDc5NmU1NjYtNWRmNTZlNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.950958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr898ch95xxt8q3bp25a8c0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRiZjNlYTMtNGMwOTQzYzYtYWQ2NzdhMDQtYzMzYThmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:49.952612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jr898chha83kgt2cydds8vhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVlNTU3MDYtMmFkY2ZlZjktMjVkNzY3MTgtOGM5MWZmZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data ... sion/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.863521Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721483. Ctx: { TraceId: 01jr89cahp5vgdbkgmqqxp3ktq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.865089Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721485. Ctx: { TraceId: 01jr89caja2m231j9ztjfj4y58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.865410Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721484. Ctx: { TraceId: 01jr89cahpf4wcd0qg399qtyre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzVlMDJjMGQtNWY4YzJmNTItMThhZDZiNjAtZmNiN2I3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.865595Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721486. Ctx: { TraceId: 01jr89cahp1r8078jt9r8wghrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGEwMWI2MzgtNjg4NWE5MDctY2VjYmM1ZWEtZTU1ZjI0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.872223Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721489. Ctx: { TraceId: 01jr89caja2m231j9ztjfj4y58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.872224Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721487. Ctx: { TraceId: 01jr89cahp1r8078jt9r8wghrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGEwMWI2MzgtNjg4NWE5MDctY2VjYmM1ZWEtZTU1ZjI0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.873631Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721488. 
Ctx: { TraceId: 01jr89cajfdf37xgbnkvsmcm43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.878822Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721490. Ctx: { TraceId: 01jr89cajfdf37xgbnkvsmcm43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.880271Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721491. Ctx: { TraceId: 01jr89cajfdf37xgbnkvsmcm43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.884493Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721492. Ctx: { TraceId: 01jr89cak05z7q2qen8r5eea5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.884494Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721493. Ctx: { TraceId: 01jr89cak13er21d09hpjw41mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.887986Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721494. Ctx: { TraceId: 01jr89cak2d3d8hp8938wqmhq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzVlMDJjMGQtNWY4YzJmNTItMThhZDZiNjAtZmNiN2I3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.891413Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721495. Ctx: { TraceId: 01jr89cak05z7q2qen8r5eea5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.891793Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721496. Ctx: { TraceId: 01jr89cak13er21d09hpjw41mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.894168Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721497. Ctx: { TraceId: 01jr89cak46twrdyca279b6qnq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.897113Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721498. Ctx: { TraceId: 01jr89cak13er21d09hpjw41mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.898782Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721499. 
Ctx: { TraceId: 01jr89cak05z7q2qen8r5eea5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.901920Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721500. Ctx: { TraceId: 01jr89cak2d3d8hp8938wqmhq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzVlMDJjMGQtNWY4YzJmNTItMThhZDZiNjAtZmNiN2I3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.903711Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721502. Ctx: { TraceId: 01jr89cak46twrdyca279b6qnq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.904453Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721503. Ctx: { TraceId: 01jr89cak05z7q2qen8r5eea5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMWUzMTAtY2QwYjI0Mi1jNGE2MDkyNi02Y2Q0NmI3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.906198Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721501. Ctx: { TraceId: 01jr89cakeerz6d7rytyj3n8mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGEwMWI2MzgtNjg4NWE5MDctY2VjYmM1ZWEtZTU1ZjI0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.906354Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721504. Ctx: { TraceId: 01jr89cak2d3d8hp8938wqmhq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzVlMDJjMGQtNWY4YzJmNTItMThhZDZiNjAtZmNiN2I3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.912685Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721505. Ctx: { TraceId: 01jr89cakeerz6d7rytyj3n8mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGEwMWI2MzgtNjg4NWE5MDctY2VjYmM1ZWEtZTU1ZjI0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:09:58.914740Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721506. Ctx: { TraceId: 01jr89cakeerz6d7rytyj3n8mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGEwMWI2MzgtNjg4NWE5MDctY2VjYmM1ZWEtZTU1ZjI0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:09:58.920162Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721507. Ctx: { TraceId: 01jr89cam548dnxy2nc9tmxrxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.926601Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721508. Ctx: { TraceId: 01jr89cam743p9kh6n2j2w8byh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.931030Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721509. 
Ctx: { TraceId: 01jr89cam548dnxy2nc9tmxrxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.933855Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jr89cam743p9kh6n2j2w8byh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.935156Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721511. Ctx: { TraceId: 01jr89cam548dnxy2nc9tmxrxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2NlM2IxNWEtM2MxNjUxYzMtMmQxYzQ3NzgtZDUxODNhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.940751Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721513. Ctx: { TraceId: 01jr89cam743p9kh6n2j2w8byh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:09:58.942818Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721512. Ctx: { TraceId: 01jr89cam64ye36rqc6d9kq2az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.943460Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721514. Ctx: { TraceId: 01jr89cam743p9kh6n2j2w8byh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmNWJmYjItZDY4ZDA5NTQtNmRlNzNlYy00ZDAxNDY3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.947181Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721515. Ctx: { TraceId: 01jr89cam64ye36rqc6d9kq2az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:09:58.948936Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721516. Ctx: { TraceId: 01jr89cam64ye36rqc6d9kq2az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:58.952087Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721517. Ctx: { TraceId: 01jr89cam64ye36rqc6d9kq2az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzEwZmU1MWEtZDdlYzBiMS05NDJlZTAwNC0zNWUwZjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> StatisticsSaveLoad::ForbidAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-04-07T14:09:21.320146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573123640776383:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:21.320186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001760/r3tmp/tmpVbcOhy/pdisk_1.dat 2025-04-07T14:09:21.831548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:21.831791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:21.846393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:21.854482Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17040, node 1 2025-04-07T14:09:22.064426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:22.064447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:22.064459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:22.064544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:22.604919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.613962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.615446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:22.616159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:09:22.616313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:09:22.616332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:09:22.616909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:22.616923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:09:22.617223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.618573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962663, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.618593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:09:22.618826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:09:22.619333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.619440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.619473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:09:22.619554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:09:22.619582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:09:22.619626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:09:22.630552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:09:22.630632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:09:22.630655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:09:22.630733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:09:22.631105Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:9766 2025-04-07T14:09:22.832971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.833187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.833205Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.833736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:09:22.833852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.835035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962880, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.835070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034962880, at schemeshard: 72057594046644480 2025-04-07T14:09:22.835276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:09:22.835347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:09:22.835377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:09:22.835755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.835901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.837826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:09:22.837860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:09:22.837875Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:09:22.837934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2025-04-07T14:09:22.845347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:09:22.857740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.858015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.858034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.858096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:09:22.858221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:09:22.858255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:09:22.858738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T14:09:22.858932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.859146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.860208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:09:22.860262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:09:22.860276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:09:22.860327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 waiting... 2025-04-07T14:09:22.862162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.862420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.863086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ... 
00.015897Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:00.016344Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2025-04-07T14:10:00.016965Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:10:00.016997Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:00.017619Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2025-04-07T14:10:00.017828Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:00.018071Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:10:00.018150Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:10:00.018800Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:00.018871Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:00.018891Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2025-04-07T14:10:00.019084Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:00.019110Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:00.019125Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2025-04-07T14:10:00.019223Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2025-04-07T14:10:00.021069Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:10:00.021128Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2025-04-07T14:10:00.021476Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:10:00.037853Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:10:00.037883Z node 6 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:10:00.037961Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2025-04-07T14:10:00.038479Z node 
6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:10:00.039900Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035000085, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:10:00.039968Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035000085 2025-04-07T14:10:00.040113Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2025-04-07T14:10:00.040755Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:00.041146Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:00.041222Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:10:00.042063Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:00.042103Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:00.042124Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2025-04-07T14:10:00.042620Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:00.042705Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:00.042731Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2025-04-07T14:10:00.043805Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1744035000085 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 1454 } } 2025-04-07T14:10:00.044031Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:10:00.044064Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:00.044085Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-04-07T14:10:00.044471Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-04-07T14:10:00.044555Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-07T14:10:00.044601Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-04-07T14:10:01.082615Z node 6 :HTTP INFO: Listening on http://[::]:19547 
2025-04-07T14:10:01.095417Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490573296388820608:2405], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:01.100163Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490573296388820670:2416], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:01.100239Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490573296388820673:2418], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:01.101116Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573296388820660:2413], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:01.101242Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:01.247002Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7490573296388820663:2414]: Pool not found 2025-04-07T14:10:01.268890Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573274913981988:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:10:01.268958Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:10:01.633849Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7490573296388820651:2412]: Pool not found 2025-04-07T14:10:01.636107Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573296388820803:2437], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:01.636114Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7490573296388820804:2438], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:01.636236Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:01.888714Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7490573296388820801:2436]: Pool not found 2025-04-07T14:10:02.084120Z node 6 :HTTP DEBUG: (#44,[::1]:52200) incoming connection opened 2025-04-07T14:10:02.084204Z node 6 :HTTP DEBUG: (#44,[::1]:52200) -> (POST /Root) 2025-04-07T14:10:02.084355Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [f854:4500:6050:0:e054:4500:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 52af31dd-1aa26224-218aa56c-10b1d0ec 2025-04-07T14:10:02.084643Z node 6 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [52af31dd-1aa26224-218aa56c-10b1d0ec] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-04-07T14:10:02.084847Z node 6 :HTTP DEBUG: (#44,[::1]:52200) <- (400 InvalidAction) 2025-04-07T14:10:02.084908Z node 6 :HTTP DEBUG: (#44,[::1]:52200) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-04-07T14:10:02.084949Z node 6 :HTTP DEBUG: (#44,[::1]:52200) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 52af31dd-1aa26224-218aa56c-10b1d0ec x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-07T14:10:02.085038Z node 6 :HTTP DEBUG: (#44,[::1]:52200) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-04-07T14:09:21.348956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573124025578131:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:21.351313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001754/r3tmp/tmpr7gbaF/pdisk_1.dat 2025-04-07T14:09:21.824326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:21.828677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:21.830817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:21.881314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25498, node 1 2025-04-07T14:09:21.894225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:21.894442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 
2025-04-07T14:09:21.896647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:21.897252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:21.897301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:21.897358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:09:21.897432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:09:21.900730Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:09:22.051373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:22.051396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:22.051403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:22.051531Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:22.582589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.595580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.598439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.600827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:09:22.601063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:09:22.601159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:09:22.604274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:22.604305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:09:22.604756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.606626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:22.608755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962656, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.608843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:09:22.609126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:09:22.609895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.610041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.610093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:09:22.610235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:09:22.610271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:09:22.610317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:09:22.616883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:09:22.616982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:09:22.617001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:09:22.617153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:19675 2025-04-07T14:09:22.842205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.842442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.842468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.843093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:09:22.843237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, 
opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.844938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962887, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.844964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034962887, at schemeshard: 72057594046644480 2025-04-07T14:09:22.845169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:09:22.845236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:09:22.845272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:09:22.845695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.845873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.846829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:09:22.846865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:09:22.846884Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:09:22.846944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2025-04-07T14:09:22.851595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:09:22.853996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.854294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.854320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.854376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:09:22.854496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:09:22.854509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:09:22.854964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T14:09:22.855076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.855261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.855794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshar ... RY, path: /Root/SQS/cloud4/000000000000000101v0 2025-04-07T14:10:02.435404Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:02.435537Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:02.435578Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:10:02.436417Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715700, at schemeshard: 72057594046644480 2025-04-07T14:10:02.436497Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715700 2025-04-07T14:10:02.436521Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2025-04-07T14:10:02.436542Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2025-04-07T14:10:02.436841Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715700 2025-04-07T14:10:02.436873Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2025-04-07T14:10:02.436886Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 3 2025-04-07T14:10:02.437950Z node 6 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035002486, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:10:02.437985Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035002486, at schemeshard: 72057594046644480 2025-04-07T14:10:02.438078Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715700:0 128 -> 240 2025-04-07T14:10:02.438571Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:02.438771Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:02.438833Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715700:0 ProgressState 2025-04-07T14:10:02.438928Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715700:0 progress is 1/1 2025-04-07T14:10:02.438974Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715700:0 2025-04-07T14:10:02.439019Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715700, publications: 2, subscribers: 1 2025-04-07T14:10:02.439465Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715700 2025-04-07T14:10:02.439511Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715700 2025-04-07T14:10:02.439530Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 6 2025-04-07T14:10:02.439702Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715700 2025-04-07T14:10:02.439737Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715700 2025-04-07T14:10:02.439750Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 4 2025-04-07T14:10:02.439792Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715700, subscribers: 1 2025-04-07T14:10:02.441867Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000101v0/v2, operationId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-07T14:10:02.442041Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715701:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:10:02.442626Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715701, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000101v0/v2 2025-04-07T14:10:02.442860Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:02.443036Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:02.443092Z node 6 :FLAT_TX_SCHEMESHARD INFO: 
MkDir::TPropose operationId#281474976715701:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:10:02.443271Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715701, at schemeshard: 72057594046644480 2025-04-07T14:10:02.443582Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715701 2025-04-07T14:10:02.443620Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2025-04-07T14:10:02.443636Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 5 2025-04-07T14:10:02.443785Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715701 2025-04-07T14:10:02.443848Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2025-04-07T14:10:02.443861Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 3 2025-04-07T14:10:02.451277Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035002500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:10:02.451352Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715701:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035002500, at schemeshard: 72057594046644480 2025-04-07T14:10:02.451460Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715701:0 128 -> 240 2025-04-07T14:10:02.452051Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:02.452256Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:02.452309Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715701:0 ProgressState 2025-04-07T14:10:02.452395Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715701:0 progress is 1/1 2025-04-07T14:10:02.452439Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715701:0 2025-04-07T14:10:02.452483Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715701, publications: 2, subscribers: 1 2025-04-07T14:10:02.452878Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715701 2025-04-07T14:10:02.452950Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715701 2025-04-07T14:10:02.452974Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 6 2025-04-07T14:10:02.453153Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 4 PathOwnerId: 72057594046644480, cookie: 
281474976715701 2025-04-07T14:10:02.453179Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715701 2025-04-07T14:10:02.453189Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 4 2025-04-07T14:10:02.453225Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715701, subscribers: 1 2025-04-07T14:10:02.494935Z node 6 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [8e76a287-9270b0b2-db8352e8-bfed443] Got succesfult GRPC response. 2025-04-07T14:10:02.495026Z node 6 :HTTP_PROXY INFO: http request [CreateQueue] requestId [8e76a287-9270b0b2-db8352e8-bfed443] reply ok 2025-04-07T14:10:02.495176Z node 6 :HTTP DEBUG: (#44,[::1]:38378) <- (200 ) Http output full {"QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName.fifo"} 2025-04-07T14:10:02.495297Z node 6 :HTTP DEBUG: (#44,[::1]:38378) connection closed 2025-04-07T14:10:02.496479Z node 6 :HTTP DEBUG: (#44,[::1]:38384) incoming connection opened 2025-04-07T14:10:02.496564Z node 6 :HTTP DEBUG: (#44,[::1]:38384) -> (POST /Root) 2025-04-07T14:10:02.496684Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [d83f:ef00:6050:0:c03f:ef00:6050:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 319b081b-30b6b587-e3937c47-611af9d8 2025-04-07T14:10:02.497585Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [319b081b-30b6b587-e3937c47-611af9d8] got new request from [d83f:ef00:6050:0:c03f:ef00:6050:0] 2025-04-07T14:10:02.503092Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [319b081b-30b6b587-e3937c47-611af9d8] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T14:10:02.503144Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [319b081b-30b6b587-e3937c47-611af9d8] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T14:10:02.574233Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [319b081b-30b6b587-e3937c47-611af9d8] Got succesfult GRPC response. 2025-04-07T14:10:02.574463Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [319b081b-30b6b587-e3937c47-611af9d8] reply ok 2025-04-07T14:10:02.574628Z node 6 :HTTP DEBUG: (#44,[::1]:38384) <- (200 ) 2025-04-07T14:10:02.574715Z node 6 :HTTP DEBUG: (#44,[::1]:38384) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"31ef6ec1-44b46cf1-e39ed69-a985d4e2"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"765cf46f-651a4f57-b6e740b5-496f239a"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} >> StatisticsSaveLoad::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2025-04-07T14:09:54.587607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:54.587933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:54.588077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014d2/r3tmp/tmpvHRBmS/pdisk_1.dat 2025-04-07T14:09:54.889551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28923, node 1 2025-04-07T14:09:55.077045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:55.077088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:55.077116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:55.077528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:55.082050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:55.172644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:55.172801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:55.187301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14577 2025-04-07T14:09:55.816043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:58.579799Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:58.618144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:58.618241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:58.657682Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:58.661626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:58.838862Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:58.890938Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:58.891042Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:58.920824Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:58.921038Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:58.921239Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:58.921303Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:58.921357Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:58.921407Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:58.921459Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:58.921508Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:58.921901Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:59.125276Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:59.125405Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:59.131076Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:59.138994Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:59.139993Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:59.141018Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:59.163172Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:59.163232Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:59.163292Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:59.165081Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:59.165263Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:59.180259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:59.192412Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:59.192587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:59.205851Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:59.221966Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:59.263518Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:59.505737Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:59.685282Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:10:00.455269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.455422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.476112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:10:00.780321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.780447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.781613Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:10:00.781822Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:10:00.781909Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T14:10:00.781973Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T14:10:00.782483Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T14:10:00.782682Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T14:10:00.782844Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:10:00.782919Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:10:00.783176Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:10:00.783272Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T14:10:00.797289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.797367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.797695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:00.802613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:10:00.968412Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:10:00.968474Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:10:01.066862Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T14:10:01.418924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:10:01.540889Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2584:3168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:10:01.541068Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:10:01.541104Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2584:3168], StatRequests.size() = 1 2025-04-07T14:10:01.613477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89cce14gmg8gn1qsma1nw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk1YzZjZTYtNGYxYzQ0MGMtNzlhMTI0NTAtNzFmNTE0ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:01.750361Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2659:3199], for# user@builtin, access# DescribeSchema 2025-04-07T14:10:01.750427Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2659:3199], for# user@builtin, access# DescribeSchema 2025-04-07T14:10:01.760509Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2649:3195], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:10:01.762120Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTc4YjQ5MjItZmJlZDRhYmItYWFmNDNmNTctYWY4NTU1YWE=, ActorId: [1:2640:3187], ActorState: ExecuteState, TraceId: 01jr89cdc640fefp1hpj7jvbbs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TestYmqHttpProxy::TestPurgeQueue [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-04-07T14:09:57.025029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:57.025366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:57.025503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014c1/r3tmp/tmpgzqT4f/pdisk_1.dat 2025-04-07T14:09:57.332881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16775, node 1 2025-04-07T14:09:57.522209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:57.522270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:57.522301Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:57.522947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:57.528906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:57.623620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:57.623743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:57.637790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21354 2025-04-07T14:09:58.233020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:10:00.876297Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:10:00.929405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:10:00.929508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:10:00.967784Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:10:00.970940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:10:01.144800Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:01.191110Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:10:01.191204Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:10:01.214394Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:10:01.214540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:10:01.214693Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:10:01.214737Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:10:01.214774Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:10:01.214876Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:10:01.214919Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:10:01.214953Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:10:01.215258Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:10:01.418686Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:10:01.418788Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:10:01.423616Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:10:01.430280Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:10:01.430959Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:10:01.431884Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:10:01.455794Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:10:01.455858Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:10:01.455923Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:10:01.457924Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:10:01.458015Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:10:01.471203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:10:01.482094Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:10:01.482262Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:10:01.499794Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:10:01.517543Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:10:01.560523Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:10:01.775061Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:10:01.930685Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:10:02.519504Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:10:02.520048Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:10:02.547534Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:10:02.551204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2158:3029], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:02.551295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2175:3035], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:02.551375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:02.557233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2025-04-07T14:10:02.599756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2179:3038], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:10:03.091865Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2289:3079]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:10:03.092134Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:10:03.092231Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2291:3081] 2025-04-07T14:10:03.092306Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2291:3081] 2025-04-07T14:10:03.092805Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2292:2804] 2025-04-07T14:10:03.093031Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2291:3081], server id = [2:2292:2804], tablet id = 72075186224037897, status = OK 2025-04-07T14:10:03.093220Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2292:2804], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:10:03.093278Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:10:03.093421Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:10:03.093482Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2289:3079], StatRequests.size() = 1 2025-04-07T14:10:03.233811Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjgzMmJiOWQtN2U1ZjE1ZDEtNDAwOWQ1ZTUtMWU1OWRkNDI=, TxId: 2025-04-07T14:10:03.233892Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YjgzMmJiOWQtN2U1ZjE1ZDEtNDAwOWQ1ZTUtMWU1OWRkNDI=, TxId: 2025-04-07T14:10:03.234775Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:10:03.237072Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-07T14:10:03.254752Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2320:3102]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:10:03.254930Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:10:03.254970Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2320:3102], StatRequests.size() = 1 2025-04-07T14:10:03.397632Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NjRjMGUyZDctOGEwYmJmYzItNWNjN2FiMTUtYjU3OTRmMjA=, TxId: 01jr89ceyefhp33mm197jsc8fg 2025-04-07T14:10:03.397754Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NjRjMGUyZDctOGEwYmJmYzItNWNjN2FiMTUtYjU3OTRmMjA=, TxId: 01jr89ceyefhp33mm197jsc8fg 2025-04-07T14:10:03.400743Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:10:03.403211Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-07T14:10:03.421245Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YzFiYjkzMDctN2M0MTNlYTMtOTA5YTNlZDQtYjIwNTJlZjc=, TxId: 01jr89cf0b3rs2vnerc4fc2hbw 2025-04-07T14:10:03.421360Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YzFiYjkzMDctN2M0MTNlYTMtOTA5YTNlZDQtYjIwNTJlZjc=, TxId: 01jr89cf0b3rs2vnerc4fc2hbw |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-04-07T14:09:21.332700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573122267655924:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:21.333266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00176c/r3tmp/tmpvWkLZX/pdisk_1.dat 2025-04-07T14:09:21.765824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:21.819291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:21.819408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:21.820959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1235, node 1 2025-04-07T14:09:22.061119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:22.061143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:22.061150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:22.061240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:09:22.622928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.635952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.637555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.638213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:09:22.638409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:09:22.638433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:09:22.639020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:22.639034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:09:22.639317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:22.645182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962684, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.645232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:09:22.645548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:09:22.646406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.646566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.646617Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:09:22.646700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:09:22.646742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:09:22.646774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:09:22.653287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:09:22.653369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:09:22.653395Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:09:22.653481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:09:22.672030Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:9108 2025-04-07T14:09:22.925373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.925623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.925647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:09:22.926282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:09:22.926448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.927626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962971, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.927659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034962971, at schemeshard: 72057594046644480 2025-04-07T14:09:22.927868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:09:22.927939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:09:22.927965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:09:22.928438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.928570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.930006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:09:22.932145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:09:22.932182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:09:22.932223Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:09:22.932281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T14:09:22.935847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.936121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.936142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.936219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:09:22.936316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:09:22.936328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:09:22.936797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T14:09:22.936899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.937145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.937753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:09:22.937793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:09:22.937813Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:09:22.937872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T14:09:22.941946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.942130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.942884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE ... pose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:06.809646Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2025-04-07T14:10:06.810302Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:10:06.810342Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:06.811054Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2025-04-07T14:10:06.811238Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:06.811479Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:06.811581Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 waiting... 
2025-04-07T14:10:06.812194Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:06.812243Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:06.812261Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2025-04-07T14:10:06.812455Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:06.812503Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:06.812522Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2025-04-07T14:10:06.812825Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2025-04-07T14:10:06.813861Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:10:06.813937Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2025-04-07T14:10:06.814435Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:10:06.829712Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:10:06.829741Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:10:06.829806Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2025-04-07T14:10:06.830319Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:10:06.831508Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035006875, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:10:06.831553Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035006875 2025-04-07T14:10:06.831648Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2025-04-07T14:10:06.832278Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:06.832721Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:06.832783Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:10:06.834698Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:06.834746Z 
node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:06.834766Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2025-04-07T14:10:06.834950Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2025-04-07T14:10:06.834981Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2025-04-07T14:10:06.834993Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2025-04-07T14:10:06.835857Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1744035006875 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 1332 } } 2025-04-07T14:10:06.836246Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:10:06.836273Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:10:06.836289Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-04-07T14:10:06.836621Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-04-07T14:10:06.836786Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-07T14:10:06.836825Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-04-07T14:10:07.718236Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490573300550432018:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:10:07.718394Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:10:07.879930Z node 7 :HTTP INFO: Listening on http://[::]:16558 2025-04-07T14:10:07.894410Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7490573322025270632:2406], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:07.897736Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7490573322025270689:2417], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:07.898668Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573322025270688:2416], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:07.898977Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:07.899241Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7490573322025270702:2419], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:08.056432Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7490573322025270698:2418]: Pool not found 2025-04-07T14:10:08.465532Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7490573322025270684:2413]: Pool not found 2025-04-07T14:10:08.468919Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573326320238126:2437], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:08.469015Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7490573326320238127:2438], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-07T14:10:08.469069Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:08.735844Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7490573326320238124:2436]: Pool not found 2025-04-07T14:10:08.881373Z node 7 :HTTP DEBUG: (#44,[::1]:47066) incoming connection opened 2025-04-07T14:10:08.881482Z node 7 :HTTP DEBUG: (#44,[::1]:47066) -> (POST /Root) 2025-04-07T14:10:08.881637Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9843:7d00:6050:0:8043:7d00:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: d596e496-ebf991b6-22b14c54-f1ecb52d 2025-04-07T14:10:08.888414Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [d596e496-ebf991b6-22b14c54-f1ecb52d] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map 2025-04-07T14:10:08.888595Z node 7 :HTTP DEBUG: (#44,[::1]:47066) <- (400 MissingParameter) 2025-04-07T14:10:08.888679Z node 7 :HTTP DEBUG: (#44,[::1]:47066) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-04-07T14:10:08.888713Z node 7 :HTTP DEBUG: (#44,[::1]:47066) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: d596e496-ebf991b6-22b14c54-f1ecb52d x-amz-crc32: 3671469627 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} 2025-04-07T14:10:08.888809Z node 7 :HTTP DEBUG: (#44,[::1]:47066) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::NestedDirs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-04-07T14:08:39.883385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572943501733022:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:39.883776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a15/r3tmp/tmpSVxTx1/pdisk_1.dat 2025-04-07T14:08:40.348940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:40.349022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:40.362300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-07T14:08:40.442652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9436, node 1 2025-04-07T14:08:40.489838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:08:40.522462Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:08:40.522480Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:08:40.522745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:40.522962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:08:40.546155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:40.546724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:40.546745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:40.546793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:08:40.546868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:08:40.704623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:40.704639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:40.704646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:40.704775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:08:41.188971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.196712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:41.198515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.204843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:08:41.205055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:08:41.205139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:08:41.213508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:08:41.213576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:08:41.217352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:08:41.221686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:41.233362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034921272, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:41.233407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:08:41.233793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:08:41.243002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:08:41.243179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:08:41.243228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:08:41.243322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:08:41.243357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:08:41.243411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:08:41.246340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:08:41.246400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:08:41.246422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:08:41.246495Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:08:43.045825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572960681603083:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.045941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.491942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.494624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.494665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.501548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-07T14:08:43.597730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034923645, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:43.672574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:08:43.733318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572960681603305:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.733387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:43.749586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.750033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:43.750053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:08:43.751923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-04-07T14:08:43.764296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034923806, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:08:43.774676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-04-07T14:08:44.882967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572943501733022:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:44.883040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-04-07T14:08:53.689122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:08:53.690560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:08:53.816961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07 ... 
SHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-04-07T14:10:10.779286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-07T14:10:10.779446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-07T14:10:10.779466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-07T14:10:10.779491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710664, ready parts: 1/1, is published: true 2025-04-07T14:10:10.779561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7490573334343762205:2780] message: TxId: 281474976710664 2025-04-07T14:10:10.779617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-07T14:10:10.779641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-04-07T14:10:10.779656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710664:0 2025-04-07T14:10:10.779777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-07T14:10:10.780057Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:118:1:12288:10992:0] ] return ack processed 2025-04-07T14:10:10.780151Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:10:10.780310Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-07T14:10:10.780470Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7490573334343762264:2783], serverId# [1:7490573334343762271:4825], sessionId# [0:0:0] 2025-04-07T14:10:10.781285Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:118:1:12288:10992:0] ] return ack processed 2025-04-07T14:10:10.781323Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:10:10.781409Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:10:10.781470Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-07T14:10:10.782621Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:10:10.782683Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:10:10.782744Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7490573334343762267:2785], serverId# [1:7490573334343762273:4827], sessionId# [0:0:0] 2025-04-07T14:10:10.783119Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:10:10.783245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7490573003631276693 RawX2: 4503603922340151 } TabletId: 72075186224037890 State: 4 2025-04-07T14:10:10.783298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:10:10.783495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7490573003631276693 RawX2: 4503603922340151 } TabletId: 72075186224037890 State: 4 2025-04-07T14:10:10.783534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:10:10.783646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7490573325753827227 RawX2: 4503603922340541 } TabletId: 72075186224037891 State: 4 2025-04-07T14:10:10.783696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:10:10.784039Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:10:10.784088Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:10:10.784351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7490573325753827231 RawX2: 4503603922340542 } TabletId: 72075186224037892 State: 4 2025-04-07T14:10:10.784419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:10:10.784543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7490573325753827231 RawX2: 4503603922340542 } TabletId: 72075186224037892 State: 4 2025-04-07T14:10:10.784566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-07T14:10:10.785171Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-07T14:10:10.785216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:10:10.785643Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-07T14:10:10.785643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:10:10.785804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:10:10.785848Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-07T14:10:10.786178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:10:10.786184Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-07T14:10:10.786317Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:10:10.786321Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-07T14:10:10.788012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-07T14:10:10.788136Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-07T14:10:10.788188Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-07T14:10:10.788211Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-07T14:10:10.788214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-07T14:10:10.788402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-07T14:10:10.788544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-07T14:10:10.788696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-07T14:10:10.788736Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-07T14:10:10.788776Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-07T14:10:10.788824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-07T14:10:10.788944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-07T14:10:10.789062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-07T14:10:10.789106Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-07T14:10:10.789139Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-07T14:10:10.789299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T14:10:10.789329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-07T14:10:10.789391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:10:10.789655Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-07T14:10:10.789720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-07T14:10:10.789928Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 
not found 2025-04-07T14:10:10.790418Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-07T14:10:10.790465Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-07T14:10:10.794049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-07T14:10:10.794080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-07T14:10:10.794135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-07T14:10:10.794157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-07T14:10:10.794172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-07T14:10:10.794197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-07T14:10:10.794210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-07T14:10:10.794252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-07T14:10:10.794332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] Test command err: 2025-04-07T14:09:21.313689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573120832416031:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:21.313768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00175f/r3tmp/tmpMNQRLh/pdisk_1.dat 2025-04-07T14:09:21.836461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:21.839693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:21.840116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:21.844453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22488, node 1 2025-04-07T14:09:22.053471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:22.053495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:22.053509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:22.053615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:22.590078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.606139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.607566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.608203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:09:22.608378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:09:22.608397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:09:22.609334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:09:22.609362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:09:22.609949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.611724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962656, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.611745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:09:22.612080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 waiting... 
2025-04-07T14:09:22.614474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.614654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.614709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:09:22.614795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:09:22.614831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:09:22.614901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:09:22.633709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:09:22.633798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:09:22.633818Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:09:22.633924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:09:22.634279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:64734 2025-04-07T14:09:22.840253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.840473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.840501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.840994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:09:22.841506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.842512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744034962887, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:09:22.842530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1744034962887, at schemeshard: 72057594046644480 2025-04-07T14:09:22.842729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:09:22.842832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:09:22.842859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:09:22.843304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046644480 2025-04-07T14:09:22.843459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.844856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:09:22.844895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:09:22.844909Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:09:22.844961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 2025-04-07T14:09:22.850030Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:09:22.853516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.853818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.853855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.853911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:09:22.853993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:09:22.854005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:09:22.854502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2025-04-07T14:09:22.854627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:09:22.854857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:09:22.855872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:09:22.855903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:09:22.855916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:09:22.855984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T14:09:22.858835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:22.859033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:09:22.866725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE D ... 976715708:0 progress is 1/1 2025-04-07T14:10:11.446723Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715708:0 2025-04-07T14:10:11.446770Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715708, publications: 2, subscribers: 1 2025-04-07T14:10:11.448149Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715708 2025-04-07T14:10:11.448188Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715708 2025-04-07T14:10:11.448206Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2025-04-07T14:10:11.448361Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715708 2025-04-07T14:10:11.448389Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715708 2025-04-07T14:10:11.448399Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2025-04-07T14:10:11.448430Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715708, subscribers: 1 2025-04-07T14:10:11.450895Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-07T14:10:11.451109Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715709:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:10:11.451811Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715709, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2025-04-07T14:10:11.451947Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:11.452130Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:11.452198Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:10:11.452403Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715709, at schemeshard: 72057594046644480 2025-04-07T14:10:11.452776Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715709 2025-04-07T14:10:11.452807Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 
2025-04-07T14:10:11.452824Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2025-04-07T14:10:11.452978Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715709 2025-04-07T14:10:11.452996Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 2025-04-07T14:10:11.453007Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2025-04-07T14:10:11.460050Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035011509, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:10:11.460099Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035011509, at schemeshard: 72057594046644480 2025-04-07T14:10:11.460236Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715709:0 128 -> 240 2025-04-07T14:10:11.460913Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:10:11.461105Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:10:11.461155Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715709:0 ProgressState 2025-04-07T14:10:11.461246Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715709:0 progress is 1/1 2025-04-07T14:10:11.461287Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715709:0 2025-04-07T14:10:11.461324Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715709, publications: 2, subscribers: 1 2025-04-07T14:10:11.461812Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715709 2025-04-07T14:10:11.461859Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715709 2025-04-07T14:10:11.461876Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2025-04-07T14:10:11.462042Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715709 2025-04-07T14:10:11.462066Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715709 2025-04-07T14:10:11.462077Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2025-04-07T14:10:11.462113Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715709, subscribers: 1 2025-04-07T14:10:11.507660Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId 
[6c13dcc4-8a51d03c-43af4b00-1576fcab] Got succesfult GRPC response. 2025-04-07T14:10:11.507754Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [6c13dcc4-8a51d03c-43af4b00-1576fcab] reply ok 2025-04-07T14:10:11.507892Z node 7 :HTTP DEBUG: (#44,[::1]:56538) <- (200 ) 2025-04-07T14:10:11.507998Z node 7 :HTTP DEBUG: (#44,[::1]:56538) connection closed Http output full {"QueueUrl":"http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000301v0/DlqName"} 2025-04-07T14:10:11.508778Z node 7 :HTTP DEBUG: (#44,[::1]:56544) incoming connection opened 2025-04-07T14:10:11.508868Z node 7 :HTTP DEBUG: (#44,[::1]:56544) -> (POST /Root) 2025-04-07T14:10:11.508992Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1880:8100:6050:0:80:8100:6050:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: af100d88-6f58e7c6-5cdbb2a0-4e76b4f4 2025-04-07T14:10:11.509375Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [af100d88-6f58e7c6-5cdbb2a0-4e76b4f4] got new request from [1880:8100:6050:0:80:8100:6050:0] 2025-04-07T14:10:11.514784Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [af100d88-6f58e7c6-5cdbb2a0-4e76b4f4] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T14:10:11.514823Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [af100d88-6f58e7c6-5cdbb2a0-4e76b4f4] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T14:10:11.544067Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [af100d88-6f58e7c6-5cdbb2a0-4e76b4f4] Got succesfult GRPC response. 2025-04-07T14:10:11.544199Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [af100d88-6f58e7c6-5cdbb2a0-4e76b4f4] reply ok 2025-04-07T14:10:11.544398Z node 7 :HTTP DEBUG: (#44,[::1]:56544) <- (200 ) 2025-04-07T14:10:11.544491Z node 7 :HTTP DEBUG: (#44,[::1]:56544) connection closed Http output full {"Attributes":{"QueueArn":"yrn:yc:ymq:ru-central1:folder4:DlqName"}} 2025-04-07T14:10:11.549026Z node 7 :HTTP DEBUG: (#44,[::1]:56556) incoming connection opened 2025-04-07T14:10:11.549107Z node 7 :HTTP DEBUG: (#44,[::1]:56556) -> (POST /Root) 2025-04-07T14:10:11.550059Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [38d1:8200:6050:0:20d1:8200:6050:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: db3aeb31-51fc2087-a9f6b2e1-b7d3548f 2025-04-07T14:10:11.550731Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [db3aeb31-51fc2087-a9f6b2e1-b7d3548f] got new request from [38d1:8200:6050:0:20d1:8200:6050:0] 2025-04-07T14:10:11.552613Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [db3aeb31-51fc2087-a9f6b2e1-b7d3548f] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T14:10:11.552648Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [db3aeb31-51fc2087-a9f6b2e1-b7d3548f] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T14:10:11.582893Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [db3aeb31-51fc2087-a9f6b2e1-b7d3548f] Got succesfult GRPC response. 
2025-04-07T14:10:11.582947Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [db3aeb31-51fc2087-a9f6b2e1-b7d3548f] reply ok 2025-04-07T14:10:11.583071Z node 7 :HTTP DEBUG: (#44,[::1]:56556) <- (200 ) 2025-04-07T14:10:11.583132Z node 7 :HTTP DEBUG: (#44,[::1]:56556) connection closed Http output full {} 2025-04-07T14:10:11.583679Z node 7 :HTTP DEBUG: (#44,[::1]:56562) incoming connection opened 2025-04-07T14:10:11.583799Z node 7 :HTTP DEBUG: (#44,[::1]:56562) -> (POST /Root) 2025-04-07T14:10:11.583888Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5868:6e00:6050:0:4068:6e00:6050:0] request [ListDeadLetterSourceQueues] url [/Root] database [/Root] requestId: b5d84f60-5bb2fdbe-9953c83f-177da3c3 2025-04-07T14:10:11.584249Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5d84f60-5bb2fdbe-9953c83f-177da3c3] got new request from [5868:6e00:6050:0:4068:6e00:6050:0] 2025-04-07T14:10:11.584707Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [b5d84f60-5bb2fdbe-9953c83f-177da3c3] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-07T14:10:11.584738Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5d84f60-5bb2fdbe-9953c83f-177da3c3] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-07T14:10:11.596044Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [b5d84f60-5bb2fdbe-9953c83f-177da3c3] Got succesfult GRPC response. 2025-04-07T14:10:11.596177Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5d84f60-5bb2fdbe-9953c83f-177da3c3] reply ok 2025-04-07T14:10:11.596310Z node 7 :HTTP DEBUG: (#44,[::1]:56562) <- (200 ) 2025-04-07T14:10:11.596389Z node 7 :HTTP DEBUG: (#44,[::1]:56562) connection closed Http output full {"NextToken":"","QueueUrls":["http://ghrun-lqtfcktism.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName"]} |93.8%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::WriteRead >> EvWrite::WriteWithSplit >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TColumnShardTestReadWrite::CompactionGC |93.8%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> KqpWorkload::STOCK [GOOD] |93.8%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot |93.8%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> EvWrite::WriteWithLock [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |93.8%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 >> EvWrite::WriteWithSplit [GOOD] |93.8%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 18692, MsgBus: 7043 2025-04-07T14:09:06.553094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573057759758341:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:06.553131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001cef/r3tmp/tmpB6dTu7/pdisk_1.dat 2025-04-07T14:09:07.010008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:07.026195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:07.026276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:07.033885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18692, node 1 2025-04-07T14:09:07.196401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:07.196426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:07.196440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:07.196554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7043 TClient is connected to server localhost:7043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:09:08.044390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:09:08.075779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:09:09.985107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573070644660869:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:09.985224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:10.304626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:09:10.430307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.091627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:09:11.557057Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573057759758341:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:09:11.557129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:11.600168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573079234599474:2605], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.600270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.600479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573079234599479:2608], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:11.603567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:09:11.627174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573079234599481:2609], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:09:22.005858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:09:22.005902Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.590809s took: 0.599195s took: 0.606116s took: 0.605872s took: 0.610045s took: 0.618450s took: 0.618806s took: 0.626789s took: 0.636011s took: 0.632031s took: 6.423488s took: 6.432736s took: 6.435747s took: 6.465864s took: 6.472605s took: 6.475815s took: 6.480684s took: 6.482826s took: 6.482850s took: 6.485314s took: 5.240950s 2025-04-07T14:10:11.951452Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjRkZDI0YzUtNjBjNmQyMmYtZjhjYjNhZGYtOWE1MmNjNDI=, ActorId: [1:7490573315457814136:5367], ActorState: ExecuteState, TraceId: 01jr89cjqj1x4tmw34vaq7dqjf, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.952305Z node 1 :TX_DATASHARD ERROR: Complete [1744035011950 : 281474976711337] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.952417Z node 1 :TX_DATASHARD ERROR: Complete [1744035011958 : 281474976711338] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.952451Z node 1 :TX_DATASHARD ERROR: Complete [1744035011971 : 281474976711339] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.952485Z node 1 :TX_DATASHARD ERROR: Complete [1744035011979 : 281474976711340] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.953637Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzcwMTExOGUtNTIxMzZmNWEtODU4NDkzLWU1ZDAwYTE5, ActorId: [1:7490573315457814148:5375], ActorState: ExecuteState, TraceId: 01jr89cjqe2xcgn2213nbzevhy, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.953644Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjU4MWMwYjAtY2IxNWMzZTktMWRmZThkMDEtMmFhOTcyMTI=, ActorId: [1:7490573315457814145:5372], ActorState: ExecuteState, TraceId: 01jr89cjr3a2rgqvatwq171fe4, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.956505Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdkZjgyODctZGRhMGU1Mi0yZjNiNTM4MS02YTVkZTYwYw==, ActorId: [1:7490573315457814140:5369], ActorState: ExecuteState, TraceId: 01jr89cjrc8vn0qkzdzbqh9ez1, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.956596Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQwNmIyMDctNTJjMzAzNDItZGFlNWNhMTYtYTc4ZDExZWM=, ActorId: [1:7490573315457814127:5360], ActorState: ExecuteState, TraceId: 01jr89cjqc1cprs5a59wqcvge4, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T14:10:11.957400Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDdlYzU0ZmQtNjA4MTE5MDEtOTM3ZWYxYTgtNzgxMjNmOTk=, ActorId: [1:7490573315457814150:5377], ActorState: ExecuteState, TraceId: 01jr89cjq1ejas1p2sz2ycvnjh, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.958763Z node 1 :TX_DATASHARD ERROR: Complete [1744035011971 : 281474976711339] from 72075186224037912 at tablet 72075186224037912, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) 
| 2025-04-07T14:10:11.959453Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzYxMGNjODgtMzA2YjFjNzYtNjhhMWUyMWMtZjlkMjUwMzc=, ActorId: [1:7490573315457814143:5370], ActorState: ExecuteState, TraceId: 01jr89cjpz9r0eqzrg90gqe8e4, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.960068Z node 1 :TX_DATASHARD ERROR: Complete [1744035011979 : 281474976711340] from 72075186224037928 at tablet 72075186224037928, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.960132Z node 1 :TX_DATASHARD ERROR: Complete [1744035011958 : 281474976711338] from 72075186224037900 at tablet 72075186224037900, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.960315Z node 1 :TX_DATASHARD ERROR: Complete [1744035011992 : 281474976711341] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.960467Z node 1 :TX_DATASHARD ERROR: Complete [1744035011992 : 281474976711342] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.961039Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGEyOTJkYTEtNDNmYzUzZmQtZjZhOTIwZmEtZjFmOTI5YzU=, ActorId: [1:7490573315457814144:5371], ActorState: ExecuteState, TraceId: 01jr89cjr83hp4c52aqkwa7na8, Create QueryResponse for error on request, msg: 2025-04-07T14:10:11.961206Z node 1 :TX_DATASHARD ERROR: Complete [1744035011950 : 281474976711337] from 72075186224037896 at tablet 72075186224037896, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.961487Z node 1 :TX_DATASHARD ERROR: Complete [1744035012001 : 281474976711343] from 72075186224037921 at tablet 72075186224037921, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:10:11.962075Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTIxZGEwYmEtZjk1NzQ0NmMtOWVmNDhjNmYtYjkyYmVmNzc=, ActorId: [1:7490573315457814147:5374], ActorState: ExecuteState, TraceId: 01jr89cjqv21nz3ny4bp4jq1xd, Create QueryResponse for error on request, msg: tx has deferred effect ... 
4:10:12.814586Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-07T14:10:12.814598Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-07T14:10:12.814611Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-07T14:10:12.814623Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-07T14:10:12.814637Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-07T14:10:12.817970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-04-07T14:10:12.818007Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-07T14:10:12.818021Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-07T14:10:12.818069Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-07T14:10:12.818084Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-07T14:10:12.818293Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-07T14:10:12.818493Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-07T14:10:12.818533Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-07T14:10:12.818546Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-07T14:10:12.818558Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-07T14:10:12.818570Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-07T14:10:12.818582Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-07T14:10:12.818595Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-07T14:10:12.818607Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-07T14:10:12.818619Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-07T14:10:12.818635Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-07T14:10:12.818657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-07T14:10:12.818676Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 
2025-04-07T14:10:12.818689Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-07T14:10:12.818701Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-07T14:10:12.818714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-07T14:10:12.818727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-07T14:10:12.818740Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-07T14:10:12.818755Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-07T14:10:12.818767Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-07T14:10:12.818779Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-07T14:10:12.818791Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-07T14:10:12.818802Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-07T14:10:12.818813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-07T14:10:12.818824Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-07T14:10:12.818837Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-07T14:10:12.818849Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-07T14:10:12.818871Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-07T14:10:12.949209Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-07T14:10:12.949433Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-04-07T14:10:12.949459Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-04-07T14:10:12.949472Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-04-07T14:10:12.949483Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2025-04-07T14:10:12.949496Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-04-07T14:10:12.949509Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-04-07T14:10:12.949523Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 
2025-04-07T14:10:12.966624Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2025-04-07T14:10:12.966687Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-04-07T14:10:12.966697Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-04-07T14:10:12.966738Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-04-07T14:10:12.966749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-04-07T14:10:12.966758Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-07T14:10:12.966768Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-04-07T14:10:12.966778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-04-07T14:10:12.966787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-04-07T14:10:12.966797Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2025-04-07T14:10:12.966807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-04-07T14:10:12.966838Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-07T14:10:12.966854Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-04-07T14:10:12.966864Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-04-07T14:10:12.966877Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-04-07T14:10:12.966888Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-04-07T14:10:12.966897Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-07T14:10:12.966906Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-04-07T14:10:12.966916Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-04-07T14:10:12.966924Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2025-04-07T14:10:12.966962Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-07T14:10:12.966973Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-04-07T14:10:12.966981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 
2025-04-07T14:10:12.966999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-04-07T14:10:12.967008Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2025-04-07T14:10:12.967017Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-04-07T14:10:12.967050Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-04-07T14:10:12.967063Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-07T14:10:12.967076Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-04-07T14:10:12.967106Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-04-07T14:10:12.967121Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-04-07T14:10:12.967134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-04-07T14:10:13.664096Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.838371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.838452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.838676Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.846212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.846403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.846633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.846761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.846857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.846949Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.847089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.847220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.847351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.847459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.847553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.847648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.870982Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.874973Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.875279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.875343Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.875490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.875621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.875680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.875731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.875822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.875900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T14:10:13.875967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.876006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.876179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.876243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.876280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.876310Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.876422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.876481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.876535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.876571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.876636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.876711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.876742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.876785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.876821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.876848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.876998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-07T14:10:13.877100Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T14:10:13.877189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-07T14:10:13.877261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T14:10:13.877422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.877493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.877535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.877761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.877809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.877841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.877972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.878010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.878058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.878236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.878268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.878293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.878393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
vent=start_construct_result;interval_idx=0;interval_id=1; 2025-04-07T14:10:14.619893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:131;event=DoExecute;interval_idx=0; 2025-04-07T14:10:14.637232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T14:10:14.637374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.637423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:14.637471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:14.637582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:14.637644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.637682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:14.637720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:14.637854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:14.637892Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:10:14.637936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-07T14:10:14.637987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=1; 2025-04-07T14:10:14.638035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:14.638112Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.638151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-04-07T14:10:14.638184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:14.638361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:14.638510Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.638577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:14.638705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-04-07T14:10:14.638776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-04-07T14:10:14.639282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:279:2297] send ScanData to [1:276:2294] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 458752 rows: 4096 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2025-04-07T14:10:14.639424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.639569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.639692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.640019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:14.640140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.640255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.640343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:279:2297] finished for tablet 9437184 2025-04-07T14:10:14.640433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:279:2297] send ScanData to [1:276:2294] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:14.640937Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:279:2297] and sent to [1:276:2294] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.031},{"events":["f_ack"],"t":0.032},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.034}],"full":{"a":1744035014606258,"name":"_full_task","f":1744035014606258,"d_finished":0,"c":0,"l":1744035014640482,"d":34224},"events":[{"name":"bootstrap","f":1744035014606839,"d_finished":2711,"c":1,"l":1744035014609550,"d":2711},{"a":1744035014639992,"name":"ack","f":1744035014638333,"d_finished":1388,"c":1,"l":1744035014639721,"d":1878},{"a":1744035014639977,"name":"processing","f":1744035014609607,"d_finished":21944,"c":9,"l":1744035014639723,"d":22449},{"name":"ProduceResults","f":1744035014608141,"d_finished":2971,"c":12,"l":1744035014640286,"d":2971},{"a":1744035014640291,"name":"Finish","f":1744035014640291,"d_finished":0,"c":0,"l":1744035014640482,"d":191},{"name":"task_result","f":1744035014609626,"d_finished":20414,"c":8,"l":1744035014638224,"d":20414}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2025-04-07T14:10:14.641034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:14.605728Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=37632;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=37632;selected_rows=0; 2025-04-07T14:10:14.641077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:14.641139Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:14.641239Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:279:2297];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-04-07T14:10:13.663600Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.838518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.838600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.838856Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.846887Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.847112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.847373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.847492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.847586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.847692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.847808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.847944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.848051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.848156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.848257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.848354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.868246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.871705Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.872075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.872129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.872291Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.872465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.872537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.872584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.872706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.872783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.872827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.872861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.873048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.873160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.873197Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.873338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.873426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.873453Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.873518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.873555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-04-07T14:10:13.873582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.873623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.873658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.873683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.873839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-07T14:10:13.873943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T14:10:13.874014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-07T14:10:13.874095Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-07T14:10:13.874207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.874248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.874272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.874452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.874485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.874645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.874670Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.874869Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.874906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.874926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.875005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2025-04-07T14:10:14.974761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.974911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.974956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:14.975003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:14.976958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:14.977100Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.977143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:14.977251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:219;stage=no data is ready 
yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.977402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:14.977437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:10:14.977475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-07T14:10:14.977519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1024;merger=0;interval_id=1; 2025-04-07T14:10:14.977557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:14.977650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.977685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1024;finished=1; 2025-04-07T14:10:14.977780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=1024; 2025-04-07T14:10:14.977834Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=8400896;num_rows=1024;batch_columns=key,field; 2025-04-07T14:10:14.977986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2025-04-07T14:10:14.978087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.978161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.978259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.980157Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:14.980250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.980355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-07T14:10:14.980388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] finished for tablet 9437184 2025-04-07T14:10:14.980452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:14.980813Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:272:2287] and sent to [1:264:2279] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.004},{"events":["f_ProduceResults"],"t":0.006},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.007},{"events":["f_ack"],"t":0.104},{"events":["l_task_result"],"t":0.136},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.138}],"full":{"a":1744035014842126,"name":"_full_task","f":1744035014842126,"d_finished":0,"c":0,"l":1744035014980486,"d":138360},"events":[{"name":"bootstrap","f":1744035014846663,"d_finished":2488,"c":1,"l":1744035014849151,"d":2488},{"a":1744035014980136,"name":"ack","f":1744035014946167,"d_finished":29232,"c":2,"l":1744035014977289,"d":29582},{"a":1744035014980121,"name":"processing","f":1744035014849228,"d_finished":74104,"c":11,"l":1744035014978310,"d":74469},{"name":"ProduceResults","f":1744035014848223,"d_finished":31109,"c":15,"l":1744035014980378,"d":31109},{"a":1744035014980379,"name":"Finish","f":1744035014980379,"d_finished":0,"c":0,"l":1744035014980486,"d":107},{"name":"task_result","f":1744035014849239,"d_finished":44725,"c":9,"l":1744035014978307,"d":44725}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2025-04-07T14:10:14.980887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:14.841402Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14766240;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14766240;selected_rows=0; 2025-04-07T14:10:14.980925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:14.981022Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.052869s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.051323s;size=0.014766232;details={columns=1,2,4294967040,4294967041,4294967042;};};]};; 2025-04-07T14:10:14.981100Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; |93.8%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:06:54.242498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:06:54.242583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.242618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:06:54.242645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:06:54.246353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:06:54.246413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:06:54.246492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:06:54.247595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:06:54.312031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:54.312136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:54.321652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:06:54.321769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:06:54.321897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:06:54.326798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:06:54.326982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:06:54.328841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.330299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:06:54.334611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.347642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:06:54.347687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.347725Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:06:54.347898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.354118Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:06:54.465457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:06:54.465633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.465779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:06:54.465933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:06:54.465995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.467873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.467996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:06:54.468139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.468195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:06:54.468280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:06:54.468309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:06:54.469801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.469844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:06:54.469881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:06:54.471198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.471233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.471267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.471322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.474952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:06:54.476397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:06:54.476545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:06:54.477462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:06:54.477686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.477889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:06:54.477934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:06:54.478058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.478133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:06:54.479680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:06:54.479719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:06:54.479845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:06:54.479882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:06:54.480058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:06:54.480105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:06:54.480180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:06:54.480210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.480246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:06:54.480294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:06:54.480328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:06:54.480351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-07T14:06:54.480399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:06:54.480440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:06:54.480486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:06:54.482069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.482176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:06:54.482209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:06:54.482253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:06:54.482286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:06:54.482421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... hType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-07T14:10:14.437108Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:10:14.437346Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex" took 252us result status StatusSuccess 2025-04-07T14:10:14.438250Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 
4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:14.439135Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:10:14.439414Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" took 298us result status StatusSuccess 2025-04-07T14:10:14.440047Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::WriteOverload |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy >> TColumnShardTestReadWrite::WriteReadDuplicate |93.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::ReadStale >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS96 >> TColumnShardTestReadWrite::WriteRead [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-04-07T14:10:13.663986Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.837239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.837312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.837527Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.848004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.848197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.848439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.848556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.848696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.848799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.848901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.849000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.849103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.849194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.849307Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.849404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.870751Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.872722Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.872860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.872898Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.873035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.873214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.873251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.873319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.873385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.873415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.873542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.873646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.873674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.873772Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.873855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.873877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.873920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.873955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.873979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.874013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.874059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.874093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.874256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-07T14:10:13.874332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-07T14:10:13.874430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-07T14:10:13.874513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T14:10:13.874626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.874669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.874691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.874820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T14:10:13.874851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.874993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.875011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.875134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.875159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.875182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.875283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
dx=0;count=31;merger=0;interval_id=25; 2025-04-07T14:10:17.451694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:17.451752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.451769Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:17.451800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:17.451992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:17.452148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.452181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:17.452278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:17.452339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:17.452427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T14:10:17.452523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.452594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.452712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2025-04-07T14:10:17.452857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:17.452945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.453043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.453069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] finished for tablet 9437184 2025-04-07T14:10:17.453119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:17.453402Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:426:2441] and sent to [1:425:2440] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.009},{"events":["f_ack"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744035017441892,"name":"_full_task","f":1744035017441892,"d_finished":0,"c":0,"l":1744035017453144,"d":11252},"events":[{"name":"bootstrap","f":1744035017442030,"d_finished":1924,"c":1,"l":1744035017443954,"d":1924},{"a":1744035017452840,"name":"ack","f":1744035017451968,"d_finished":771,"c":1,"l":1744035017452739,"d":1075},{"a":1744035017452832,"name":"processing","f":1744035017443997,"d_finished":4253,"c":10,"l":1744035017452741,"d":4565},{"name":"ProduceResults","f":1744035017443068,"d_finished":2490,"c":13,"l":1744035017453061,"d":2490},{"a":1744035017453062,"name":"Finish","f":1744035017453062,"d_finished":0,"c":0,"l":1744035017453144,"d":82},{"name":"task_result","f":1744035017444008,"d_finished":3370,"c":9,"l":1744035017451836,"d":3370}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:17.453447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:17.441444Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T14:10:17.453481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:17.453544Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:17.453616Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::ReadStale [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-04-07T14:10:13.664026Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816512Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.835617Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.835683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.835895Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.845595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.845856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.846108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.846226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.846314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.846429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.846568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.846664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.846755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.846843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.846941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.847046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.866404Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.868563Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.868704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.868760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.868953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.870530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.870602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.870630Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.870701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.870752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.870783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.870814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.870952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.870998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.871031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.871051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.871131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.871185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.871212Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.871233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.871271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.871304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.871325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.871371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.871413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.871443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.871631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-07T14:10:13.871724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T14:10:13.871831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-07T14:10:13.871944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-07T14:10:13.872120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.872187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.872222Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.872443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.872490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.872515Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.872709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.872769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.872806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.873012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.873058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.873086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.873201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... x=0;count=31;merger=0;interval_id=25; 2025-04-07T14:10:17.501317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:17.501380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.501415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:17.501451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:17.501625Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:17.501767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: 
binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.501797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:17.501915Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:17.501995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:17.502105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 2759 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary 2025-04-07T14:10:17.502232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.502325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.502428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.502576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:17.502674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.502759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:17.502797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] finished for tablet 9437184 2025-04-07T14:10:17.502853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:17.503199Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:426:2441] and sent to [1:425:2440] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 
page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744035017491254,"name":"_full_task","f":1744035017491254,"d_finished":0,"c":0,"l":1744035017502883,"d":11629},"events":[{"name":"bootstrap","f":1744035017491418,"d_finished":2322,"c":1,"l":1744035017493740,"d":2322},{"a":1744035017502563,"name":"ack","f":1744035017501604,"d_finished":847,"c":1,"l":1744035017502451,"d":1167},{"a":1744035017502552,"name":"processing","f":1744035017494843,"d_finished":4885,"c":10,"l":1744035017502452,"d":5216},{"name":"ProduceResults","f":1744035017492654,"d_finished":2322,"c":13,"l":1744035017502774,"d":2322},{"a":1744035017502777,"name":"Finish","f":1744035017502777,"d_finished":0,"c":0,"l":1744035017502883,"d":106},{"name":"task_result","f":1744035017494866,"d_finished":3938,"c":9,"l":1744035017501490,"d":3938}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:17.503256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:17.490907Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-07T14:10:17.503299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:17.503363Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:17.503440Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpService::PatternCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-04-07T14:10:17.125470Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:17.184629Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:17.202104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:17.202147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:17.202285Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:17.207789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:17.207931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:17.208086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:17.208141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:17.208219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:17.208302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:17.208373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:17.208432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:17.208484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:17.208555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.208615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:17.208704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:17.223073Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:17.224616Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:17.224731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:17.224759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:17.224888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.224987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:17.225028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:17.225049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:17.225104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:17.225145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:17.225169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:17.225190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:17.225300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.225345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:17.225382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:17.225398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:17.225459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:17.225492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:17.225527Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:17.225550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:17.225608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:17.225634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:17.225659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:17.225689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:17.225712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:17.225727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:17.225829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-04-07T14:10:17.225879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-04-07T14:10:17.225940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-04-07T14:10:17.226001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=23; 2025-04-07T14:10:17.226099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:17.226131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:17.226153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:17.226259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:17.226286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.226300Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.226370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:17.226392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:17.226422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:17.226549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:17.226582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:17.226602Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:17.226685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... =complete;fline=manager.cpp:9;event=lock;process_id=CS::INDEXATION::8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.175146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-07T14:10:18.175247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:725;event=indexation;bytes=69128;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.175503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:615;event=start_changes;type=CS::INDEXATION;task_id=8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.175865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69641;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;type=CS::INDEXATION;priority=0;; 2025-04-07T14:10:18.176077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69641;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;type=CS::INDEXATION;priority=0;; 2025-04-07T14:10:18.176117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;mem=69641;cpu=0; 2025-04-07T14:10:18.176247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;task_id=1;mem=69641;cpu=0; 2025-04-07T14:10:18.176352Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.189496Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-07T14:10:18.189627Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-07T14:10:18.190119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:212;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:18.190165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:18.190231Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=69128;indexing_debug={task_ids=8d8afdc-13ba11f0-9294d312-6d13d937,;}; 2025-04-07T14:10:18.190325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:10:18.190385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:18.190435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:18.190470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:18.190510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:18.190584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:18.190897Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-07T14:10:18.202638Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-04-07T14:10:18.202764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:24;event=overriden_columns;columns=; 2025-04-07T14:10:18.202851Z node 1 :TX_COLUMNSHARD_SCAN WARN: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. now: 2025-04-07T14:10:18.202815Z; 2025-04-07T14:10:18.217046Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:2:1:255:1:13648:0]; 2025-04-07T14:10:18.220810Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T14:10:18.221034Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[7] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T14:10:18.221949Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-07T14:10:18.222035Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:18.222068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:18.222427Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-07T14:10:18.234177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-07T14:10:18.234228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=with_appended.cpp:80;portions=1,;task_id=8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.234280Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=59640;portion_bytes=59668;portion_raw_bytes=88399; 2025-04-07T14:10:18.234348Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=59668;before_rows=0;after_rows=999; 2025-04-07T14:10:18.234401Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=59640;portion_bytes=59668;portion_raw_bytes=88399; 2025-04-07T14:10:18.234631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;fline=manager.cpp:14;event=unlock;process_id=CS::INDEXATION::8d8afdc-13ba11f0-9294d312-6d13d937; 2025-04-07T14:10:18.234698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:18.234756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:18.234793Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:10:18.234851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:18.234898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:18.234941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:18.234992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:18.235086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8d8afdc-13ba11f0-9294d312-6d13d937;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:18.235243Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:69128:0] 2025-04-07T14:10:18.235306Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-07T14:10:18.235392Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:18.235471Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T14:10:18.239253Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } Columns { Name: "message" } } } ; 2025-04-07T14:10:18.239348Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;message;];};]; 2025-04-07T14:10:18.239436Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. 
now: 2025-04-07T14:10:18.239395Z; 2025-04-07T14:10:18.239597Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=8d8afdc-13ba11f0-9294d312-6d13d937;mem=69641;cpu=0; >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::WriteReadModifications >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::PatternCache [GOOD] Test command err: Trying to start YDB, gRPC: 4457, MsgBus: 12939 2025-04-07T14:03:34.975563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571633214584341:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001075/r3tmp/tmpNdWdhP/pdisk_1.dat 2025-04-07T14:03:35.597699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.597791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.601606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:03:35.668325Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4457, node 1 2025-04-07T14:03:36.061216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.061238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.061244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.061323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12939 TClient is connected to server localhost:12939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:37.134598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.167254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:03:37.378869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.563847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.645677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.586972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571650394455070:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.587057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.998790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571633214584341:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.998995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.517276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.539465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.564436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.586676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.616717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.667098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658984390178:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.667178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.667434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571658984390183:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.670983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.683174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571658984390185:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.559648Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OWY1ODUwMWItNzkwMWM3YjQtN2ZjMDM5MGQtOTBlZjBlMDI=, ActorId: [1:7490571650394455052:2376], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:03:41.559696Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OWY1ODUwMWItNzkwMWM3YjQtN2ZjMDM5MGQtOTBlZjBlMDI=, ActorId: [1:7490571650394455052:2376], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:03:41.559718Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1ODUwMWItNzkwMWM3YjQtN2ZjMDM5MGQtOTBlZjBlMDI=, ActorId: [1:7490571650394455052:2376], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:03:41.559737Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1ODUwMWItNzkwMWM3YjQtN2ZjMDM5MGQtOTBlZjBlMDI=, ActorId: [1:7490571650394455052:2376], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:03:41.559799Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1ODUwMWItNzkwMWM3YjQtN2ZjMDM5MGQtOTBlZjBlMDI=, ActorId: [1:7490571650394455052:2376], ActorState: unknown state, Session actor destroyed 2025-04-07T14:03:41.568511Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWVkYWZmMjItYzY4MzdkMGMtNjZhNzgzYzItMzkwN2ZkNWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWVkYWZmMjItYzY4MzdkMGMtNjZhNzgzYzItMzkwN2ZkNWI= 2025-04-07T14:03:41.568749Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWVkYWZmMjItYzY4MzdkMGMtNjZhNzgzYzItMzkwN2ZkNWI=, ActorId: [1:7490571663279357794:2464], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.576821Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODQxMWUzYzEtN2I0YmYwMmUtZmQ2NzUyMDktOGI3ZDQ0MzA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODQxMWUzYzEtN2I0YmYwMmUtZmQ2NzUyMDktOGI3ZDQ0MzA= 2025-04-07T14:03:41.576913Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODQxMWUzYzEtN2I0YmYwMmUtZmQ2NzUyMDktOGI3ZDQ0MzA=, ActorId: [1:7490571663279357797:2466], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.583456Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTZlMjNlOGQtMTQyYmMwMjktMmJkMmY3Y2MtZmNkYjE2NTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTZlMjNlOGQtMTQyYmMwMjktMmJkMmY3Y2MtZmNkYjE2NTI= 2025-04-07T14:03:41.583816Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTZlMjNlOGQtMTQyYmMwMjktMmJkMmY3Y2MtZmNkYjE2NTI=, ActorId: [1:7490571663279357799:2468], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.596173Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkxOTNhNTAtMTQzY2U3OGYtZmQ2OWE4MDgtMmJhNzI3NGQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODkxOTNhNTAtMTQzY2U3OGYtZmQ2OWE4MDgtMmJhNzI3NGQ= 2025-04-07T14:03:41.596267Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkxOTNhNTAtMTQzY2U3OGYtZmQ2OWE4MDgtMmJhNzI3NGQ=, ActorId: [1:7490571663279357804:2471], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.604137Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjJlM2M1MWYtZDViNzdmOWMtMTU4MGEyOWUtNWMwMjcyNA==, ActorId: [0:0:0], ActorState: unknown state, 
Create session actor with id MjJlM2M1MWYtZDViNzdmOWMtMTU4MGEyOWUtNWMwMjcyNA== 2025-04-07T14:03:41.604319Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjJlM2M1MWYtZDViNzdmOWMtMTU4MGEyOWUtNWMwMjcyNA==, ActorId: [1:7490571663279357806:2473], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.613622Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDM5YTY0OWYtZjNkMjUxY2MtNmYzM2M0YjktN2E3MzI1Mzk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDM5YTY0OWYtZjNkMjUxY2MtNmYzM2M0YjktN2E3MzI1Mzk= 2025-04-07T14:03:41.613802Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDM5YTY0OWYtZjNkMjUxY2MtNmYzM2M0YjktN2E3MzI1Mzk=, ActorId: [1:7490571663279357808:2475], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:03:41.625668Z node 1 :KQP_SESSION DEBUG: Sessio ... 7:7490572834130897396:2236];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001075/r3tmp/tmpleNGtf/pdisk_1.dat 2025-04-07T14:08:14.775389Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:08:14.878625Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:14.901953Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:14.902085Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:14.904829Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9188, node 7 2025-04-07T14:08:14.981491Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:14.981521Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:14.981534Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:14.981680Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8269 TClient is connected to server localhost:8269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:15.685895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:08:19.683494Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490572834130897396:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:19.683571Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:08:19.805244Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734333:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.805244Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734354:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.805319Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734356:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.805325Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734353:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.805371Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734355:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.805728Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.808861Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734391:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.808957Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734394:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.809024Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.809129Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734396:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.811025Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734414:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.811496Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734412:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.811570Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.815820Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734444:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.815902Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734447:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.815951Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734448:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.816024Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.816268Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:08:19.824163Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734463:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.824240Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490572855605734465:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.824408Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:19.900895Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734458:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.900978Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734367:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901040Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734460:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901097Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734368:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901154Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734406:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901209Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734421:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901268Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734365:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901642Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734407:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:19.901920Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490572855605734366:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:08:29.857990Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:08:29.858020Z node 7 :IMPORT WARN: Table profiles were not loaded >> Normalizers::EmptyTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-04-07T14:10:19.550377Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:19.612693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:19.629746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:19.629821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:19.630067Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:19.636074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-04-07T14:10:19.636235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:19.636355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:19.636461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:19.636534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:19.636586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:19.636652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:19.636773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:19.636861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:19.636918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:19.636971Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:19.637025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:19.637085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:19.655302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:19.657883Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:19.658076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-04-07T14:10:19.658112Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-07T14:10:19.658287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-07T14:10:19.658343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-04-07T14:10:19.658401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-07T14:10:19.658449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=24; 2025-04-07T14:10:19.658576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:19.658626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-07T14:10:19.658653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-07T14:10:19.658740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:19.658791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:19.658813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:19.658841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 
2025-04-07T14:10:19.658890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:19.658934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:19.658961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:19.658983Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:19.659096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:19.659139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:19.659164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:19.659185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:19.659241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:19.659297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:19.659347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:19.659373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:19.659447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:19.659479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:19.659502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:19.659540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:19.659579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:19.659603Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:19.659683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=17; 2025-04-07T14:10:19.659725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=16; 2025-04-07T14:10:19.659790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=16; 2025-04-07T14:10:19.659836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=14; 2025-04-07T14:10:19.659924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:19.659952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:19.659968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:19.660082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:19.660103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer ... 
=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T14:10:20.643687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-07T14:10:20.643735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11703; 2025-04-07T14:10:20.643859Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:20.643980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:278:2291];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:20.644050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:278:2291];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:20.644112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:20.644217Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:20.644265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:20.644335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:20.644398Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:10:20.644469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:20.644526Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:20.644572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:20.644621Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:20.644696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:20.644774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:20.645283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:20.645553Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:20.645659Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:20.645693Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:20.645722Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:20.645773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:20.645839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:20.645894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:10:20.645968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:20.646021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:20.646062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:20.646112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:20.646171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:20.646221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:20.694259Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-07T14:10:20.694415Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-07T14:10:20.694623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "key1" } Columns { Name: "key2" } Columns { Name: "field" } } } ; 2025-04-07T14:10:20.694711Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[key1;key2;field;];};]; 2025-04-07T14:10:20.695369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:331:2336];trace_detailed=; 2025-04-07T14:10:20.696094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:355;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-07T14:10:20.696368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-07T14:10:20.696704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:20.696855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T14:10:20.697016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-07T14:10:20.697083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:331:2336] finished for tablet 9437184 2025-04-07T14:10:20.697180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:331:2336] send ScanData to [1:329:2335] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:20.697566Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:331:2336] and sent to [1:329:2335] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744035020695283,"name":"_full_task","f":1744035020695283,"d_finished":0,"c":0,"l":1744035020697229,"d":1946},"events":[{"name":"bootstrap","f":1744035020695501,"d_finished":1034,"c":1,"l":1744035020696535,"d":1034},{"a":1744035020696678,"name":"ack","f":1744035020696678,"d_finished":0,"c":0,"l":1744035020697229,"d":551},{"a":1744035020696641,"name":"processing","f":1744035020696641,"d_finished":0,"c":0,"l":1744035020697229,"d":588},{"name":"ProduceResults","f":1744035020696518,"d_finished":336,"c":2,"l":1744035020697058,"d":336},{"a":1744035020697061,"name":"Finish","f":1744035020697061,"d_finished":0,"c":0,"l":1744035020697229,"d":168}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2025-04-07T14:10:20.697639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:20.694784Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T14:10:20.697683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:20.697728Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T14:10:20.697813Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-04-07T14:10:19.860916Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:19.924240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:19.937759Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:19.937806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:19.937997Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:19.943582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:19.943725Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:19.943900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:19.943995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:19.944062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:19.944133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:19.944203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:19.944274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:19.944334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:19.944391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:19.944447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:19.944514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:19.958340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:19.960166Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:19.960309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:19.960356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:19.960534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:19.960707Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:19.960791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:19.960829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:19.960901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:19.960954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:19.960992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:19.961020Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:19.961187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:19.961273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:19.961316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:19.961350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:19.961438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:19.961494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:19.961529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:19.961561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:19.961660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:19.961704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:19.961733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-07T14:10:19.961773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:19.961804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:19.961825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:19.961984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-07T14:10:19.962071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-07T14:10:19.962168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-07T14:10:19.962245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-07T14:10:19.962397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:19.962447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:19.962479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:19.962671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:19.962719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:19.962750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:19.962896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:19.962937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:19.962961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:19.963131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:19.963167Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:19.963192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:19.963327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... e_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.168314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:21.168337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:21.168740Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-07T14:10:21.168864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ASSEMBLER::SPEC;duration=0.000000s;size=0;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-07T14:10:21.169166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=ASSEMBLER::LAST_PK;duration=0.000000s;size=0;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-07T14:10:21.169453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:159;scan_step=name=SNAPSHOT;duration=0.000000s;size=0;details={};;scan_step_idx=4; 2025-04-07T14:10:21.169780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:21.169809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=fetching.cpp:15;event=apply; 2025-04-07T14:10:21.169838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:31;event=fetched;interval_idx=0; 2025-04-07T14:10:21.169870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=6; 2025-04-07T14:10:21.170231Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:131;event=DoExecute;interval_idx=0; 2025-04-07T14:10:21.170579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T14:10:21.170743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.170787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:21.170831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:21.171055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:21.171089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:10:21.171123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-07T14:10:21.171161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-04-07T14:10:21.171207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:21.171290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.171397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.171701Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T14:10:21.171947Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[30] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T14:10:21.172857Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-07T14:10:21.172967Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted 
portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=1;blobs=2;rows=10;bytes=2812;raw_bytes=868; inactive portions=4;blobs=8;rows=40;bytes=11248;raw_bytes=3472; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:21.173007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a9c8d70-13ba11f0-8ae26a4a-87b0c76d;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:21.173400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:21.173557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.173671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:21.173716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2441] finished for tablet 9437184 2025-04-07T14:10:21.173775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2441] send ScanData to [1:419:2437] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:21.174149Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:423:2441] and sent to [1:419:2437] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.013},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1744035021157910,"name":"_full_task","f":1744035021157910,"d_finished":0,"c":0,"l":1744035021173811,"d":15901},"events":[{"name":"bootstrap","f":1744035021158221,"d_finished":3677,"c":1,"l":1744035021161898,"d":3677},{"a":1744035021173370,"name":"ack","f":1744035021173370,"d_finished":0,"c":0,"l":1744035021173811,"d":441},{"a":1744035021173358,"name":"processing","f":1744035021163126,"d_finished":3451,"c":10,"l":1744035021171468,"d":3904},{"name":"ProduceResults","f":1744035021160266,"d_finished":1930,"c":12,"l":1744035021173699,"d":1930},{"a":1744035021173702,"name":"Finish","f":1744035021173702,"d_finished":0,"c":0,"l":1744035021173811,"d":109},{"name":"task_result","f":1744035021163141,"d_finished":3306,"c":10,"l":1744035021171466,"d":3306}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T14:10:21.174240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:21.157336Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-04-07T14:10:21.174278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:21.174360Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=;;; 2025-04-07T14:10:21.174447Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:423:2441];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::WriteOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload [GOOD] Test command err: 2025-04-07T14:10:16.438456Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:16.506838Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:16.525591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:16.525657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:16.525853Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:16.533159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:16.533304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:16.533483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:16.533584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:16.533697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:16.533804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:16.533907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:16.533981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:16.534043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:16.534098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.534155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:16.534218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:16.547925Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:16.549604Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:16.549709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:16.549738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:16.549859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.549967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:16.550019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:16.550044Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:16.550093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:16.550127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:16.550148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:16.550165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:16.550291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.550338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:16.550363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:16.550378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:16.550437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:16.550474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:16.550498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:16.550518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:16.550565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:16.550587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:16.550616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:16.550642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:16.550662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:16.550690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:16.550826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-07T14:10:16.550932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-04-07T14:10:16.551047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-04-07T14:10:16.551154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-07T14:10:16.551316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:16.551381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:16.551420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:16.551631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:16.551692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.551726Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.551845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:16.551872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:16.551892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:16.552043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:16.552079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:16.552105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:16.552198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... =8;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.185104Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-07T14:10:23.185353Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-07T14:10:23.197452Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-07T14:10:23.197600Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.199514Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-07T14:10:23.199749Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-07T14:10:23.211625Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-07T14:10:23.211772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.222460Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-07T14:10:23.222754Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-07T14:10:23.234748Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-07T14:10:23.234903Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 
2025-04-07T14:10:23.236592Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-07T14:10:23.236854Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-07T14:10:23.248928Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-07T14:10:23.249072Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.250868Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-07T14:10:23.251137Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-07T14:10:23.263092Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-07T14:10:23.263246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.279051Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-07T14:10:23.279362Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-07T14:10:23.291504Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-07T14:10:23.291683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.293415Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-07T14:10:23.293673Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-07T14:10:23.305711Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-07T14:10:23.305860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.307618Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-07T14:10:23.307901Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-07T14:10:23.319820Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-07T14:10:23.319960Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.330565Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-07T14:10:23.330814Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-07T14:10:23.342653Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-07T14:10:23.342795Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;local_tx_no=17;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.344449Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-07T14:10:23.344732Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-07T14:10:23.356704Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-07T14:10:23.356856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.358383Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-07T14:10:23.358636Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-07T14:10:23.370593Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-07T14:10:23.370738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.379643Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-07T14:10:23.379899Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-07T14:10:23.391708Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-07T14:10:23.391845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.393480Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-07T14:10:23.393741Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-07T14:10:23.405760Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-07T14:10:23.405905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.407415Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-07T14:10:23.407653Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-07T14:10:23.419550Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-07T14:10:23.419690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.428678Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-07T14:10:23.428937Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 
2:20 Blob count: 1 2025-04-07T14:10:23.441113Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-07T14:10:23.441268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.442851Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-07T14:10:23.443100Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-07T14:10:23.455015Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-07T14:10:23.455151Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-07T14:10:23.456875Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-07T14:10:23.457100Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-07T14:10:23.468777Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-07T14:10:23.468899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:23.482638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:23.482721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:23.672136Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-04-07T14:10:23.712581Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 23 at tablet 9437184 2025-04-07T14:10:23.712891Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:23 Blob count: 1 2025-04-07T14:10:23.724967Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:23 Blob count: 1 2025-04-07T14:10:23.725108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=26;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::WriteReadExoticTypes >> KqpWorkload::KV [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 26778, MsgBus: 16602 2025-04-07T14:08:53.360504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573002821175875:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:53.361323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e02/r3tmp/tmpl1d3A6/pdisk_1.dat 2025-04-07T14:08:53.870703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:53.870835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:53.882576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:53.902392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26778, node 1 2025-04-07T14:08:54.109111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:54.109130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:54.109139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:54.109204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16602 TClient is connected to server localhost:16602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:08:55.068316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:08:56.996706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573015706078267:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:56.996827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:57.588469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:08:58.264140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573024296014403:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.264229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.264744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573024296014411:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:58.269360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:08:58.282679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573024296014413:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:08:58.359758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573002821175875:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:08:58.359810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:09:08.878563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:09:08.878595Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.117287s took: 0.117258s took: 0.117558s took: 0.117815s took: 0.118286s took: 0.119852s took: 0.119953s took: 0.120360s took: 0.120669s took: 0.120955s took: 0.277506s took: 0.284940s took: 0.285446s took: 0.289859s took: 0.291407s took: 0.293213s took: 0.293313s took: 0.291785s took: 0.294678s took: 0.295390s took: 0.118109s took: 0.119758s took: 0.120523s took: 0.121668s took: 0.121381s took: 0.121657s took: 0.122584s took: 0.121967s took: 0.122236s took: 0.121946s took: 0.015930s took: 0.016085s took: 0.016557s took: 0.017796s took: 0.018104s took: 0.022068s took: 0.021392s took: 0.021355s took: 0.023257s took: 0.025540s took: 0.028247s took: 0.029671s took: 0.116646s took: 0.117749s took: 0.116989s took: 0.118473s took: 0.117550s took: 0.119675s took: 0.119123s took: 0.119133s 2025-04-07T14:10:27.526589Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-07T14:10:27.526631Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-07T14:10:27.526647Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-07T14:10:27.526661Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-07T14:10:27.526679Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-07T14:10:27.526692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-07T14:10:27.526707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-07T14:10:27.526720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-07T14:10:27.526733Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-07T14:10:27.526749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-07T14:10:27.534070Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-07T14:10:27.534132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-07T14:10:27.534146Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-07T14:10:27.543097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037920 not found 2025-04-07T14:10:27.543160Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-07T14:10:27.543181Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-07T14:10:27.543196Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-07T14:10:27.543214Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-07T14:10:27.543232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-07T14:10:27.544813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-07T14:10:27.544837Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-07T14:10:27.544884Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-07T14:10:27.544897Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-07T14:10:27.544937Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-07T14:10:27.544954Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-07T14:10:27.544965Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-07T14:10:27.544979Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-07T14:10:27.544990Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-04-07T14:10:27.545002Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-07T14:10:27.546664Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-07T14:10:27.546760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-07T14:10:27.546775Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-04-07T14:10:27.546787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-07T14:10:27.546799Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-07T14:10:27.546811Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-07T14:10:27.546822Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-07T14:10:27.546834Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037919 not found 2025-04-07T14:10:27.547044Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-07T14:10:27.547104Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-07T14:10:27.547122Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found |93.8%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-04-07T14:10:22.855940Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:22.919670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:22.935076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:22.935127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:22.935324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:22.942858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:22.943038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:22.943247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:22.943336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:22.943407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:22.943473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:22.943544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:22.943608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:22.943668Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:22.943736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:22.943803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:22.943863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:22.959824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:22.961386Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:22.961481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:22.961512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:22.961652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:22.961748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:22.961791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:22.961815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:22.961866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:22.961910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:22.961933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:22.961956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:22.962065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:22.962116Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:22.962143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:22.962158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:22.962216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:22.962265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:22.962293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:22.962314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:22.962357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:22.962384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:22.962401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:22.962424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:22.962445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:22.962464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:22.962568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-07T14:10:22.962624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-04-07T14:10:22.962693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-07T14:10:22.962770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-04-07T14:10:22.962859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:22.962890Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:22.962912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:22.963038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:22.963076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:22.963096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:22.963195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:22.963235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:22.963288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:22.963478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:22.963545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:22.963584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:22.963707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
al_id=26; 2025-04-07T14:10:29.096944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:29.097038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.097072Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:29.097106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:29.097379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:29.097596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.097635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:29.097766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:29.097860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:29.097990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T14:10:29.098143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.098250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.098392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2025-04-07T14:10:29.098604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:29.098739Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.098891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.098938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] finished for tablet 9437184 2025-04-07T14:10:29.099004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:29.099448Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:1022:2893] and sent to [1:1021:2892] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1744035029085171,"name":"_full_task","f":1744035029085171,"d_finished":0,"c":0,"l":1744035029099043,"d":13872},"events":[{"name":"bootstrap","f":1744035029085367,"d_finished":2807,"c":1,"l":1744035029088174,"d":2807},{"a":1744035029098580,"name":"ack","f":1744035029097351,"d_finished":1076,"c":1,"l":1744035029098427,"d":1539},{"a":1744035029098568,"name":"processing","f":1744035029089472,"d_finished":5363,"c":10,"l":1744035029098429,"d":5838},{"name":"ProduceResults","f":1744035029086890,"d_finished":2959,"c":13,"l":1744035029098922,"d":2959},{"a":1744035029098925,"name":"Finish","f":1744035029098925,"d_finished":0,"c":0,"l":1744035029099043,"d":118},{"name":"task_result","f":1744035029089488,"d_finished":4161,"c":9,"l":1744035029097174,"d":4161}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:29.099533Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:29.084721Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T14:10:29.099573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:29.099659Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:29.099776Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-04-07T14:10:23.390947Z node 1 :BLOB_CACHE NOTICE: 
MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:23.468883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:23.484300Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:23.484376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:23.484625Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:23.492430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:23.492645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:23.492884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:23.493050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:23.493169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:23.493287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:23.493426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:23.493554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:23.493648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:23.493730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:23.493798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:23.493876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:23.510467Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:23.512274Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:23.512411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:23.512459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:23.512620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:23.512808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:23.512884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:23.512924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:23.513009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:23.513073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:23.513122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:23.513150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:23.513273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:23.513328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:23.513362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:23.513404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:23.513477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:23.513531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:23.513593Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:23.513632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:23.513694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:23.513721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:23.513742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:23.513771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:23.513793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:23.513810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:23.513931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-07T14:10:23.514009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-07T14:10:23.514080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-07T14:10:23.514157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-07T14:10:23.514273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:23.514311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:23.514346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:23.514488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:23.514560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:23.514601Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:23.514792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:23.514847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:23.514876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:23.515103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:23.515182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:23.515224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:23.515383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... val_id=26; 2025-04-07T14:10:29.775312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:29.775417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.775448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:29.775481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:29.775738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:29.775987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: 
string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.776039Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:29.776193Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:29.776318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:29.776502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T14:10:29.776696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.776825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.776967Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.777203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:29.777347Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.777524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:29.777581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] finished for tablet 9437184 2025-04-07T14:10:29.777676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1022:2893] send ScanData to [1:1021:2892] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:29.778193Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:1022:2893] and sent to [1:1021:2892] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 
rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1744035029765203,"name":"_full_task","f":1744035029765203,"d_finished":0,"c":0,"l":1744035029777736,"d":12533},"events":[{"name":"bootstrap","f":1744035029765360,"d_finished":2301,"c":1,"l":1744035029767661,"d":2301},{"a":1744035029777181,"name":"ack","f":1744035029775705,"d_finished":1294,"c":1,"l":1744035029776999,"d":1849},{"a":1744035029777169,"name":"processing","f":1744035029768729,"d_finished":5467,"c":10,"l":1744035029777002,"d":6034},{"name":"ProduceResults","f":1744035029766601,"d_finished":2928,"c":13,"l":1744035029777562,"d":2928},{"a":1744035029777566,"name":"Finish","f":1744035029777566,"d_finished":0,"c":0,"l":1744035029777736,"d":170},{"name":"task_result","f":1744035029768741,"d_finished":4068,"c":9,"l":1744035029775531,"d":4068}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:29.778292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:29.764804Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T14:10:29.778345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:29.778423Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:29.778537Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1022:2893];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-04-07T14:10:26.142278Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:26.206667Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:26.223461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:26.223526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:26.223712Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:26.229657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:26.229798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:26.229984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:26.230068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:26.230147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:26.230275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:26.230429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:26.230538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:26.230652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:26.230779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:26.230884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:26.230975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:26.246297Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:26.247985Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:26.248096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:26.248154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:26.248366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:26.248533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:26.248622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:26.248683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:26.248777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:26.248856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:26.248914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:26.248947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:26.249083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:26.249154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:26.249193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:26.249220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:26.249317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:26.249386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:26.249422Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:26.249451Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:26.249496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:26.249518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:26.249536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:26.249567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:26.249593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:26.249621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:26.249754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-07T14:10:26.249821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T14:10:26.249889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-07T14:10:26.249958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-04-07T14:10:26.250067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:26.250101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:26.250124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:26.250292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:26.250330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:26.250348Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:26.250443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:26.250471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:26.250498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:26.250616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:26.250639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:26.250657Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:26.250751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... dx=0;count=31;merger=0;interval_id=25; 2025-04-07T14:10:30.179937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:30.179991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.180024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:30.180063Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:30.180251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:30.180404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: 
binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.180435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:30.180531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:30.180607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:30.180723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 2759 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary 2025-04-07T14:10:30.180853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.180936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.181062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.181214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:30.181304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.181411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:30.181440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] finished for tablet 9437184 2025-04-07T14:10:30.181489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:30.181785Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:426:2441] and sent to [1:425:2440] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 
page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744035030170759,"name":"_full_task","f":1744035030170759,"d_finished":0,"c":0,"l":1744035030181517,"d":10758},"events":[{"name":"bootstrap","f":1744035030170942,"d_finished":2292,"c":1,"l":1744035030173234,"d":2292},{"a":1744035030181199,"name":"ack","f":1744035030180230,"d_finished":858,"c":1,"l":1744035030181088,"d":1176},{"a":1744035030181191,"name":"processing","f":1744035030174253,"d_finished":3997,"c":10,"l":1744035030181090,"d":4323},{"name":"ProduceResults","f":1744035030172225,"d_finished":2201,"c":13,"l":1744035030181431,"d":2201},{"a":1744035030181433,"name":"Finish","f":1744035030181433,"d_finished":0,"c":0,"l":1744035030181517,"d":84},{"name":"task_result","f":1744035030174265,"d_finished":3059,"c":9,"l":1744035030180102,"d":3059}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:30.181833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:30.170375Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-07T14:10:30.181858Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:30.181945Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:30.182028Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteReadStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 31116, MsgBus: 14116 2025-04-07T14:07:36.772734Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572672954127376:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:36.772822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000da4/r3tmp/tmp5HUWN0/pdisk_1.dat 2025-04-07T14:07:37.165589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:37.188827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:37.188973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:37.194884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31116, node 1 2025-04-07T14:07:37.370557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:37.370576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:37.370597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:37.370695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14116 TClient is connected to server localhost:14116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:38.078395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:38.126946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:38.289417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:38.430237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:38.514444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:39.639524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572685839030989:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:39.639652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.077708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.104782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.137163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.160022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.183233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.248348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.296993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572690133998785:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.297084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.297166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572690133998790:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.299887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:40.307612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572690133998792:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:41.423039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:41.795664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572672954127376:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:41.795723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:42.835361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr8985ph633jd020c9dg07me, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY5MjEzYzItYjc2ZGFiYzEtYWE1NGYwOC0xNDI4OGY1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.836825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr8985phb2wtr5hn03fadah0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdlMjE1NmItOWVlNjU0ZWMtODI2YTE3YTEtOTI2M2MzZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.844068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr8985q87v3r6559cn4c7xef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE0NWJhYjAtNzgyODcyNzgtOTJmMGI0YTMtZjQ5NDY1ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.844295Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr8985q8173fqt8c7jttry2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc1MjFjN2EtYTk0NzJlYzUtNmQ0OTM3OC1mMDczMWU3Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.845235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr8985q16rv5xyj7kyb6tr21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhmYjIwZWUtNzI5MDBhZTMtYTgxZjVmNDEtOTdhZjExYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.848420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr8985q7crxprxwmsnbjgspr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlmMjFiMjQtZjZhYzAzYjktMzk2OWVhYmUtZWU3ODlmMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.850304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr8985q92ngdbpx68nwwa50n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RlNmE4MGQtYjQwNWYxOGEtMWU0YzFmYjEtY2YzMTViZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.851127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr8985q75pqkcb8mta30zkba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNhMWNmZjEtYzJlNjZhZmQtNDliNGZkYmItNjFkNjQwMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.852026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. 
Ctx: { TraceId: 01jr8985q8e293w8qbd3xy4e94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM3NTdlNTQtYWMxYzY0OTItNTJlNDJhZGQtZjVjYjQ1ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.867035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr8985q7fbc840ytz3qm6c7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNhYzFkMS0zYjc5MWRlMi0xN2U3MzE0ZS1iZWFmNjdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.875701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr8985q7crxprxwmsnbjgspr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlmMjFiMjQtZjZhYzAzYjktMzk2OWVhYmUtZWU3ODlmMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.877186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr8985q92ngdbpx68nwwa50n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RlNmE4MGQtYjQwNWYxOGEtMWU0YzFmYjEtY2YzMTViZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.886326Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710687. Ctx: { TraceId: 01jr8985ph633jd020c9dg07me, Database: , D ... sion/3?node_id=2&id=NzRhOWM0MDktODk1ZjYzYjUtNmM5NjFlMDEtOWI1YWFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.398080Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jr89d5e718y59axgbmh50agc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.398681Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. Ctx: { TraceId: 01jr89d5eq573bmg07kkgn7b26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOGUzYjAtZDhjMTkzMDUtZGQxYmM3ZDItMjllMDdhYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.402380Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jr89d5eh9558q2tzk2fkwqv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRhOWM0MDktODk1ZjYzYjUtNmM5NjFlMDEtOWI1YWFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.402569Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jr89d5ekb5ces82k54gzs3xs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.405032Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jr89d5eq573bmg07kkgn7b26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOGUzYjAtZDhjMTkzMDUtZGQxYmM3ZDItMjllMDdhYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.407488Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jr89d5eh9558q2tzk2fkwqv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRhOWM0MDktODk1ZjYzYjUtNmM5NjFlMDEtOWI1YWFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:10:26.410746Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jr89d5ekb5ces82k54gzs3xs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.413887Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jr89d5f48n8qqtrf66e8m482, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.415823Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jr89d5ekb5ces82k54gzs3xs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.417835Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jr89d5fc5h0h2jev4pgr9hvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGVhYzZjMDktMWQyZTRkMDctMzI3OTc2ZGItODVkMjEwNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.418522Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jr89d5f48n8qqtrf66e8m482, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.422194Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jr89d5fhfnt7v4k4qe13ea77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.422820Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jr89d5f48n8qqtrf66e8m482, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.423828Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jr89d5fc5h0h2jev4pgr9hvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGVhYzZjMDktMWQyZTRkMDctMzI3OTc2ZGItODVkMjEwNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.428406Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jr89d5f48n8qqtrf66e8m482, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.431447Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jr89d5fhfnt7v4k4qe13ea77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.431638Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. 
Ctx: { TraceId: 01jr89d5fc5h0h2jev4pgr9hvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGVhYzZjMDktMWQyZTRkMDctMzI3OTc2ZGItODVkMjEwNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.437699Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jr89d5fn1rzeykn5y7pg8tar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOGUzYjAtZDhjMTkzMDUtZGQxYmM3ZDItMjllMDdhYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.441123Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jr89d5g0cywttav1byz4s8wg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRhOWM0MDktODk1ZjYzYjUtNmM5NjFlMDEtOWI1YWFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.443372Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jr89d5fc5h0h2jev4pgr9hvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGVhYzZjMDktMWQyZTRkMDctMzI3OTc2ZGItODVkMjEwNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.451913Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jr89d5fn1rzeykn5y7pg8tar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOGUzYjAtZDhjMTkzMDUtZGQxYmM3ZDItMjllMDdhYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.455309Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jr89d5g0cywttav1byz4s8wg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRhOWM0MDktODk1ZjYzYjUtNmM5NjFlMDEtOWI1YWFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.460232Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jr89d5gjeanj0ywsr2xbg4ba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.463021Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jr89d5fn1rzeykn5y7pg8tar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOGUzYjAtZDhjMTkzMDUtZGQxYmM3ZDItMjllMDdhYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.464527Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jr89d5gjeanj0ywsr2xbg4ba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjU3YmYzZTQtOTBhY2Y5YWItYTc5MDRkMTgtZmUxYjM1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.465900Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jr89d5ghfqvrqjt4wn6z62cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-07T14:10:26.470605Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. 
Ctx: { TraceId: 01jr89d5ghfqvrqjt4wn6z62cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.471938Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jr89d5h4f1dtzzjkpb36q6v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.472250Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jr89d5h40vwr98evmn0xdypg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2FmZjk3MmQtY2M0ZWQyZWEtZTYyOTJmMDUtNDliOWYyYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.473020Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. Ctx: { TraceId: 01jr89d5ghfqvrqjt4wn6z62cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjc5YmQ0NTgtYjFkOGI1ZGMtYzkzODEyZDYtZTBmYzY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:10:26.480425Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jr89d5h40vwr98evmn0xdypg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2FmZjk3MmQtY2M0ZWQyZWEtZTYyOTJmMDUtNDliOWYyYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.480933Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jr89d5h4f1dtzzjkpb36q6v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.483001Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. Ctx: { TraceId: 01jr89d5h4f1dtzzjkpb36q6v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.483739Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jr89d5h40vwr98evmn0xdypg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2FmZjk3MmQtY2M0ZWQyZWEtZTYyOTJmMDUtNDliOWYyYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:26.488323Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. Ctx: { TraceId: 01jr89d5h4f1dtzzjkpb36q6v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkNzhmNzktZjhlNDQ1Y2MtMjRlOWU1OS04NGIxMWZhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-04-07T14:10:16.861031Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:16.923911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:16.938389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:16.938447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:16.938640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:16.944849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:16.945022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:16.945207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:16.945287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:16.945356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:16.945438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:16.945509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:16.945588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:16.945657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:16.945717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.945784Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:16.945850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:16.962654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:16.964795Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:16.964950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:16.965007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:16.965168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.965281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:16.965333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:16.965364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:16.965418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:16.965467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:16.965507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:16.965543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:16.965718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.965777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:16.965803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:16.965825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:16.965904Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:16.965947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:16.965976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:16.965996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:16.966063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:16.966090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:16.966110Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:16.966156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:16.966180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:16.966198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:16.966320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-07T14:10:16.966393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-07T14:10:16.966491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-07T14:10:16.966584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T14:10:16.966699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:16.966747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:16.966768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:16.966895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T14:10:16.966926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.966949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.967042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:16.967068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:16.967088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:16.967242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:16.967298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:16.967320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:16.967403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
4;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=49; 2025-04-07T14:10:32.872256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T14:10:32.872390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.872433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:10:32.872472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:32.872823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:10:32.872868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:10:32.872905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=49; 2025-04-07T14:10:32.872948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-04-07T14:10:32.872989Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:32.873075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.873106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-07T14:10:32.873160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:32.873983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:32.874088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.874118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:32.874211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-04-07T14:10:32.874263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-04-07T14:10:32.874380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4116:6128] send ScanData to [1:4111:6123] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 80 rows: 10 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T14:10:32.874486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.874601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.874686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.875058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:32.875137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.875202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:10:32.875242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4116:6128] finished for tablet 9437184 2025-04-07T14:10:32.875295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4116:6128] send ScanData to [1:4111:6123] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:32.875714Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:4116:6128] and sent to [1:4111:6123] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1744035032859596,"name":"_full_task","f":1744035032859596,"d_finished":0,"c":0,"l":1744035032875320,"d":15724},"events":[{"name":"bootstrap","f":1744035032860048,"d_finished":1663,"c":1,"l":1744035032861711,"d":1663},{"a":1744035032875043,"name":"ack","f":1744035032873964,"d_finished":744,"c":1,"l":1744035032874708,"d":1021},{"a":1744035032875033,"name":"processing","f":1744035032862492,"d_finished":2824,"c":8,"l":1744035032874710,"d":3111},{"name":"ProduceResults","f":1744035032860966,"d_finished":1851,"c":11,"l":1744035032875233,"d":1851},{"a":1744035032875234,"name":"Finish","f":1744035032875234,"d_finished":0,"c":0,"l":1744035032875320,"d":86},{"name":"task_result","f":1744035032862503,"d_finished":1967,"c":7,"l":1744035032873202,"d":1967}],"id":"9437184::49"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T14:10:32.875794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:32.859252Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-04-07T14:10:32.875831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:32.875895Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:32.875965Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4116:6128];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-04-07T14:10:13.664096Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816526Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.838787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.838866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.839202Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.847261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.847463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.847673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.847802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.847904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.848026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.848127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.848230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.848364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.848476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.848582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.848748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.866950Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.868981Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.869120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.869161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.869330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.870570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.870671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.870737Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.870863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.870955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.871012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.871054Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.871234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.871324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.871383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.871424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.871551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.871618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.871659Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.871694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.871784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.871827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.871863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.871913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.871950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.871980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.872175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-07T14:10:13.872265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-07T14:10:13.872388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T14:10:13.872509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-07T14:10:13.872712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.872786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.872823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.873020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.873079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.873114Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.873280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.873331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.873363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.873577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.873630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.873683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.873810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... age_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=2567; 2025-04-07T14:10:33.679607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6115; 2025-04-07T14:10:33.680069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=425; 2025-04-07T14:10:33.681112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=470; 2025-04-07T14:10:33.681163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1060; 2025-04-07T14:10:33.681238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=42; 2025-04-07T14:10:33.681304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:33.681333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-04-07T14:10:33.681529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=170; 2025-04-07T14:10:33.681695Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=140; 2025-04-07T14:10:33.682495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=771; 2025-04-07T14:10:33.683439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=906; 2025-04-07T14:10:33.683498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=22; 2025-04-07T14:10:33.683537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=15; 2025-04-07T14:10:33.683565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:10:33.683591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:10:33.683622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T14:10:33.683666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=23; 2025-04-07T14:10:33.683711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:10:33.683770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=32; 2025-04-07T14:10:33.683796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-04-07T14:10:33.683848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-04-07T14:10:33.683870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=14384; 2025-04-07T14:10:33.683973Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=33;blobs=66;rows=1126788;bytes=87164812;raw_bytes=110830786; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:33.684037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:33.684072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:33.684097Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:33.684136Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:33.684160Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:33.684195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:33.684249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:33.684289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:33.684312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:33.684341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:33.684362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:33.684384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:33.684427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:33.684464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:33.685032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:33.685108Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T14:10:33.685844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:33.686544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:33.686568Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-07T14:10:33.686584Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:33.686611Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:33.686654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:33.686705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:33.686747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:33.686778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:33.686817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:33.686845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:33.686878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:33.686914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:33.686947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:33.687277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T14:10:33.688067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T14:10:33.689849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:33.689881Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TBlobStorageProxyTest::TestBlock >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-04-07T14:10:14.938412Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:15.005320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:15.019660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:15.019724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:15.019900Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:15.025776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:15.025935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:15.026113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:15.026191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:15.026262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:15.026333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:15.026403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:15.026466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:15.026535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:15.026602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:15.026670Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:15.026746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:15.040245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:15.041931Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:15.042037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:15.042079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:15.042227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:15.042330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:15.042389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:15.042417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:15.042476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:15.042515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:15.042545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:15.042571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:15.042691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:15.042743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:15.042784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:15.042804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:15.042873Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:15.042911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:15.042940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:15.042971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:15.043024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:15.043060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:15.043100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:15.043131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:15.043155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:15.043170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:15.043305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-07T14:10:15.043372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-07T14:10:15.043443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T14:10:15.043509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-07T14:10:15.043639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:15.043732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:15.043762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:15.043891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T14:10:15.043922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:15.043948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:15.044035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:15.044057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:15.044099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:15.044229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:15.044254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:15.044270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:15.044349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
age_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3038; 2025-04-07T14:10:34.176588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7370; 2025-04-07T14:10:34.177118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=492; 2025-04-07T14:10:34.178778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=824; 2025-04-07T14:10:34.178848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1686; 2025-04-07T14:10:34.178944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=52; 2025-04-07T14:10:34.179010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:34.179039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2025-04-07T14:10:34.179337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=265; 2025-04-07T14:10:34.179742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=337; 2025-04-07T14:10:34.180692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=870; 2025-04-07T14:10:34.181480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=748; 2025-04-07T14:10:34.181529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=18; 2025-04-07T14:10:34.181586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=14; 2025-04-07T14:10:34.181621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:10:34.181648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:10:34.181689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T14:10:34.181737Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=24; 2025-04-07T14:10:34.181763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:10:34.181819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=30; 2025-04-07T14:10:34.181850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T14:10:34.181890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-04-07T14:10:34.181911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17461; 2025-04-07T14:10:34.182001Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=42;blobs=84;rows=1026094;bytes=79798400;raw_bytes=101677198; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:34.182064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:34.182096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:34.182120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:34.182159Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:34.182189Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:34.182222Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:34.182273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:34.182310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:34.182333Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:34.182363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:34.182382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:34.182404Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:34.182436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:34.182474Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:34.182979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:34.183032Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2045:4034];tablet_id=9437184;parent=[1:2012:4008];fline=manager.h:99;event=ask_data;request=request_id=91;1={portions_count=45};; 2025-04-07T14:10:34.183717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:34.184488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:34.184514Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:34.184531Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:34.184555Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:34.184592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:34.184632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:34.184684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:34.184718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:34.184746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:34.184770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:34.184796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:34.184833Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:34.184859Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:34.185158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=45;path_id=1; 2025-04-07T14:10:34.185992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=45;path_id=1; 2025-04-07T14:10:34.187808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:34.187839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-04-07T14:10:31.466178Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:31.529893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:31.542948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:31.543004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:31.543178Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:31.548814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:31.548945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:31.549123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:31.549201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:31.549274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:31.549334Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:31.549408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:31.549467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:31.549528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:31.549581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:31.549656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:31.549714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:31.563248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:31.565007Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:31.565107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:31.565141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:31.565267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:31.565381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:31.565440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:31.565464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:31.565538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:31.565577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T14:10:31.565599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:31.565618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:31.565729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:31.565778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:31.565807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:31.565824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:31.565886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:31.565934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:31.565959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:31.565981Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:31.566019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:31.566039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:31.566061Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:31.566085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:31.566117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:31.566140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:31.566254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-07T14:10:31.566300Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=19; 2025-04-07T14:10:31.566361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T14:10:31.566429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-07T14:10:31.566516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:31.566542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:31.566562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:31.566691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:31.566722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:31.566738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:31.566831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:31.566857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:31.566878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:31.566988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:31.567012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:31.567032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:31.567114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
=0;count=31;merger=0;interval_id=25; 2025-04-07T14:10:34.780514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:10:34.780618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.780645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=31;finished=1; 2025-04-07T14:10:34.780708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:10:34.780964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:34.781155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.781195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:10:34.781325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-07T14:10:34.781413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-07T14:10:34.781565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2025-04-07T14:10:34.781705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.781823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.781950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2025-04-07T14:10:34.782147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:10:34.782287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.782433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-07T14:10:34.782471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] finished for tablet 9437184 2025-04-07T14:10:34.782540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:426:2441] send ScanData to [1:425:2440] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:10:34.782940Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:426:2441] and sent to [1:425:2440] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1744035034772555,"name":"_full_task","f":1744035034772555,"d_finished":0,"c":0,"l":1744035034782580,"d":10025},"events":[{"name":"bootstrap","f":1744035034772711,"d_finished":1938,"c":1,"l":1744035034774649,"d":1938},{"a":1744035034782127,"name":"ack","f":1744035034780943,"d_finished":1042,"c":1,"l":1744035034781985,"d":1495},{"a":1744035034782116,"name":"processing","f":1744035034774689,"d_finished":4217,"c":10,"l":1744035034781987,"d":4681},{"name":"ProduceResults","f":1744035034773777,"d_finished":2597,"c":13,"l":1744035034782459,"d":2597},{"a":1744035034782461,"name":"Finish","f":1744035034782461,"d_finished":0,"c":0,"l":1744035034782580,"d":119},{"name":"task_result","f":1744035034774697,"d_finished":3089,"c":9,"l":1744035034780764,"d":3089}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2025-04-07T14:10:34.783007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:10:34.772235Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-07T14:10:34.783043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:10:34.783131Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:10:34.783251Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:426:2441];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TBlobStorageProxyTest::TestEmptyDiscover >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> 
TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 27320, MsgBus: 11116 2025-04-07T14:07:36.767595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572671752635149:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:36.768003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dd5/r3tmp/tmpr6VSsa/pdisk_1.dat 2025-04-07T14:07:37.142347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:37.174177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:37.174308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:37.177439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27320, node 1 2025-04-07T14:07:37.372891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:37.372919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:37.372927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:37.373029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11116 TClient is connected to server localhost:11116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:38.090687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:38.126768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:38.291146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:38.428921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:07:38.492441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:39.620706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572684637538609:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:39.620882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.078077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.103053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.127286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.149797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.183638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.212895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:40.276959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572688932506400:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.277061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.277334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572688932506405:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:40.282188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:40.290646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572688932506407:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:41.427735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:41.768779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572671752635149:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:41.768868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:42.570138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr8985e7b51eh7ywm52tdkam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxMDA1NDItZTY4M2UxMzQtNmI0Yjk3NS03MDgwNDUyNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.570164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr8985e23msk45dj01wsr1hy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTEwN2JiZi1kNWYyZDI5LWQzNDVlZGFkLTZmZTZiYmQx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.573996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr8985e42ds59ck01xedwemf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzNWQxZi0yY2Y4ZTZiYS1kY2YxM2UwNi1iMGJjNzQzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.574488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr8985e25ssdwpdsxc77mb06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3OWExOGEtNjNhMzAxMTYtYjhkMjA2NTgtMzdmZmU0Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.574843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr8985e43mbtz52zedmtaqvy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhNTJiMi1kYWQwNjRlNC1jM2I4ZGQxNS1kODE0ZGQ4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.575604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr8985e94z5c15bexpa6pqqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQzNTk1MWQtMTk0YTkzZGYtZDRmNDI2ZjQtM2I3MGJkODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.576036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr8985e9csjqpnf3m7p9qepe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRkOTQzMjEtN2Q4NGJlNzctNjRlZmFkMDAtNWY1ZDg4MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.576419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr8985e43mjdkhbm6n7k993m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZmYzQ5MDEtZDRhMTM3NmQtN2JhMWNjZDQtOTI3ZGI2NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.589159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. 
Ctx: { TraceId: 01jr8985e2ap9jjz5gq629qn09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZmMjg1ODctOGU0OWZlZTctOTA2YWU5YjUtZjQ1NTg4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.595057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr8985e913jysmq3xr9vhyt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI2ZjMxMWUtODczNjNmZGEtYzhmZDZlZGQtYTNjMDQ1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.612476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710686. Ctx: { TraceId: 01jr8985e9csjqpnf3m7p9qepe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRkOTQzMjEtN2Q4NGJlNzctNjRlZmFkMDAtNWY1ZDg4MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.613581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr8985e25ssdwpdsxc77mb06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3OWExOGEtNjNhMzAxMTYtYjhkMjA2NTgtMzdmZmU0Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:42.616158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710685. Ctx: { TraceId: 01jr8985e2ap9jjz5gq629qn09, Database: , Datab ... sion/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.646037Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731636. Ctx: { TraceId: 01jr89dajb11yzmhyv5y1g3yjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.648866Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731637. Ctx: { TraceId: 01jr89daja2jqvga2z9f7j5666, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYzZDg3ZWQtMTc0ODk1OTAtYTgxZTA4NzItNWRiYjAwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.653101Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731638. Ctx: { TraceId: 01jr89daj285dsqz8txbc065mx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.656559Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731639. Ctx: { TraceId: 01jr89daja2jqvga2z9f7j5666, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYzZDg3ZWQtMTc0ODk1OTAtYTgxZTA4NzItNWRiYjAwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.658482Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731640. Ctx: { TraceId: 01jr89dajb11yzmhyv5y1g3yjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.661187Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731642. Ctx: { TraceId: 01jr89dajb11yzmhyv5y1g3yjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:10:31.665792Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731643. Ctx: { TraceId: 01jr89dajb11yzmhyv5y1g3yjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.667367Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731644. Ctx: { TraceId: 01jr89daja2jqvga2z9f7j5666, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYzZDg3ZWQtMTc0ODk1OTAtYTgxZTA4NzItNWRiYjAwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.668319Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731641. Ctx: { TraceId: 01jr89dak0ddjb45tgy3bgp756, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YjdkNDItNzA3OTRkNmQtZDdmM2FiNDgtYzhmNjliMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.678065Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731646. Ctx: { TraceId: 01jr89dakneknrqt0n41pp76hb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.678507Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731647. Ctx: { TraceId: 01jr89dak0ddjb45tgy3bgp756, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YjdkNDItNzA3OTRkNmQtZDdmM2FiNDgtYzhmNjliMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.679235Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731645. Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.683524Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731648. Ctx: { TraceId: 01jr89dakneknrqt0n41pp76hb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.684392Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731649. Ctx: { TraceId: 01jr89dak0ddjb45tgy3bgp756, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YjdkNDItNzA3OTRkNmQtZDdmM2FiNDgtYzhmNjliMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.687049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731650. Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.688001Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731651. Ctx: { TraceId: 01jr89dak0ddjb45tgy3bgp756, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2U1YjdkNDItNzA3OTRkNmQtZDdmM2FiNDgtYzhmNjliMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.692711Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731652. 
Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:10:31.694963Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731653. Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.696896Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731654. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.697664Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731656. Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.700783Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731655. Ctx: { TraceId: 01jr89damddrm7gjnwwc6293xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.701801Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731658. Ctx: { TraceId: 01jr89dakm1mf9kx7mq02dwk41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzNkZGY4ODktZmI0MzVlOTgtYTM2OGViNzAtNzExMmY5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.702898Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731657. Ctx: { TraceId: 01jr89damef1gdvyq91zvgvyfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:10:31.703901Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731659. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.706121Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731660. Ctx: { TraceId: 01jr89damddrm7gjnwwc6293xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.709771Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731661. Ctx: { TraceId: 01jr89damef1gdvyq91zvgvyfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.711327Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731663. 
Ctx: { TraceId: 01jr89damddrm7gjnwwc6293xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.712385Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731664. Ctx: { TraceId: 01jr89damef1gdvyq91zvgvyfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.713213Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731662. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.714567Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731665. Ctx: { TraceId: 01jr89damddrm7gjnwwc6293xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.715780Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731667. Ctx: { TraceId: 01jr89damef1gdvyq91zvgvyfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZkZmUwZWUtOGMyMzBhNTQtNzM3YTcxODctMjY1OGJmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.715781Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731666. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:10:31.722981Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731668. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:31.725002Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731669. Ctx: { TraceId: 01jr89damd3sxev1tghfr9hfcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVmM2I1ODctMjcwZjI1N2YtMzdkNDc2Y2MtMTE4OTc4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-07T14:10:31.747866Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731670. Ctx: { TraceId: 01jr89damddrm7gjnwwc6293xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWY5MDBmZDgtOWRkNzhjZjgtNGNmMGU3ZWYtNWY5OGY3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-04-07T14:10:17.111142Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:17.168681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:17.183584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:17.183632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:17.183822Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:17.189910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:17.190071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:17.190237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:17.190303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:17.190374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:17.190445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:17.190515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:17.190575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:17.190651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:17.190718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.190782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:17.190851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:17.205903Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:17.207713Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:17.207815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:17.207848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:17.207965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.208071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:17.208112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:17.208136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:17.208201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:17.208234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:17.208268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:17.208291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:17.208401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.208439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:17.208461Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:17.208492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:17.208558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:17.208596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:17.208632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:17.208653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:17.208729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:17.208769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:17.208794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:17.208825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:17.208849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:17.208865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:17.208976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=27; 2025-04-07T14:10:17.209039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T14:10:17.209107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-04-07T14:10:17.209160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=21; 2025-04-07T14:10:17.209246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:17.209286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:17.209315Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:17.209450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:17.209482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.209499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.209583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:17.209611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:17.209627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:17.209752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:17.209776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:17.209791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:17.209862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
age_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3494; 2025-04-07T14:10:37.528596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8730; 2025-04-07T14:10:37.529129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=489; 2025-04-07T14:10:37.530271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=434; 2025-04-07T14:10:37.530322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1149; 2025-04-07T14:10:37.530407Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=49; 2025-04-07T14:10:37.530492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:37.530525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=90; 2025-04-07T14:10:37.530719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=167; 2025-04-07T14:10:37.530902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=154; 2025-04-07T14:10:37.531675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=737; 2025-04-07T14:10:37.532499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=783; 2025-04-07T14:10:37.532556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=21; 2025-04-07T14:10:37.532596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=14; 2025-04-07T14:10:37.532625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:10:37.532652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:10:37.532706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T14:10:37.532761Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=24; 2025-04-07T14:10:37.532789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:10:37.532862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2025-04-07T14:10:37.532897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T14:10:37.532939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-04-07T14:10:37.532972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17373; 2025-04-07T14:10:37.533128Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=42;blobs=84;rows=1026094;bytes=79798400;raw_bytes=101677198; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:37.533231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:37.533286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:37.533324Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:37.533382Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:37.533422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:37.533491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:37.533574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:37.533631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:37.533674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:37.533727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:37.533761Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:37.533801Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:37.533849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:37.533877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:37.534340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:37.534397Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2045:4034];tablet_id=9437184;parent=[1:2012:4008];fline=manager.h:99;event=ask_data;request=request_id=91;1={portions_count=45};; 2025-04-07T14:10:37.535800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:37.535877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:37.535896Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:37.535912Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:37.535939Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:37.535993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:37.536037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:37.536073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:37.536111Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:37.536148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:37.536174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:37.536217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:37.536260Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:37.536290Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:37.536764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=45;path_id=1; 2025-04-07T14:10:37.538109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=45;path_id=1; 2025-04-07T14:10:37.540272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:37.540308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2012:4008];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TBlobStorageProxyTest::TestDoubleFailure >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestQuadrupleGroups >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-04-07T14:10:21.628249Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:21.688819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:21.703617Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:21.703666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:21.703835Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:21.709551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:21.709695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:21.709855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:21.709929Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:21.710044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:21.710194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:21.710312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:21.710428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:21.710538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:21.710685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:21.710797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:21.710887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:21.725979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:21.727664Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:21.727777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:21.727812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:21.727952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:21.728080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:21.728131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:21.728164Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:21.728219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:21.728258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:21.728290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:21.728325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:21.728440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:21.728495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:21.728521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:21.728539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:21.728605Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:21.728645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:21.728700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:21.728721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:21.728776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:21.728798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:21.728820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:21.728848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:21.728870Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:21.728896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:21.729004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-07T14:10:21.729078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T14:10:21.729152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-07T14:10:21.729219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-07T14:10:21.729344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:21.729380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:21.729403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:21.729530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:21.729563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:21.729581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:21.729684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:21.729711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:21.729730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:21.729857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:21.729885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:21.729906Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:21.729986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... _name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3455; 2025-04-07T14:10:40.888373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8128; 2025-04-07T14:10:40.889312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=885; 2025-04-07T14:10:40.890847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=805; 2025-04-07T14:10:40.890910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1545; 2025-04-07T14:10:40.891011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=60; 2025-04-07T14:10:40.891113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:40.891151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=105; 2025-04-07T14:10:40.891548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=358; 2025-04-07T14:10:40.891913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=320; 2025-04-07T14:10:40.893763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1801; 2025-04-07T14:10:40.895585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1765; 2025-04-07T14:10:40.895662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=28; 2025-04-07T14:10:40.895716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-07T14:10:40.895752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T14:10:40.895786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T14:10:40.895822Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T14:10:40.895881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-07T14:10:40.895914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:10:40.895992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-04-07T14:10:40.896048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T14:10:40.896114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-07T14:10:40.896144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21342; 2025-04-07T14:10:40.896267Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=33;blobs=66;rows=1126788;bytes=85356024;raw_bytes=106323502; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:40.896352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:40.896395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:40.896429Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:40.896489Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:40.896521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:40.896569Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:40.896636Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:40.896706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:40.896740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:40.896778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:40.896806Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:40.896854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:40.896906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:40.896939Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:40.897624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:40.897711Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T14:10:40.898568Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:40.899287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:40.899317Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:40.899337Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:40.899371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:40.899430Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:40.899486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:40.899534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:40.899582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:40.899633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:40.899664Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:40.899697Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:40.899743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:40.899779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:40.900268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T14:10:40.901251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T14:10:40.903712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:40.903758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-04-07T14:10:39.257651Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001eb9/r3tmp/tmpLopIx9//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:39.257980Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001eb9/r3tmp/tmpLopIx9//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2025-04-07T14:10:39.260226Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 
LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:39.260409Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> OlapEstimationRowsCorrectness::TPCH21 >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-04-07T14:10:13.663951Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.834322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.834394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.834648Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.845597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.845888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.846120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.846231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.846341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.846481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.846592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.846724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.846832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.846928Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.847063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.847164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.870945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.873128Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.873279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.873335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.873503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.873718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.873756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.873860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.873967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.874029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.874224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.874297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.874343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.874373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.874482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.874551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.874593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.874641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.874721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.874762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.874795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.874841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.874879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.874908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.875094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-07T14:10:13.875178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-07T14:10:13.875280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-07T14:10:13.875396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-07T14:10:13.875553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.875627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.875662Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.875866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.875915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.875945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.876098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.876145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.876180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.876372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.876416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.876456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.876576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=4113; 2025-04-07T14:10:41.946340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9434; 2025-04-07T14:10:41.947169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=769; 2025-04-07T14:10:41.948778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=769; 2025-04-07T14:10:41.948859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1618; 2025-04-07T14:10:41.948980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2025-04-07T14:10:41.949088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:41.949125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=105; 2025-04-07T14:10:41.949523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=346; 2025-04-07T14:10:41.949847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=267; 2025-04-07T14:10:41.951424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1513; 2025-04-07T14:10:41.953087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1599; 2025-04-07T14:10:41.953192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=50; 2025-04-07T14:10:41.953259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25; 2025-04-07T14:10:41.953300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:10:41.953343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-07T14:10:41.953390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T14:10:41.953471Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-07T14:10:41.953525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-07T14:10:41.953627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-04-07T14:10:41.953673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T14:10:41.953743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2025-04-07T14:10:41.953778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22863; 2025-04-07T14:10:41.953912Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=35;blobs=70;rows=1239297;bytes=95895024;raw_bytes=116845255; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:41.954017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:41.954075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:41.954112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:41.954174Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:41.954212Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:41.954267Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:41.954353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:41.954417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:41.954455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:41.954501Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:41.954540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:41.954585Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:41.954640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:41.954681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:41.955352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:41.955442Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T14:10:41.956095Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:41.956316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:41.956346Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:41.956368Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:41.956409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:41.956476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:41.956534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:41.956588Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:41.956641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:41.956708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:41.956749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:41.956786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:41.956851Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:41.956893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:41.957346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T14:10:41.958780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T14:10:41.961972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:41.962019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TBlobStorageProxyTest::TestProxyGetSingleTimeout |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestPersistence >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-04-07T14:10:39.922972Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea4/r3tmp/tmpizuUbC//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:39.923450Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea4/r3tmp/tmpizuUbC//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2025-04-07T14:10:39.926502Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 
LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:39.926928Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:42.816151Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea4/r3tmp/tmpSMMtaH//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:42.816984Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea4/r3tmp/tmpSMMtaH//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2025-04-07T14:10:42.819799Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:42.820066Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-04-07T14:10:14.383112Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:14.446582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:14.460513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:14.460565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:14.460778Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:14.469023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:14.469232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:14.469519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:14.469644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:14.469747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:14.469870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:14.469983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:14.470087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:14.470217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:14.470341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:14.470467Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:14.470573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:14.494844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:14.497090Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:14.497226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:14.497279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:14.497487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:14.497668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:14.497755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:14.497806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:14.497892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:14.497952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:14.498002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:14.498057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:14.498232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:14.498296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:14.498338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:14.498375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:14.498484Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:14.498567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:14.498612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:14.498646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:14.498731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:14.498773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:14.498805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:14.498852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:14.498892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:14.498921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:14.499088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T14:10:14.499176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-07T14:10:14.499306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-07T14:10:14.499403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-07T14:10:14.499570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:14.499637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:14.499673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:14.499882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-07T14:10:14.499931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:14.499962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:14.500116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:14.500178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:14.500227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:14.500433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:14.500496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:14.500527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:14.500639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=4910; 2025-04-07T14:10:44.868472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11041; 2025-04-07T14:10:44.869042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=513; 2025-04-07T14:10:44.870119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=460; 2025-04-07T14:10:44.870172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1080; 2025-04-07T14:10:44.870250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=46; 2025-04-07T14:10:44.870314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:44.870341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-04-07T14:10:44.870578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=204; 2025-04-07T14:10:44.870779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=165; 2025-04-07T14:10:44.871675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=858; 2025-04-07T14:10:44.872697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=976; 2025-04-07T14:10:44.872772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=29; 2025-04-07T14:10:44.872807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=14; 2025-04-07T14:10:44.872852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-07T14:10:44.872887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:10:44.872915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T14:10:44.872962Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2025-04-07T14:10:44.873002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:10:44.873076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2025-04-07T14:10:44.873120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T14:10:44.873160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-04-07T14:10:44.873190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20129; 2025-04-07T14:10:44.873290Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=51;blobs=102;rows=1251798;bytes=102220660;raw_bytes=124064310; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:44.873372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:44.873413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:44.873487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:44.873538Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:44.873565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:44.873612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:44.873678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:44.873729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:44.873762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:44.873801Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:44.873824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:44.873864Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:44.873898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:44.873926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:44.874512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:44.874576Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4210:5838];tablet_id=9437184;parent=[1:4155:5790];fline=manager.h:99;event=ask_data;request=request_id=199;1={portions_count=54};; 2025-04-07T14:10:44.875203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:44.875448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:44.875468Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:44.875482Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:44.875509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:44.875543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:44.875586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:44.875623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:44.875672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:44.875715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:44.875740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:44.875778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:44.875823Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:44.875854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:44.876626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=54;path_id=1; 2025-04-07T14:10:44.877813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=54;path_id=1; 2025-04-07T14:10:44.880526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:44.880558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4155:5790];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-04-07T14:10:38.697145Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea8/r3tmp/tmpXFPvA4//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2025-04-07T14:10:38.698849Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:41.419618Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea8/r3tmp/tmpXFPvA4//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2025-04-07T14:10:41.421074Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:42.677315Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea8/r3tmp/tmpXFPvA4//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2025-04-07T14:10:42.679218Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:43.993214Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea8/r3tmp/tmpXFPvA4//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2025-04-07T14:10:43.995361Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in 
StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:45.313852Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001ea8/r3tmp/tmpXFPvA4//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2025-04-07T14:10:45.315331Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-04-07T14:10:17.839050Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:17.916727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:17.937651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:17.937726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:17.937968Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:17.945987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:17.946172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:17.946446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:17.946574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:17.946665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:17.946783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:17.946882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:17.946982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:17.947119Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:17.947222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.947351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:17.947454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:17.969218Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:17.971356Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:17.971493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:17.971553Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:17.971757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.971903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:17.971969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:17.972008Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:17.972088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:17.972169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:17.972220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:17.972253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:17.972413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:17.972468Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:17.972508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:17.972536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:17.972628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:17.972704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:17.972751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:17.972795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:17.972863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:17.972897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:17.972924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:17.972970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:17.973007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:17.973031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:17.973201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-07T14:10:17.973292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-07T14:10:17.973408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T14:10:17.973495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-07T14:10:17.973656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:17.973727Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:17.973762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:17.973956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:17.974008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.974049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:17.974215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:17.974262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:17.974290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:17.974478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:17.974516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:17.974545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:17.974669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3319; 2025-04-07T14:10:47.320082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7688; 2025-04-07T14:10:47.320831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=698; 2025-04-07T14:10:47.322168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=616; 2025-04-07T14:10:47.322226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1347; 2025-04-07T14:10:47.322328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=60; 2025-04-07T14:10:47.322434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:47.322489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=121; 2025-04-07T14:10:47.322810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=284; 2025-04-07T14:10:47.323058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=212; 2025-04-07T14:10:47.324444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1339; 2025-04-07T14:10:47.325859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1353; 2025-04-07T14:10:47.325939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=29; 2025-04-07T14:10:47.325988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=18; 2025-04-07T14:10:47.326021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:10:47.326054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-07T14:10:47.326084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T14:10:47.326158Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-07T14:10:47.326202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:10:47.326277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=38; 2025-04-07T14:10:47.326318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T14:10:47.326385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-04-07T14:10:47.326422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19432; 2025-04-07T14:10:47.326536Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=35;blobs=70;rows=1239297;bytes=95895024;raw_bytes=116845255; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:47.326613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:47.326652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:47.326684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:47.326735Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:47.326765Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:47.326807Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.326867Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:47.326927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:47.326965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:47.327000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.327025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.327068Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:47.327117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:47.327149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:47.327687Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.327772Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T14:10:47.328380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:47.328504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:47.328528Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:47.328548Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:47.328590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:47.328640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.328700Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:47.328754Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:47.328789Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:47.328824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.328854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.328885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:47.328938Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:47.328987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:47.329728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T14:10:47.330782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T14:10:47.333348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:47.333399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap Test command err: Trying to start YDB, gRPC: 4009, MsgBus: 62549 2025-04-07T14:03:34.975738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490571634211411275:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:34.975857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010fc/r3tmp/tmpmkKKcL/pdisk_1.dat 2025-04-07T14:03:35.623071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:35.669707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:35.669796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:35.673424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4009, node 1 2025-04-07T14:03:36.061157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:36.061176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:36.061190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:36.061276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62549 TClient is connected to server localhost:62549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:36.795423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:36.874184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.161883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.318210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:37.387367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:38.316322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571651391282004:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:38.316411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:39.975386Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490571634211411275:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:39.975448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:03:40.453775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.535368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.555871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.575258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.600606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.623189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:03:40.657934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659981217111:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.658010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.658410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490571659981217116:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:03:40.664037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:03:40.680861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490571659981217118:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:03:41.556958Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571664276184734:2469], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-04-07T14:03:41.558298Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2M1NTg3NWMtY2EzMDZiZmItM2M1NDVmNDktNzYyMzg2MzE=, ActorId: [1:7490571664276184726:2465], ActorState: ExecuteState, TraceId: 01jr890t2ha6jg8ctkt1r2c314, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:03:41.581974Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490571664276184742:2472], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-04-07T14:03:41.583020Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI1ZWMxZTYtOGM3MDFjOWEtM2NkNWEzZTEtY2YyMWVmODk=, ActorId: [1:7490571664276184740:2471], ActorState: ExecuteState, TraceId: 01jr890t40e4bfkb4xq3d6rxc2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15272, MsgBus: 21745 2025-04-07T14:03:42.398038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490571664995070351:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:03:42.398105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010fc/r3tmp/tmp0JzSOV/pdisk_1.dat 2025-04-07T14:03:42.491942Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:03:42.502747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:03:42.502799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:03:42.504294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15272, node 2 2025-04-07T14:03:42.574411Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:03:42.574431Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:03:42.574439Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:03:42.574529Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21745 TClient is connected to server localhost:21745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:03:42.926481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:03:42.933540Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemes ... 
86224039386;local_tx_no=47505;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039850; 2025-04-07T14:10:39.439074Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47506;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039859; 2025-04-07T14:10:39.439095Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47507;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039825; 2025-04-07T14:10:39.439115Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47508;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039840; 2025-04-07T14:10:39.439137Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47509;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039826; 2025-04-07T14:10:39.439157Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47510;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039838; 2025-04-07T14:10:39.439181Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47511;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039841; 2025-04-07T14:10:39.439212Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47512;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039797; 2025-04-07T14:10:39.439235Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47513;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039844; 2025-04-07T14:10:39.439279Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47514;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039843; 2025-04-07T14:10:39.439311Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47515;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039845; 2025-04-07T14:10:39.439333Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47516;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039834; 2025-04-07T14:10:39.439352Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47517;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039822; 2025-04-07T14:10:39.439373Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47518;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039849; 2025-04-07T14:10:39.439395Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47519;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039877; 2025-04-07T14:10:39.439416Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47520;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039885; 2025-04-07T14:10:39.439436Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47521;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039862; 2025-04-07T14:10:39.439458Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47522;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039886; 2025-04-07T14:10:39.439478Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039386;local_tx_no=47523;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039855; 2025-04-07T14:10:39.439497Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47524;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039851; 2025-04-07T14:10:39.439518Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47525;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039853; 2025-04-07T14:10:39.439542Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47526;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039837; 2025-04-07T14:10:39.439561Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47527;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039857; 2025-04-07T14:10:39.439590Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47528;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039836; 2025-04-07T14:10:39.439623Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47529;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039864; 2025-04-07T14:10:39.439650Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47530;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039868; 2025-04-07T14:10:39.439670Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47531;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039876; 2025-04-07T14:10:39.439692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47532;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039829; 2025-04-07T14:10:39.439718Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47533;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039846; 2025-04-07T14:10:39.439738Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47534;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039854; 2025-04-07T14:10:39.439759Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47535;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039869; 2025-04-07T14:10:39.439779Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47536;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039861; 2025-04-07T14:10:39.439801Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47537;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039872; 2025-04-07T14:10:39.439825Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47538;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039887; 2025-04-07T14:10:39.456262Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47539;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039860; 2025-04-07T14:10:39.456330Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47540;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039879; 2025-04-07T14:10:39.456354Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039386;local_tx_no=47541;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039870; 2025-04-07T14:10:39.456377Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47542;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039882; 2025-04-07T14:10:39.456400Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47543;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039847; 2025-04-07T14:10:39.456423Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47544;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039867; 2025-04-07T14:10:39.456448Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47545;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039874; 2025-04-07T14:10:39.456471Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47546;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039848; 2025-04-07T14:10:39.456494Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47547;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039858; 2025-04-07T14:10:39.456516Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47548;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039865; 2025-04-07T14:10:39.456540Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47549;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039866; 2025-04-07T14:10:39.456561Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47550;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039871; 2025-04-07T14:10:39.456583Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47551;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039884; 2025-04-07T14:10:39.456604Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47552;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039883; 2025-04-07T14:10:39.456628Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47553;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039863; 2025-04-07T14:10:39.456652Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47554;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039873; 2025-04-07T14:10:39.456699Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47555;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039880; 2025-04-07T14:10:39.456720Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47556;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039878; 2025-04-07T14:10:39.456743Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47557;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039875; 2025-04-07T14:10:39.456764Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47558;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039856; 2025-04-07T14:10:39.456786Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039386;local_tx_no=47559;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039881; 2025-04-07T14:10:39.456807Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224039386;local_tx_no=47560;tx_info=;fline=primary.h:145;event=ack_tablet_duplication;wait=;receive=72075186224039752; VERIFY failed (2025-04-07T14:10:39.462681Z): ydb/core/tx/columnshard/columnshard__progress_tx.cpp:33 Execute(): requirement Self->ProgressTxInFlight failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1A70DDC2 1. /-S/util/system/yassert.cpp:55: Panic @ 0x1A6FD41A 2. /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:33: Execute @ 0x35E5A771 3. /-S/ydb/core/tablet_flat/flat_executor.cpp:1739: ExecuteTransaction @ 0x321B849C 4. /-S/ydb/core/tablet_flat/flat_executor.cpp:2668: Handle @ 0x321DA6DD 5. /-S/ydb/core/tablet_flat/flat_executor.cpp:3916: StateWork @ 0x32184339 6. /-S/ydb/library/actors/core/actor.h:533: Receive @ 0x1D4ECCF6 7. /-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x1D4E6AC8 8. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() @ 0x1D4EFF24 9. /-S/ydb/library/actors/core/executor_thread.cpp:479: ProcessExecutorPool @ 0x1D4EEE2D 10. /-S/ydb/library/actors/core/executor_thread.cpp:510: ThreadProc @ 0x1D4F181A 11. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1A719964 12. /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x1A3D1A28 13. ??:0: ?? @ 0x7FAEC0569AC2 14. ??:0: ?? @ 0x7FAEC05FB84F >> TBlobStorageProxyTest::TestGetMultipart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-04-07T14:10:20.301766Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:20.379061Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:20.396907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:20.396966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:20.397149Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:20.405396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:20.405593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:20.405820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:20.405929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:20.406029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:20.406152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:20.406250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:20.406370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:20.406502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:20.406602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:20.406736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:20.406836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:20.429014Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:20.431225Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:20.431405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:20.431459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:20.431671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:20.431831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:20.431902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:20.431959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:20.432043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-07T14:10:20.432112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:20.432159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:20.432192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:20.432362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:20.432421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:20.432457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:20.432488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:20.432602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:20.432684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:20.432728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:20.432776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:20.432866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:20.432908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:20.432938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:20.432985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:20.433022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:20.433047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:20.433210Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-07T14:10:20.433310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-07T14:10:20.433432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T14:10:20.433513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-07T14:10:20.433673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:20.433728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:20.433758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:20.433944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:20.434009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:20.434043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:20.434202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:20.434244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:20.434274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:20.434466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:20.434505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:20.434532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:20.434643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3469; 2025-04-07T14:10:47.618459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7910; 2025-04-07T14:10:47.619106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=592; 2025-04-07T14:10:47.620470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=551; 2025-04-07T14:10:47.620540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1377; 2025-04-07T14:10:47.620643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=59; 2025-04-07T14:10:47.620751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:47.620793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=103; 2025-04-07T14:10:47.621053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=226; 2025-04-07T14:10:47.621301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=210; 2025-04-07T14:10:47.622434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1087; 2025-04-07T14:10:47.623597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1115; 2025-04-07T14:10:47.623665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=25; 2025-04-07T14:10:47.623744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=35; 2025-04-07T14:10:47.623786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-07T14:10:47.623825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-07T14:10:47.623863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T14:10:47.623923Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2025-04-07T14:10:47.623956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:10:47.624024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=39; 2025-04-07T14:10:47.624058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T14:10:47.624107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2025-04-07T14:10:47.624136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19171; 2025-04-07T14:10:47.624253Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=35;blobs=70;rows=1239297;bytes=95895024;raw_bytes=116845255; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:47.624332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:47.624376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:47.624409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:47.624471Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:47.624504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:47.624565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.624637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:47.624705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:47.624742Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:47.624784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.624813Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.624844Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:47.624893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:47.624929Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:47.625458Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T14:10:47.625845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.626237Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:47.626383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:47.626420Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:47.626442Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:47.626491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:47.626541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:47.626583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:10:47.626635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:10:47.626673Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:47.626711Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.626742Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:47.626778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:47.626822Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:47.626868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:47.627545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T14:10:47.628566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T14:10:47.630953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:47.630997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestCollectGarbagePersistence |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |93.9%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-04-07T14:10:46.570708Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e98/r3tmp/tmppsjSDM//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:46.572649Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] |93.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-04-07T14:10:30.335393Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:30.417065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:30.437936Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:30.438013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:30.438229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:30.445861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:30.446051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:30.446281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:30.446416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:30.446505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:30.446619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:30.446711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:30.446802Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:30.446914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:30.447013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:30.447122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:30.447256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:30.469146Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:30.471639Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:30.471793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:30.471836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:30.472025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:30.472192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:30.472257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:30.472294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:30.472397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:30.472460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:30.472510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:30.472552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:30.472729Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:30.472811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:30.472857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:30.472883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:30.472980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:30.473038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:30.473096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:30.473160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:30.473223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:30.473257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:30.473286Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:30.473325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:30.473355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:30.473377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:30.473521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-07T14:10:30.473584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-04-07T14:10:30.473688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-07T14:10:30.473774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-07T14:10:30.473917Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:30.473963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:30.473991Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:30.474181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:30.474255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:30.474289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:30.474405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:30.474438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:30.474475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:30.474688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:30.474749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:30.474785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:30.474919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
tage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=2497; 2025-04-07T14:10:49.979820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5877; 2025-04-07T14:10:49.980273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=419; 2025-04-07T14:10:49.981241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=403; 2025-04-07T14:10:49.981286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=979; 2025-04-07T14:10:49.981358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=40; 2025-04-07T14:10:49.981425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:49.981449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=62; 2025-04-07T14:10:49.981645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=174; 2025-04-07T14:10:49.981808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=139; 2025-04-07T14:10:49.982746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=908; 2025-04-07T14:10:49.983573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=789; 2025-04-07T14:10:49.983623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=19; 2025-04-07T14:10:49.983658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=12; 2025-04-07T14:10:49.983681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-04-07T14:10:49.983703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=4; 2025-04-07T14:10:49.983726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-07T14:10:49.983767Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=21; 2025-04-07T14:10:49.983790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:10:49.983857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=40; 2025-04-07T14:10:49.983889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-07T14:10:49.983925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=15; 2025-04-07T14:10:49.983945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=14281; 2025-04-07T14:10:49.984029Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=33;blobs=66;rows=1126788;bytes=85356024;raw_bytes=106323502; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:49.984101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:49.984138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:49.984165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:49.984206Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:49.984228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:49.984262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:49.984309Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:49.984347Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:49.984371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:49.984399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:49.984433Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:49.984458Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:49.984491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:49.984514Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:49.985134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:49.985196Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T14:10:49.985978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:49.986683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:49.986709Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:49.986723Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:49.986749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:49.986787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:49.986837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:49.986872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:49.986898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:49.986927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:49.986947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:49.986973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:49.987006Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:49.987035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:49.987430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T14:10:49.988036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T14:10:49.989624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:49.989658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 22685, MsgBus: 11209 2025-04-07T14:07:40.493826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572690610244943:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:40.493906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d78/r3tmp/tmpsfhj7Z/pdisk_1.dat 2025-04-07T14:07:40.782617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22685, node 1 2025-04-07T14:07:40.850737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:40.850756Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:40.850764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:40.850843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:40.873651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:40.873802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:40.875657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11209 TClient is connected to server localhost:11209 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:41.336441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:41.359402Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:07:41.373232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:41.526523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:07:41.708478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:41.791431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:43.316036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572703495148559:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:43.316156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:43.510273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.541991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.570821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.638589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.704163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.750456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:43.800831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572703495149061:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:43.801017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:43.801590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572703495149066:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:43.806160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:43.816447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572703495149069:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:44.843367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:45.465818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr89889hbzds4sr3n4vmpvae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.474619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr89889hbzds4sr3n4vmpvae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.478064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr89889hbzds4sr3n4vmpvae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.494064Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490572690610244943:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:45.494134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:07:45.541230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr8988bg5fb79jjy7the2xt9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.548973Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr8988bg5fb79jjy7the2xt9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.551365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr8988bg5fb79jjy7the2xt9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.578919Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr8988d8dq8txqykv5c87kxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.587513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr8988d8dq8txqykv5c87kxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.589828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. 
Ctx: { TraceId: 01jr8988d8dq8txqykv5c87kxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5NTY2NDQtODAzOWI1MjYtZDUzYzYyNDctMzQ4NTQ0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.625214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr8988ep83p2pztsw4kg4v0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.632902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr8988ep83p2pztsw4kg4v0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:07:45.635433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr8988ep83p2pztsw4kg4v0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM3MjBlY2ItNWZkZTA1YmItNGE1NmMxMGItZjBhYzlhNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data ... : TxId: 281474976723132. Ctx: { TraceId: 01jr89dsp4815wf0d78a0dq02m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.114919Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723133. Ctx: { TraceId: 01jr89dsp4815wf0d78a0dq02m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.117102Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723134. Ctx: { TraceId: 01jr89dsp4815wf0d78a0dq02m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.134315Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723135. Ctx: { TraceId: 01jr89dspv7j3ddpt9wwn2h5hs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.137948Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723136. Ctx: { TraceId: 01jr89dspv7j3ddpt9wwn2h5hs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.140220Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723137. Ctx: { TraceId: 01jr89dspv7j3ddpt9wwn2h5hs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.158333Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723138. Ctx: { TraceId: 01jr89dsqkfydzh1cwg1vekk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:10:47.161832Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723139. Ctx: { TraceId: 01jr89dsqkfydzh1cwg1vekk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.163932Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723140. Ctx: { TraceId: 01jr89dsqkfydzh1cwg1vekk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.180840Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723141. Ctx: { TraceId: 01jr89dsracmgz3qspme7a6gv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.184211Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723142. Ctx: { TraceId: 01jr89dsracmgz3qspme7a6gv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.186224Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723143. Ctx: { TraceId: 01jr89dsracmgz3qspme7a6gv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.201945Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723144. Ctx: { TraceId: 01jr89dsrz30nyehrhkb8ka8y7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.208692Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723145. Ctx: { TraceId: 01jr89dsrz30nyehrhkb8ka8y7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.210774Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723146. Ctx: { TraceId: 01jr89dsrz30nyehrhkb8ka8y7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.228492Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723147. Ctx: { TraceId: 01jr89dssscd1k99v6dast71e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.232232Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723148. Ctx: { TraceId: 01jr89dssscd1k99v6dast71e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.234849Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723149. 
Ctx: { TraceId: 01jr89dssscd1k99v6dast71e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.250495Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723150. Ctx: { TraceId: 01jr89dstg9cywkpg4vdgcpk5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.254788Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723151. Ctx: { TraceId: 01jr89dstg9cywkpg4vdgcpk5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.257428Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723152. Ctx: { TraceId: 01jr89dstg9cywkpg4vdgcpk5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.281817Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723153. Ctx: { TraceId: 01jr89dsve0mcafdfs7xn2gz59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.285418Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723154. Ctx: { TraceId: 01jr89dsve0mcafdfs7xn2gz59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.287540Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723155. Ctx: { TraceId: 01jr89dsve0mcafdfs7xn2gz59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.304294Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723156. Ctx: { TraceId: 01jr89dsw5f26xzen6wefqpm37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.307532Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723157. Ctx: { TraceId: 01jr89dsw5f26xzen6wefqpm37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.309350Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723158. Ctx: { TraceId: 01jr89dsw5f26xzen6wefqpm37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.325801Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723159. 
Ctx: { TraceId: 01jr89dswtach74b3jw8zcs2bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.329486Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723160. Ctx: { TraceId: 01jr89dswtach74b3jw8zcs2bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.331579Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723161. Ctx: { TraceId: 01jr89dswtach74b3jw8zcs2bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.350437Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723162. Ctx: { TraceId: 01jr89dsxk82fexw1qq6hw3hrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.354972Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723163. Ctx: { TraceId: 01jr89dsxk82fexw1qq6hw3hrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.357598Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723164. Ctx: { TraceId: 01jr89dsxk82fexw1qq6hw3hrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVhN2U0OTktZDcwMmMzNWYtZTFjMzk0NmItZmI3MDczNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.374006Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723165. Ctx: { TraceId: 01jr89dsyb7jajhr98p5ekf9cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.379174Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723166. Ctx: { TraceId: 01jr89dsyb7jajhr98p5ekf9cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:47.382660Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723167. Ctx: { TraceId: 01jr89dsyb7jajhr98p5ekf9cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFjN2IyZS0yMjg2MjFmNy01ZGNlNTc3MS01YmY4MTRiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail |94.0%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany |94.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestSingleFailureMirror >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::LS >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> Secret::Simple [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-04-07T14:10:13.664010Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:13.816515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:13.838946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:13.839019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:13.839222Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:13.846565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:13.846742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:13.846944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:13.847061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:13.847186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:13.847306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:13.847408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:13.847502Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:13.847594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:13.847704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.847795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:13.847884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:13.870411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:13.872528Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:13.872637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:13.872704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:13.872829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.872964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:13.873015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:13.873044Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:13.873103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:13.873177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:13.873237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:13.873350Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:13.873414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:13.873431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:13.873515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:13.873577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:13.873616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:13.873653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:13.873715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:13.873753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:13.873775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:13.873804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:13.873835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:13.873855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:13.873973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-07T14:10:13.874031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-04-07T14:10:13.874099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-07T14:10:13.874162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-07T14:10:13.874288Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:13.874334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:13.874356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:13.874499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:13.874526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:13.874639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:13.874674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:13.874694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:13.874837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:13.874889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:13.874927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:13.875054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
vent=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=1cadfa80-13ba11f0-870943d0-38ed6ab,;}; 2025-04-07T14:10:52.030932Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:2:44:255:1:574112:0]; 2025-04-07T14:10:52.031048Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:2:44:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:2:85:3:0:5870200:0] Added portions: 151 152 2025-04-07T14:10:52.051577Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T14:10:52.051890Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[227] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T14:10:52.054671Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-04-07T14:10:52.054810Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=39;blobs=78;rows=1462497;bytes=92311612;raw_bytes=143732845; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75000;bytes=7381080;raw_bytes=7369506; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:52.054862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:52.055934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;method=TTxController::StartProposeOnExecute;tx_info=1043:TX_KIND_COMMIT_WRITE;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:311;event=start; 2025-04-07T14:10:52.056048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;method=TTxController::StartProposeOnExecute;tx_info=1043:TX_KIND_COMMIT_WRITE;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:340;event=registered; 2025-04-07T14:10:52.068620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-07T14:10:52.068711Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=with_appended.cpp:80;portions=75,76,;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab; 2025-04-07T14:10:52.068781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=2342880;portion_bytes=2342908;portion_raw_bytes=3657275; 2025-04-07T14:10:52.068847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=75;before_size=99692692;after_size=102035600;before_rows=1537497;after_rows=1575000; 2025-04-07T14:10:52.068912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=2342880;portion_bytes=2342908;portion_raw_bytes=3657275; 2025-04-07T14:10:52.069112Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=2389792;portion_bytes=2389820;portion_raw_bytes=3712231; 2025-04-07T14:10:52.069182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=76;before_size=102035600;after_size=104425420;before_rows=1575000;after_rows=1612497; 2025-04-07T14:10:52.069232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=2389792;portion_bytes=2389820;portion_raw_bytes=3712231; 2025-04-07T14:10:52.069357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;fline=manager.cpp:14;event=unlock;process_id=CS::INDEXATION::1cadfa80-13ba11f0-870943d0-38ed6ab; 2025-04-07T14:10:52.069412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:52.069487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:52.069535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:52.069579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=0; 2025-04-07T14:10:52.069636Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:52.069684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:52.069722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:52.069766Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:52.069835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:52.069924Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:87:2:0:5870200:0] 2025-04-07T14:10:52.069998Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-04-07T14:10:52.070080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:52.075479Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=1cadfa80-13ba11f0-870943d0-38ed6ab;mem=5963210;cpu=0; 
2025-04-07T14:10:52.076160Z node 1 :TX_COLUMNSHARD DEBUG: method=TTxController::FinishProposeOnComplete;tx_id=1043;fline=tx_controller.cpp:371;event=start;tx_info=1043:TX_KIND_COMMIT_WRITE;min=1744035015373;max=1744035045373;plan=0;src=[1:97:2132];cookie=0; 2025-04-07T14:10:52.076810Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-04-07T14:10:52.076873Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[229] execute at tablet 9437184 2025-04-07T14:10:52.076986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:52.077195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=abstract.h:78;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-07T14:10:52.089504Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[229] complete at tablet 9437184 2025-04-07T14:10:52.089603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=abstract.h:88;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-07T14:10:52.089707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=5870200; 2025-04-07T14:10:52.089804Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=manager.cpp:9;event=lock;process_id=CS::INDEXATION::1d0fafc8-13ba11f0-b634cfae-1d784dad; 2025-04-07T14:10:52.089848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-07T14:10:52.089941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:725;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad; 2025-04-07T14:10:52.090003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:615;event=start_changes;type=CS::INDEXATION;task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad; 2025-04-07T14:10:52.090132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;type=CS::INDEXATION;priority=0;; 2025-04-07T14:10:52.090364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;type=CS::INDEXATION;priority=0;; 2025-04-07T14:10:52.090409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;mem=5963210;cpu=0; 2025-04-07T14:10:52.090454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;task_id=46;mem=5963210;cpu=0; 2025-04-07T14:10:52.090584Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad; Added portions: 153 154 2025-04-07T14:10:52.521306Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=1d0fafc8-13ba11f0-b634cfae-1d784dad;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-07T14:10:52.521447Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TSchemeShardSubDomainTest::LS [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2025-04-07T14:08:32.173234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:08:32.173714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:08:32.174033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0021b0/r3tmp/tmpwquElz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18005, node 1 TClient is connected to server localhost:13770 2025-04-07T14:08:33.243276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:08:33.288848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:33.298090Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:33.298180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:33.298235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:33.298501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:08:33.348932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:33.349061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:33.360640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-07T14:08:46.617261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.617434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.636711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-07T14:08:46.840871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:896:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.840980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.841298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:901:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:46.846620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T14:08:47.028073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:903:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:08:48.338284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:08:48.819896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-07T14:08:49.565783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T14:08:50.456382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:08:51.038088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:08:52.633267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-07T14:08:53.114703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-07T14:09:10.611573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:09:10.611627Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-07T14:09:34.673530Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr89bjkd7n6h14qkze7wk6wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJhZmI1MmItMzE0OWYzZTUtZjE1MmI4MDgtYjdmZGQwZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T14:09:57.920016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715749:0, at schemeshard: 72057594046644480 2025-04-07T14:09:58.961699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-07T14:10:00.773304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-07T14:10:01.277217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T14:10:14.318083Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715785. Ctx: { TraceId: 01jr89csbm8x5qc0h8vy932ean, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUyZTJmYzgtMzIyZTAwMDQtYWRiYjM1MDAtODFmZTU5MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-07T14:10:51.194305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715829. Ctx: { TraceId: 01jr89dxjq29zhssekdht2qn6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJkODAwZTctOWNhYWIzYjAtNWVjM2VmZDgtNmQ3Zjk5Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:53.438681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:53.438781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.438826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:53.438863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:53.439595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:53.439652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:53.439727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.440709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:53.492739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:53.492784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:53.500831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:53.500966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:53.501132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:53.507225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:53.508179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:53.509429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.510114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:53.513720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-07T14:10:53.520236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:53.520295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.520421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:53.520541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.525685Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:53.622992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:53.623172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.623317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:53.623483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:53.623530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.625381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.625505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:53.625619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.625659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:53.625689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:53.625731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:53.627074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.627120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:53.627157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:53.628512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.628551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.628591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.628679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.631498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:53.632637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:53.632771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:53.633492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.633592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.633631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.633845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:53.633882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.634031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.634100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:53.635682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.635744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.635880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.635919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:53.636182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.636239Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:53.636329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:53.636371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.636447Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:53.636495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.636528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:53.636558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:53.636629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:53.636691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:53.636725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:53.638590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.638706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.638745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:53.638785Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:53.638836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.638930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
dinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:53.722973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:10:53.723060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:10:53.723342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.723446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.723507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:53.723741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:10:53.723797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:53.723926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.723980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:10:53.724022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:10:53.725468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.725500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.725620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:53.725686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.725717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:10:53.725740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T14:10:53.725841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.725868Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 
ProgressState 2025-04-07T14:10:53.725931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:10:53.725955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:53.725986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:10:53.726012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:53.726048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:10:53.726080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:10:53.726239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:10:53.726284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T14:10:53.726327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:10:53.726357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:10:53.726743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:53.726791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:53.726824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:53.726853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:10:53.726889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:53.727200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:53.727251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:53.727271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:53.727297Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:10:53.727314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:10:53.727366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T14:10:53.729463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 100 2025-04-07T14:10:53.729682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T14:10:53.729851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:10:53.729881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T14:10:53.731189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:10:53.731260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:53.731284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:443:2398] TestWaitNotification: OK eventTxId 100 2025-04-07T14:10:53.731607Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:53.731758Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 146us result status StatusSuccess 2025-04-07T14:10:53.732018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.735818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:53.735961Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 160us result status StatusSuccess 2025-04-07T14:10:53.736186Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-04-07T14:10:33.360948Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:33.427303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:33.441822Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:33.441889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:33.442079Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:33.448590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:33.448774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:33.448958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:33.449027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:33.449103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:33.449170Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:33.449249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:33.449319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:33.449380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:33.449469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:33.449534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:33.449609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:33.463729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:33.465754Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:33.465896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:33.465942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:33.466141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:33.466293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:33.466363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:33.466401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:33.466491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:33.466547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-07T14:10:33.466598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:33.466639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:33.466810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:33.466889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:33.466932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:33.466960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:33.467062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:33.467126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:33.467168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:33.467231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:33.467321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:33.467363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:33.467397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:33.467443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:33.467490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:33.467516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:33.467676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-07T14:10:33.467774Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-07T14:10:33.467887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-07T14:10:33.467983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-07T14:10:33.468138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:33.468218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:33.468255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:33.468464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:33.468514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:33.468541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:33.468721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:33.468776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:33.468815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:33.469006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:33.469052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:33.469083Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:33.469231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
tage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=2671; 2025-04-07T14:10:53.521898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6416; 2025-04-07T14:10:53.522441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=505; 2025-04-07T14:10:53.523399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=422; 2025-04-07T14:10:53.523447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=970; 2025-04-07T14:10:53.523526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=43; 2025-04-07T14:10:53.523618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:10:53.523650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=79; 2025-04-07T14:10:53.523856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=179; 2025-04-07T14:10:53.524017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=139; 2025-04-07T14:10:53.524828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=779; 2025-04-07T14:10:53.525755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=890; 2025-04-07T14:10:53.525808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=20; 2025-04-07T14:10:53.525845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=13; 2025-04-07T14:10:53.525875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-04-07T14:10:53.525900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=4; 2025-04-07T14:10:53.525932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T14:10:53.525986Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2025-04-07T14:10:53.526015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:10:53.526076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=28; 2025-04-07T14:10:53.526121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-07T14:10:53.526174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=25; 2025-04-07T14:10:53.526196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15013; 2025-04-07T14:10:53.526300Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=33;blobs=66;rows=1126788;bytes=87164812;raw_bytes=110830786; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:10:53.526379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:10:53.526412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:10:53.526443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:10:53.526499Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:10:53.526530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:53.526573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:53.526632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:53.526681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:53.526714Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:53.526752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:53.526776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:53.526801Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:53.526834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:53.526860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:53.527579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:53.527659Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1980:3967];tablet_id=9437184;parent=[1:1947:3941];fline=manager.h:99;event=ask_data;request=request_id=81;1={portions_count=35};; 2025-04-07T14:10:53.529042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:10:53.529233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:10:53.529263Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:10:53.529284Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:10:53.529326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:10:53.529381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:10:53.529443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=8; 2025-04-07T14:10:53.529495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-07T14:10:53.529535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:10:53.529578Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:53.529614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:10:53.529675Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:10:53.529730Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:10:53.529775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:10:53.530211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2025-04-07T14:10:53.531302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2025-04-07T14:10:53.533820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:10:53.533867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1947:3941];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:53.438635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:53.438716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.438741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:53.438765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:53.439575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:53.439619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:53.439695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.440644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:53.494916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:53.494950Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:53.501820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:53.501903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:53.502041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:53.506806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:53.508185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:53.509380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.510115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:53.513813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:53.520371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.520405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:53.520501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.525547Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:53.613777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:53.614114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.614812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:53.616315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:53.616400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.618979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.620393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:53.620567Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.620617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:53.620659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:53.620722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:53.622549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.622620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:53.622651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:53.624243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.624283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.624316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.624388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.628356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:53.629972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:53.630480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:53.632062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.632180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.632246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.632486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:53.632532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.632698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.632768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:53.634466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.634505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.634648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.634686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:53.634965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.635006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:53.635083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:53.635124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.635186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:53.635225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.635255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:53.635280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:53.635354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:53.635391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:53.635417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:53.637054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.637149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.637179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:53.637212Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:53.637258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.637341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T14:10:54.032945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2025-04-07T14:10:54.032968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:10:54.032997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:10:54.034058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T14:10:54.034130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-07T14:10:54.034164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-07T14:10:54.034196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:10:54.034227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:10:54.034282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-07T14:10:54.037181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.037235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.037269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.037293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.037327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.038093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T14:10:54.039060Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-04-07T14:10:54.039354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-07T14:10:54.039636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2025-04-07T14:10:54.043863Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:10:54.045067Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:10:54.045803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:54.045996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:10:54.046306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409546 2025-04-07T14:10:54.047182Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:10:54.047307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:10:54.047481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:10:54.048285Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-04-07T14:10:54.049797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:10:54.049910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-04-07T14:10:54.050441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:10:54.050548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:10:54.050944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.050997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:10:54.051042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:54.051232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-07T14:10:54.051727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.051759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:54.051890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:10:54.053375Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:10:54.053422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:10:54.053500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:10:54.053519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:10:54.053563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:10:54.053579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:10:54.055370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:10:54.055403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:10:54.055493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:10:54.055533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:10:54.055624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:54.055709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:54.055779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.055812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:54.055872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:54.057047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:10:54.057270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:10:54.057303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:10:54.057682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:10:54.057753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:10:54.057776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:764:2682] TestWaitNotification: OK eventTxId 106 2025-04-07T14:10:54.058242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:54.058408Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 168us result status StatusSuccess 
2025-04-07T14:10:54.058690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclare |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:53.438655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:53.438788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.438833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:53.438866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:53.439587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:53.439626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:53.439702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.440694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:53.492805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:53.492849Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-07T14:10:53.501153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:53.501253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:53.501365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:53.507254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:53.508192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:53.509410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.510109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:53.513773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:53.520326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.520373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:53.520530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.526003Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:53.641500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:53.641677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.641858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:53.642025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:53.642076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.643814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.643978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:53.644133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.644185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:53.644221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:53.644281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:53.646062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.646117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:53.646150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:53.647773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.647817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.647866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.647936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.651665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:53.653597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:53.653765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:53.654730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.654877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.654936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.655225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:53.655276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.655438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.655520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:53.657506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.657549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.657723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.657769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:53.658025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.658069Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:53.658180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:53.658213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.658265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:53.658315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.658349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:53.658394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:53.658475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:53.658510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:53.658540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:53.660470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.660568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.660625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:53.660700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:53.660744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.660838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
thOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:10:54.637158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:10:54.637191Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:10:54.637218Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:10:54.637245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:10:54.637901Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:10:54.637966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:10:54.638000Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:10:54.638034Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:10:54.638061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:10:54.638119Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-07T14:10:54.638503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.638545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.638576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.638606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:54.640042Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T14:10:54.640340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:54.640578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:10:54.641652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T14:10:54.642444Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 
2025-04-07T14:10:54.645827Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-07T14:10:54.646004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:10:54.647932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:10:54.648557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:10:54.648750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:54.649387Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-07T14:10:54.649581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:10:54.649730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-07T14:10:54.650759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.650809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:10:54.650877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:54.651431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.651479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:54.651607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:10:54.651805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T14:10:54.651911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T14:10:54.654047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:10:54.654096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:10:54.654177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:10:54.654201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:10:54.654258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:2 2025-04-07T14:10:54.654280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:10:54.655874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:10:54.655924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:10:54.656100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:54.656166Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:54.656233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:54.656279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:54.656348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:54.657656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:10:54.657920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:10:54.657964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:10:54.658378Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:10:54.658460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:10:54.658498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:650:2603] TestWaitNotification: OK eventTxId 105 2025-04-07T14:10:54.659004Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:54.659193Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 217us result status StatusPathDoesNotExist 2025-04-07T14:10:54.659353Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:54.659909Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:54.660050Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 156us result status StatusPathDoesNotExist 2025-04-07T14:10:54.660163Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TSchemeShardSubDomainTest::ForceDropTwice >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:55.355039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:55.355174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.355222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:55.355267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:55.355318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:55.355345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:55.355423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.355756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:55.425281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:55.425344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:55.435574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:55.435743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:55.435925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:55.440909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:55.441098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:55.441674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.441854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:55.443377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.444493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.444557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.444713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:55.444778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.444821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:55.444973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.451006Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:55.557571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:55.557738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.557885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:55.558053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:55.558101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.559702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.559814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:55.559928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.559971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:55.560035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:55.560070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:55.561283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.561329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:55.561356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:55.562401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.562429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.562458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.562521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.564957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:55.566323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:55.566441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:55.567203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.567303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.567340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.567552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:55.567588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.567724Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.567780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:55.569258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.569293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.569443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.569489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:55.569687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.569726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:55.569795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:55.569819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.569853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:55.569884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.569921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:55.569956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:55.570008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:55.570036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:55.570058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:55.571357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.571449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.571476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:55.571541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:55.571588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.571651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
57594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.596333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.596349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:55.596376Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T14:10:55.596398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:55.596438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-07T14:10:55.599500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.599589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:55.599639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-07T14:10:55.599951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:55.600136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:55.601275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.601319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.601370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:55.601409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-07T14:10:55.601519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:55.602869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:10:55.603023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:10:55.603464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.603578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 
RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.603625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:55.603845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:10:55.603894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:55.604021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.604059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:55.604100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:10:55.605902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.605936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.606049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:55.606107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.606147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:10:55.606194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T14:10:55.606390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.606430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T14:10:55.606511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:10:55.606534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:55.606565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:10:55.606592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:55.606618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:10:55.606660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:10:55.606701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:55.606760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-07T14:10:55.606792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 
2025-04-07T14:10:55.606834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:10:55.607346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.607404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.607425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:55.607452Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:10:55.607480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:55.608491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.608543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:55.608561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:55.608587Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:10:55.608617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:55.608694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-07T14:10:55.608742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:270:2262] 2025-04-07T14:10:55.611118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:55.611446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:55.611563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:55.611585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:271:2263] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-07T14:10:55.611920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:55.612070Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 150us result status 
StatusSuccess 2025-04-07T14:10:55.612341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:54.945202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:54.945295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:54.945328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:54.945363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:54.945407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:54.945435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:54.945479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:54.945802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:54.991469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:54.991519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:54.998953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:54.999106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:54.999314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:55.002995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:55.003157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:55.003568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.003717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:55.004937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.005855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.005898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.005980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:55.006032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.006078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:55.006241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.010705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:55.087362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:55.087548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.087713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:55.087853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:55.087902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.089664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.089785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:55.089909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.089952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:55.089978Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:55.090019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:55.091217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.091256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:55.091291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:55.092265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.092294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.092322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.092381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.094646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:55.095775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:55.095914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:55.096601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.096726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.096775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.096963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:55.096999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.097145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.097209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:55.098379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.098412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.098552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.098588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:55.098786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.098817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:55.098883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:55.098904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.098936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:55.098962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.098987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:55.099006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:55.099051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:55.099081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:55.099104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:55.100287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.100366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.100429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:55.100456Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:55.100501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.100580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:10:55.446937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T14:10:55.449918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:10:55.450205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.450393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:10:55.450517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.450730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.450767Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T14:10:55.450834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:10:55.450861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:10:55.450899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:10:55.450949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2440] message: TxId: 103 2025-04-07T14:10:55.450990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:10:55.451017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:10:55.451040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:10:55.451138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:10:55.452540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:10:55.452578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:488:2441] TestWaitNotification: OK eventTxId 103 2025-04-07T14:10:55.453076Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:55.453272Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 210us result status StatusSuccess 2025-04-07T14:10:55.453725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.454276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:55.454522Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 223us result status StatusSuccess 2025-04-07T14:10:55.454879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.455416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:55.455560Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 138us result status StatusSuccess 2025-04-07T14:10:55.455820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.456276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:55.456429Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 168us result status StatusSuccess 2025-04-07T14:10:55.456748Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Redefine >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:55.828789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:55.828877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.828915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:55.828946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:55.828986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:55.829005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:55.829049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.829275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:55.873799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:55.873854Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:55.881382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:55.881503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:10:55.881674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:55.886296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:55.886500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:55.886913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.887056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:55.888503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.889406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.889459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.889538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:55.889602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.889637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:55.889765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.894610Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:56.003495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:56.003698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.003896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:56.004069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:56.004109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.006260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.006384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:56.006536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.006588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:56.006620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:56.006673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:56.008270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.008319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:56.008351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:56.009675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.009711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.009749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.009810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.013413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.014917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:56.015090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:56.015945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.016092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.016138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.016365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:56.016404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.016551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.016618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:56.018239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:10:56.018279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.018427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.018467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:56.018680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.018717Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:56.018793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:56.018820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.018856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:56.018894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.018925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:56.018951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:56.019012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.019079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:56.019108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:56.020868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.020959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.020993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:56.021025Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:56.021079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.021180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.059138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:10:56.059284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:10:56.059548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.059632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.059671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:56.059852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:10:56.059918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:56.060064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.060116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:56.060155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:10:56.061702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.061748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.061864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:56.061952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.061991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:10:56.062038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T14:10:56.062209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.062254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T14:10:56.062351Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:10:56.062379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:56.062415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:10:56.062447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:56.062475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:10:56.062500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:10:56.062549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:56.062591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T14:10:56.062625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:10:56.062664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:10:56.063294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:56.063356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:56.063381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:56.063412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:10:56.063447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.063879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:56.063961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:56.063985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:56.064008Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:10:56.064029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:56.064090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T14:10:56.066667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:56.066777Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-07T14:10:56.067002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:10:56.067042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:10:56.067149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:10:56.067171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:10:56.067531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:10:56.067664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:56.067727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:306:2298] 2025-04-07T14:10:56.067847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:10:56.067933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:10:56.067953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:10:56.068299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:56.068447Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 158us result status StatusSuccess 2025-04-07T14:10:56.068814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.069269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:56.069424Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 162us result status StatusPathDoesNotExist 2025-04-07T14:10:56.069556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-04-07T14:10:52.906244Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001df9/r3tmp/tmpcavKnP//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:52.908275Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TSchemeShardSubDomainTest::Redefine [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:56.152117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:56.152187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.152212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:56.152236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:56.152265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:56.152283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:56.152318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.152523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:56.200102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:56.200146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:56.207175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:56.207285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:56.207426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:56.211354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:56.211536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:56.212030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.212197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:56.213679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.214710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.214757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.214905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:56.214947Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.214981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:56.215136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.220262Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:56.286358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:56.286508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.286644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:56.286814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:56.286854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.288436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.288530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:56.288676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.288719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:56.288745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:56.288774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:56.289836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.289870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:56.289892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:56.290824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.290849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.290872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.290927Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.293083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.294083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:56.294191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:56.294822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.294900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.294932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.295099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:56.295143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.295244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.295309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:56.296719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.296771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.296918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.296951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:56.297179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.297213Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:56.297292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:56.297330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.297369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:56.297445Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.297479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:56.297502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:56.297567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.297593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:56.297612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:56.298978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.299053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.299074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:56.299098Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:56.299142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.299205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
4, tablet: 72075186233409552, partId: 0 2025-04-07T14:10:56.634242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409552 Status: OK 2025-04-07T14:10:56.634285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2025-04-07T14:10:56.634328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409552 Status: OK at tablet72057594046678944 2025-04-07T14:10:56.635108Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.642229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-07T14:10:56.642322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409550 Status: OK 2025-04-07T14:10:56.642402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2025-04-07T14:10:56.642639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409550 Status: OK at tablet72057594046678944 2025-04-07T14:10:56.643484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.643546Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 126us result status StatusSuccess 2025-04-07T14:10:56.643759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/PQGroup_2" PathDescription { Self { Name: "PQGroup_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: false CreateTxId: 104 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 } BalancerTabletID: 72075186233409552 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 5 ShardsInside: 7 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 50 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.645848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.650029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-07T14:10:56.650117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 
72075186233409551 Status: OK 2025-04-07T14:10:56.650152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2025-04-07T14:10:56.650182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409551 Status: OK at tablet72057594046678944 2025-04-07T14:10:56.650209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-04-07T14:10:56.658910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.659040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.659071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.659107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-07T14:10:56.659220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.664381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-07T14:10:56.664493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000003 2025-04-07T14:10:56.665920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.666014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.666063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-04-07T14:10:56.666232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:10:56.666353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.666391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:10:56.669681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.669713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.669804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:10:56.669930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.669952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:330:2308], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-07T14:10:56.669971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:330:2308], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T14:10:56.670385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.670417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:10:56.670499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:10:56.670524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:10:56.670556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:10:56.670577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:10:56.670630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:10:56.670651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:10:56.670748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-07T14:10:56.670776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-07T14:10:56.670796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:10:56.670810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:10:56.671425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:56.671488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:56.671508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:10:56.671536Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:10:56.671569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:10:56.672788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:56.672835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 
PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:56.672855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:10:56.672911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:10:56.672938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:10:56.672983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T14:10:56.675305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:10:56.677302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:56.508938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:56.509042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.509079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:56.509117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:56.509185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:56.509206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:56.509263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.509560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:56.574324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:56.574378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:56.584022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:56.584172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:56.584344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:56.588289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:56.588438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:56.588852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.588996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:56.590194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.591048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.591092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.591189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:56.591225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.591255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:56.591363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.595587Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:56.694449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:56.694619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.694762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:56.694920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:56.694964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.696616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.696740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:56.696856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.696893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:56.696926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:56.696970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:56.698242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:10:56.698287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:56.698312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:56.699401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.699427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.699454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.699510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.701880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.703127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:56.703260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:56.703910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.703997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.704035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.704233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:56.704266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.704392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.704464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:56.705714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.705750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.705868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.705897Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:56.706073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.706103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:56.706163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:56.706191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.706226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:56.706255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.706286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:56.706307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:56.706363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.706391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:56.706410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:56.707624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.707697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.707720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:56.707764Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:56.707805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.707869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:56.901987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:56.902009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:56.902030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:56.903247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:10:56.904147Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-04-07T14:10:56.904509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-07T14:10:56.904798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 Forgetting tablet 72075186233409550 2025-04-07T14:10:56.905993Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:10:56.906150Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:10:56.906567Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-07T14:10:56.907195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.907385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:10:56.908362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:10:56.908498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409548 2025-04-07T14:10:56.909383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:10:56.909861Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:10:56.910228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:10:56.910375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-04-07T14:10:56.911809Z node 1 :HIVE INFO: [72057594037968897] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-07T14:10:56.912739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:10:56.912859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:56.913705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:10:56.913788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:56.914569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:56.914611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:56.914694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:10:56.915430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:56.915480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:56.915522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.915684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:10:56.915711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:10:56.916325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:10:56.916350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:10:56.916401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:10:56.916411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:10:56.917835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:10:56.917866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:10:56.917915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:10:56.917927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:10:56.917948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:10:56.917967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:10:56.919097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:56.919182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-04-07T14:10:56.919380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:10:56.919407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-07T14:10:56.919471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:10:56.919504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:10:56.919818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:10:56.919878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:10:56.919898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:672:2574] 2025-04-07T14:10:56.919954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:10:56.920022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:10:56.920037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:672:2574] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-07T14:10:56.920334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:56.920486Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 144us result status StatusPathDoesNotExist 2025-04-07T14:10:56.920624Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:56.920948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:56.921072Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 109us result status 
StatusSuccess 2025-04-07T14:10:56.921258Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:56.798301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:56.798403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.798459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:56.798505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:56.798552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:56.798580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:56.798637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.798958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:56.868449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:56.868511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:56.878400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:56.878544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:56.878730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:56.883585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:56.883791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:56.884384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.884545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:56.886187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.887311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.887369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.887514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:56.887572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.887615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:56.887753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.893784Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:56.993467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:56.993645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.993806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:56.993986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:56.994034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.995968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.996080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:56.996202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.996255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:56.996299Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:56.996337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:56.997820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.997869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:56.997907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:56.999269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.999306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.999346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.999413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.002609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:57.003934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:57.004083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:57.004970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.005073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:57.005115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.005341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:57.005386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.005527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.005590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:57.007240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:57.007277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:57.007392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.007428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:57.007653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.007692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:57.007793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:57.007826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.007865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:57.007896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.007941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:57.007969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:57.008018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:57.008049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:57.008075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:57.009475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:57.009547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:57.009573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:57.009605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:57.009659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.009743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
1/1 2025-04-07T14:10:57.157362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:10:57.157402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:10:57.157441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:10:57.157482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:10:57.157505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:10:57.157630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:10:57.157659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-07T14:10:57.157686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:10:57.157714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:10:57.158366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:57.158437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:57.158473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:10:57.158501Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:10:57.158527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:57.158891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:57.158951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:10:57.158970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:10:57.158997Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:10:57.159027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:10:57.159070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T14:10:57.160211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:57.160258Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:57.160277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:10:57.161478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:10:57.161720Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:10:57.161980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.162186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-07T14:10:57.163464Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:10:57.163884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:10:57.164042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:57.164316Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:10:57.164553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:10:57.164698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-04-07T14:10:57.165666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:57.165740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:57.165874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:10:57.166771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:57.166809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:57.166857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.167032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:10:57.167803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
2025-04-07T14:10:57.167840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:10:57.169667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:10:57.169700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:10:57.169751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:10:57.169775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:10:57.169875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:57.169927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:10:57.170164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:10:57.170206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:10:57.170516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:10:57.170593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:10:57.170628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:567:2522] TestWaitNotification: OK eventTxId 104 2025-04-07T14:10:57.171036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:57.171192Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 171us result status StatusPathDoesNotExist 2025-04-07T14:10:57.171315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:57.171887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:57.172054Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 167us result status StatusSuccess 
2025-04-07T14:10:57.172407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:57.857060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:57.857138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:57.857169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:57.857193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:57.857228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:57.857247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:57.857297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:57.857554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:57.904053Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-07T14:10:57.904095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:57.911180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:57.911284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:57.911419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:57.915071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:57.915227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:57.915626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.915764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:57.917016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.918061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:57.918118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.918226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:57.918264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:57.918296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:57.918416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.922724Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:58.000955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.001142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.001282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:58.001437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:58.001479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.002963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.003095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-04-07T14:10:58.003260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.003342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:58.003390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:58.003456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:58.004948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.004986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:58.005024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:58.006032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.006073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.006114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.006162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.008530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:58.009525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:58.009648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:58.010307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.010394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.010441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.010614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:58.010648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.010766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.010821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:58.012009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.012040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.012143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.012168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:58.012367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.012399Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:58.012471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:58.012497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.012532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:58.012566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.012591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:58.012611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:58.012676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:58.012734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:58.012765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:58.013982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.014049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.014073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:58.014098Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:58.014139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.014224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:10:58.016016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:10:58.016353Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, 
unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-07T14:10:58.016761Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:10:58.027775Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:10:58.029356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.029553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.029620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.030556Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:10:58.031796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.031940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-07T14:10:58.032604Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T14:10:58.032767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:10:58.032797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T14:10:58.033072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:10:58.033140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:58.033174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:279:2271] TestWaitNotification: OK eventTxId 100 2025-04-07T14:10:58.033522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:58.033633Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 114us result status StatusPathDoesNotExist 2025-04-07T14:10:58.033746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: 
[OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:58.709528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:58.709601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.709629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:58.709654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:58.709693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:58.709717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:58.709758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.710012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:58.754382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:58.754428Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:58.761247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:58.761346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:58.761481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:58.765016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:58.765138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-07T14:10:58.765537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.765657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:58.766709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.767505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.767552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.767632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:58.767671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.767700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:58.767798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.771963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:58.844039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.844207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.844353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:58.844486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:58.844518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.845933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.846029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:58.846124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.846177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:58.846205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:58.846242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:58.847321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.847361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:58.847389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:58.848327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.848354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.848382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.848431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.850715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:58.851806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:58.851967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:58.852626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.852726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.852763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.852934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:58.852964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.853097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.853155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:58.854362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.854391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.854518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:10:58.854548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:58.854763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.854807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:58.854883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:58.854905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.854940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:58.854970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.854994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:58.855018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:58.855067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:58.855097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:58.855136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:58.856492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.856557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.856590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:58.856619Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:58.856654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.856761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:10:58.858627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:10:58.858906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-07T14:10:58.859329Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:10:58.869981Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:10:58.871499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.871663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.871724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-04-07T14:10:58.872636Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:10:58.874110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.874244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-04-07T14:10:58.875076Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-04-07T14:10:58.876806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.876977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.877038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-04-07T14:10:58.878413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.878538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-07T14:10:58.878707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:10:58.878736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:10:58.878879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:10:58.878911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:10:58.879243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:10:58.879327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:10:58.879351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:58.879374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:283:2275] 2025-04-07T14:10:58.879483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:10:58.879509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:283:2275] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:10:58.879812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:58.879941Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 118us result status StatusPathDoesNotExist 2025-04-07T14:10:58.880084Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:58.880473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:58.880644Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 154us result status StatusPathDoesNotExist 2025-04-07T14:10:58.880754Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:58.881053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:58.881143Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 89us result status StatusSuccess 2025-04-07T14:10:58.881437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:59.052975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:59.053040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.053076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:59.053100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:59.053137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:59.053158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-04-07T14:10:59.053191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.053404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:59.095387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:59.095422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:59.103593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:59.103756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:59.103918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:59.108231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:59.108345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:59.108710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.108844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:59.110042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.110813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.110852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.110942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:59.110978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.111011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:59.111142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.115199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:59.191856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:59.191992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.192127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:59.192280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:59.192321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.193705Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.193792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:59.193899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.193935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:59.193957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:59.193987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:59.195212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.195260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:59.195290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:59.196437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.196463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.196487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.196535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.198836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:59.199946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:59.200055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:59.200714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.200796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.200831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.201011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:59.201041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.201151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.201223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:59.202291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.202320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.202429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.202457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:59.202658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.202694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:59.202751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:59.202772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.202797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:59.202825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.202847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:59.202865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:59.202907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.202933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:59.202954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:59.208207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.208280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.208300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:59.208335Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:59.208391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.208453Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notif ... CHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.264317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:10:59.264358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.264384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:59.264461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-07T14:10:59.264649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.264745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:59.264989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:10:59.265322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:10:59.266518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.266556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.266658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:59.266751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.266794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:10:59.266826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:10:59.267033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.267066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-07T14:10:59.267160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:10:59.267189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:10:59.267227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:10:59.267261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:10:59.267290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-07T14:10:59.267333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:10:59.267378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:59.267408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:10:59.267435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:10:59.267477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:10:59.268064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:10:59.268124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:10:59.268151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:10:59.268181Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:10:59.268214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.269321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:10:59.269387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:10:59.269412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:10:59.269429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:10:59.269450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:59.269509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:10:59.269628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:10:59.269654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:59.269702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:10:59.269999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72057594046678944 2025-04-07T14:10:59.270026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:59.270060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.271083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:10:59.272200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:10:59.272271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:10:59.272328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:10:59.272464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:10:59.272487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:10:59.272744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:10:59.272792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:10:59.272818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2326] TestWaitNotification: OK eventTxId 101 2025-04-07T14:10:59.273112Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.273249Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 133us result status StatusPathDoesNotExist 2025-04-07T14:10:59.273358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:10:59.273661Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.273753Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot" took 96us result status StatusSuccess 2025-04-07T14:10:59.273957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:59.069387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:59.069455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.069481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:59.069503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:59.069532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:59.069566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:59.069606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.069808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:59.111493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:59.111534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:59.118260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:59.118350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:59.118499Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:59.122425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:59.122563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:59.122927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.123058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:59.124198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.125054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.125091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.125168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:59.125202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.125228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:59.125336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.129804Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:59.238714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:59.238919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.239089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:59.239279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:59.239325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.241142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.241259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:59.241398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.241449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T14:10:59.241496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:59.241540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:59.242968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.243014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:59.243048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:59.244313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.244347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.244381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.244443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.252258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:59.254011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:59.254161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:59.255131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.255251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.255296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.255533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:59.255576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.255737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.255824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:59.257500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.257538Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.257667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.257718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:59.257958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.258006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:59.258085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:59.258112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.258143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:59.258176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.258206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:59.258231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:59.258289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.258322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:59.258347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:59.259921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.260004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.260036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:59.260066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:59.260118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.260191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
:10:59.283185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:59.284013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:10:59.284084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:10:59.284272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.284331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.284356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:59.284510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:10:59.284538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:10:59.284626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.284653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:59.284699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:10:59.285766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.285797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.285885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:10:59.285943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.285963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:10:59.285990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:10:59.286113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 
2025-04-07T14:10:59.286135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T14:10:59.286188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:10:59.286207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:59.286232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:10:59.286253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:10:59.286272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:10:59.286300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:10:59.286340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:10:59.286484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T14:10:59.286508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:10:59.286524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:10:59.286867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:59.286904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:59.286924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:59.286967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:10:59.286989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.287465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:59.287510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:10:59.287524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:10:59.287546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:10:59.287564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:10:59.287596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T14:10:59.289202Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:10:59.289246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T14:10:59.289374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:10:59.289397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T14:10:59.289659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:10:59.289720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:10:59.289741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 100 2025-04-07T14:10:59.290003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.290165Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 148us result status StatusSuccess 2025-04-07T14:10:59.290415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.290682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.290769Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 85us result status StatusSuccess 2025-04-07T14:10:59.290954Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:57.849993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:57.850059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:57.850091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:57.850114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:57.850141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:57.850158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:57.850213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:57.850461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:57.893656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:57.893694Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:10:57.900612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:57.900728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:57.900844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:57.904502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:57.904621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:57.905010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.905147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:57.906248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.907071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:57.907119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.907218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:57.907252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:57.907278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:57.907384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.911551Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:57.986196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:57.986365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.986509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:57.986638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:57.986668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.988279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.988370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:57.988497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.988544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:57.988579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:57.988609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:57.989887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.989922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:57.989953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:57.991011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.991039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.991063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.991128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.997173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:57.998479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:57.998629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:57.999381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.999497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:57.999535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.999691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:57.999720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.999832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.999900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:58.001163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.001193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.001309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.001344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:58.001516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.001541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:58.001603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:58.001627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.001651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:58.001678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.001700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:58.001728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:58.001761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:58.001785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:58.001804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:58.006742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.006817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.006865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:58.006904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:58.006932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.007002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:59.388123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-07T14:10:59.388232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-04-07T14:10:59.388689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.388782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.388821Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId#108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-04-07T14:10:59.388952Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-04-07T14:10:59.389097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.389143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-07T14:10:59.390745Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.390808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.390923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-07T14:10:59.391007Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.391032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:334:2312], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-07T14:10:59.391063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:334:2312], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-04-07T14:10:59.391132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.391183Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId#108:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:10:59.391226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts 
operationId#108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-04-07T14:10:59.391978Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:10:59.392072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:10:59.392108Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-07T14:10:59.392152Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-07T14:10:59.392194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:10:59.393072Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:10:59.393133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:10:59.393149Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-07T14:10:59.393179Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-04-07T14:10:59.393196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T14:10:59.393233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-07T14:10:59.393884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-04-07T14:10:59.394616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-07T14:10:59.395589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-07T14:10:59.407059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2025-04-07T14:10:59.407095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-07T14:10:59.407172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2025-04-07T14:10:59.407213Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 FAKE_COORDINATOR: 
Erasing txId 108 2025-04-07T14:10:59.407444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-07T14:10:59.407473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-07T14:10:59.407547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-07T14:10:59.407582Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-07T14:10:59.409138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.409727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.409805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.409827Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-07T14:10:59.409910Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-07T14:10:59.409946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:10:59.409977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-07T14:10:59.410015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:487:2439] message: TxId: 108 2025-04-07T14:10:59.410054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:10:59.410082Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-07T14:10:59.410103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-07T14:10:59.410172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:10:59.411550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T14:10:59.411599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:855:2783] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-04-07T14:10:59.413613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:59.413820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.414144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-04-07T14:10:59.415544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.415657Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-07T14:10:59.415976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-07T14:10:59.416013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-07T14:10:59.416412Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-07T14:10:59.416465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-07T14:10:59.416487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:893:2821] TestWaitNotification: OK eventTxId 109 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:59.293146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:59.293216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.293246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:59.293271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:59.293303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:59.293323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:59.293363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:59.293611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:59.336091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:59.336141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:59.343020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:59.343144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:59.343311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:59.347078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:59.347236Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:59.347626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.347774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:59.349026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.349861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.349904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.350005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:59.350058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.350088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:59.350218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.354999Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:59.435559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:59.435803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.436017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:59.436194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:59.436249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.438336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.438455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:59.438622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.438678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:59.438721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:59.438778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-07T14:10:59.440498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.440545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:59.440579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:59.442013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.442054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.442092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.442160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.444575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:59.445733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:59.445873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:59.446816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.446924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.446962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.447157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:59.447192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:59.447312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.447367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:59.448591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:59.448619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:59.448769Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:59.448797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:59.448982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.449012Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:59.449079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:59.449100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.449131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:59.449158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:59.449184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:59.449203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:59.449261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:59.449293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:59.449314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:59.450477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.450543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:59.450567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:59.450592Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:59.450629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:59.450710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
cute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:10:59.646631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:10:59.646657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-07T14:10:59.646687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:10:59.647566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:10:59.647610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:10:59.647626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:10:59.647657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:10:59.647678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:10:59.647719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:10:59.648184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 880 } } 2025-04-07T14:10:59.648210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-07T14:10:59.648286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 880 } } 2025-04-07T14:10:59.648348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 880 } } 2025-04-07T14:10:59.649134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 493 RawX2: 4294969741 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:10:59.649170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-07T14:10:59.649256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 493 RawX2: 4294969741 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:10:59.649287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:10:59.649338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 493 RawX2: 4294969741 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:10:59.649374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:59.649414Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.649441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:10:59.649466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:10:59.651511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:10:59.651615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:10:59.651670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.651758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.651844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:10:59.651869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:10:59.651940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:10:59.651975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:10:59.652008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:10:59.652053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:301:2293] message: TxId: 102 2025-04-07T14:10:59.652088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:10:59.652115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:10:59.652140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:10:59.652225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:10:59.653378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:10:59.653407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:527:2466] 
TestWaitNotification: OK eventTxId 102 2025-04-07T14:10:59.653733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.653861Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 135us result status StatusSuccess 2025-04-07T14:10:59.654117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:59.654513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:10:59.654663Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 121us result status StatusSuccess 2025-04-07T14:10:59.654930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:58.611274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:58.611358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.611397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:58.611427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:58.611463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:58.611491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:58.611543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.611822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:58.657283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:58.657332Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:58.665571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:58.665670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-07T14:10:58.665794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:58.669230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:58.669349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:58.669731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.669867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:58.670972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.671739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.671775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.671858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:58.671884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.671906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:58.672021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.677427Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:58.754539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.754681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.754812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:58.754953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:58.754991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.756728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.756819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:58.756915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.756951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:58.756974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:58.757007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:58.758295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.758329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:58.758348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:58.759465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.759491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.759514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.759565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.761743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:58.762941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:58.763055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:58.763638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.763726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.763756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.763921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:58.763949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.764060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.764113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:58.765562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T14:10:58.765601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.765698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.765720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:58.765876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.765908Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:58.765961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:58.765979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.766003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:58.766037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.766060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:58.766089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:58.766123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:58.766145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:58.766164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:58.767310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.767378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.767408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:58.767440Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:58.767467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.767531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
2025-04-07T14:11:00.040001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Source { RawX1: 430 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T14:11:00.040036Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:11:00.040082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 430 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-07T14:11:00.040119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:00.040156Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.040194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-07T14:11:00.040224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:11:00.040258Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-04-07T14:11:00.041588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.041809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.041843Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-04-07T14:11:00.041888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:00.041917Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-04-07T14:11:00.041975Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-07T14:11:00.042005Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2025-04-07T14:11:00.043362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.043409Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-07T14:11:00.043512Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-07T14:11:00.043544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:11:00.043578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-07T14:11:00.043632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:661:2573] message: TxId: 106 
2025-04-07T14:11:00.043671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-07T14:11:00.043702Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-07T14:11:00.043727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-07T14:11:00.043820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T14:11:00.043851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:00.045037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:11:00.045078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:814:2715] TestWaitNotification: OK eventTxId 106 2025-04-07T14:11:00.045547Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.045718Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 196us result status StatusSuccess 2025-04-07T14:11:00.046020Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T14:11:00.046536Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.046689Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 171us result status StatusSuccess 2025-04-07T14:11:00.046932Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:00.047353Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.047457Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 118us result status StatusSuccess 2025-04-07T14:11:00.047717Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 
SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-04-07T14:10:31.987194Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:32.052612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:32.066181Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:32.066232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:32.066396Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:32.072161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:32.072323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:32.072491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:32.072557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:32.072614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-07T14:10:32.072717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:32.072779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:32.072864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:32.072952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:32.073033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:32.073100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:32.073155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:32.088806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:32.090784Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:32.090890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:32.090927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:32.091052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:32.091159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:32.091215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:32.091258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:32.091315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:32.091370Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:32.091398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:32.091420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:32.091510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:32.091539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:32.091558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:32.091573Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:32.091641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:32.091680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:32.091733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:32.091772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:32.091820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:32.091841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:32.091859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:32.091882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:32.091904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:32.091919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:32.092029Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-04-07T14:10:32.092088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-07T14:10:32.092157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-04-07T14:10:32.092207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=22; 2025-04-07T14:10:32.092299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:32.092331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:32.092354Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:32.092493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:32.092532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:32.092549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:32.092633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:32.092679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:32.092701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:32.092819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:32.092844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:32.092869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-07T14:10:32.092943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
ge_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=3074; 2025-04-07T14:11:00.006962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7071; 2025-04-07T14:11:00.007528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=520; 2025-04-07T14:11:00.008685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=458; 2025-04-07T14:11:00.008741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1170; 2025-04-07T14:11:00.008831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=55; 2025-04-07T14:11:00.008914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:11:00.008951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=90; 2025-04-07T14:11:00.009184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=195; 2025-04-07T14:11:00.009375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=155; 2025-04-07T14:11:00.010234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=827; 2025-04-07T14:11:00.011150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=877; 2025-04-07T14:11:00.011210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=21; 2025-04-07T14:11:00.011257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=15; 2025-04-07T14:11:00.011285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-04-07T14:11:00.011313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:11:00.011341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T14:11:00.011392Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2025-04-07T14:11:00.011423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:11:00.011478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=32; 2025-04-07T14:11:00.011504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-04-07T14:11:00.011547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-04-07T14:11:00.011585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15955; 2025-04-07T14:11:00.011694Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=35;blobs=70;rows=1239297;bytes=98153860;raw_bytes=121802583; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:11:00.011771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:11:00.011810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:11:00.011839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:11:00.011888Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:11:00.011915Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:11:00.011959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:11:00.012015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:11:00.012071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:11:00.012112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:11:00.012149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:11:00.012191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:11:00.012223Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:11:00.012274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:11:00.012313Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:11:00.012892Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4059:5686];tablet_id=9437184;parent=[1:4004:5638];fline=manager.h:99;event=ask_data;request=request_id=177;1={portions_count=37};; 2025-04-07T14:11:00.013153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:11:00.013599Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:11:00.013858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:11:00.013880Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:11:00.013899Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:11:00.013955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:11:00.013997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:11:00.014038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=9; 2025-04-07T14:11:00.014086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-07T14:11:00.014126Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:11:00.014159Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:11:00.014186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:11:00.014216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:11:00.014286Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:11:00.014329Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:11:00.014731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2025-04-07T14:11:00.015734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2025-04-07T14:11:00.017940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:11:00.017977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4004:5638];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:56.884821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:56.884924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.884974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:56.885012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:56.885059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:56.885089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:56.885161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.885493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:56.939684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:56.939747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:56.949144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:56.949281Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:56.949472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:56.953606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:56.953808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:56.954297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.954469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:56.955834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.956751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.956795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.956877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:56.956914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.956944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:56.957048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.961706Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:57.055698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:57.055918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.056132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:57.056354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:57.056419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.058514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.058633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:57.058787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.058844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:57.058879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:57.058931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:57.060512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.060560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:57.060596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:57.061736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.061788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.061820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.061878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.064246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:57.065455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:57.065572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:57.066299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:57.066398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:57.066438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.066637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:57.066670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:57.066792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.066865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:57.068196Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:57.068227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:57.068341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:57.068367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:57.068571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:57.068602Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:57.068722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:57.068749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.068789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:57.068820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:57.068846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:57.068867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:57.068924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:57.068956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:57.068977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:57.070300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:57.070371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:57.070395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:57.070420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:57.070456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:57.070521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
atency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 619 } } 2025-04-07T14:11:00.534706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T14:11:00.534755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-07T14:11:00.534853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T14:11:00.534892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-07T14:11:00.535577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.535631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T14:11:00.535686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:00.535723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-07T14:11:00.535813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T14:11:00.535913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-07T14:11:00.536018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:00.536077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:00.537201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.538360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.538517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:00.538553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:00.538708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:00.538869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:00.538904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T14:11:00.538942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-07T14:11:00.539158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.539210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:11:00.539280Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.539312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:11:00.539365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T14:11:00.540184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:00.540276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:00.540320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:00.540353Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-07T14:11:00.540393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:00.541199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:00.541267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:00.541289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:00.541314Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:11:00.541339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:11:00.541388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T14:11:00.543228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.543271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:00.543560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:00.543711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 
2025-04-07T14:11:00.543748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:00.543788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:11:00.543841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2372] message: TxId: 103 2025-04-07T14:11:00.543897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:00.543940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:11:00.543969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:11:00.544038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:00.544334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:00.544371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:00.545571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:00.545783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:00.546423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:00.546464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-07T14:11:00.546537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:00.546567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:661:2598] 2025-04-07T14:11:00.547172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:00.547959Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.548116Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 174us result status StatusSuccess 2025-04-07T14:11:00.548572Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:00.258498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:00.258582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:00.258610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:00.258635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:00.258665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:00.258685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:00.258744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:00.258983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:00.301915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:00.301961Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:00.308554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:00.308653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:00.308828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-07T14:11:00.312567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:00.312740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:00.313125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:00.313282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:00.314459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:00.315315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:00.315353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:00.315427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:00.315466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:00.315492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:00.315604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.320185Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:00.401333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:00.401499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.401634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:00.401789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:00.401831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.403733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:00.403846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:00.403996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.404045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:00.404079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-07T14:11:00.404117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:00.405355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.405388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:00.405409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:00.406407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.406432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.406455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:00.406497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:00.408501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:00.409473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:00.409615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:00.410179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:00.410263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:00.410295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:00.410457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:00.410485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:00.410590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:00.410655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:00.411661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:00.411685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:00.411799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:00.411824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:00.412014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.412042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:00.412122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:00.412144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:00.412168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:00.412200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:00.412229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:00.412252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:00.412293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:00.412322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:00.412346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:00.417189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:00.417275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:00.417309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:00.417336Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:00.417371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:00.417443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.731249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:11:00.731290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T14:11:00.733942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:00.734109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.734217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:00.734299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.734455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:00.734479Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T14:11:00.734527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:11:00.734548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:00.734577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:11:00.734615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2440] message: TxId: 103 2025-04-07T14:11:00.734649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:00.734670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:11:00.734684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:11:00.734737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:11:00.735683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:00.735709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:488:2441] TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:00.736026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.736151Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 130us result status StatusSuccess 2025-04-07T14:11:00.736419Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:00.736808Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.736914Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 111us result status StatusSuccess 2025-04-07T14:11:00.737134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:00.737483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.737610Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 100us result status StatusSuccess 2025-04-07T14:11:00.737776Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:00.738107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:00.738237Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 133us result status StatusSuccess 2025-04-07T14:11:00.738426Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::CreateAndWait ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:01.025134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:01.025231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.025262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:01.025290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:01.025323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:01.025349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:01.025391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.025627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:01.072937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:01.072990Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-07T14:11:01.080342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:01.080446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:01.080600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:01.084103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:01.084247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:01.084676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.084818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:01.085943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.086877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.086917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.087000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:01.087051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.087080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:01.087211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.091671Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:01.166587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.166834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.167039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:01.167260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.167349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.169098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.169194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:01.169307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.169353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:01.169379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:01.169428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:01.170564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.170604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:01.170639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:01.171627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.171657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.171685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.171735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.174097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:01.175241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:01.175391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:01.176034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.176129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.176169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.176365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:01.176407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.176520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.176575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:01.177735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.177780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.177885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.177916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:01.178108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.178165Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:01.178234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:01.178259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.178288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:01.178316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.178338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:01.178360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:01.178413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:01.178442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:01.178466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:01.179658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.179729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.179756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:01.179783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:01.179820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.179896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
4Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:01.200304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T14:11:01.200901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.200971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.200999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:01.201108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:01.201142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:01.201278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.201313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:01.201343Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:01.201768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:01.201833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:01.202612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.202634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.202704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:01.202757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.202779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:01.202810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:01.202845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.202869Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:01.202943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:01.202964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:01.202992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:01.203027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:01.203050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:01.203072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:01.203122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:01.203146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:01.203167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:11:01.203185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:11:01.203772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.203820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.203838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:01.203862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:11:01.203888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:01.204568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.204620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.204642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:01.204681Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:01.204700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:01.204742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:01.205625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:01.206320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:11:01.208106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.208242Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-04-07T14:11:01.208264Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-04-07T14:11:01.208383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.208422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.209604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.209696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-07T14:11:01.209871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:01.209906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T14:11:01.209973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:01.209986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:01.210237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:01.210301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:01.210326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.210347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2294] 2025-04-07T14:11:01.210445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.210461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:01.469053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:01.469132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.469163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:01.469191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:01.469224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:01.469243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2025-04-07T14:11:01.469281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.469507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:01.512373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:01.512414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:01.520763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:01.520875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:01.521016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:01.524462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:01.524608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:01.525003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.525124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:01.526211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.527068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.527131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.527214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:01.527253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.527290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:01.527387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.531580Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:01.607655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.607826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.607966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:01.608109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.608159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.609542Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.609631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:01.609729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.609768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:01.609792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:01.609824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:01.610819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.610854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:01.610876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:01.611783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.611810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.611842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.611894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.613924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:01.614870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:01.614987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:01.615643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.615733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.615767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.615952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:01.615985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.616092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.616149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:01.617206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.617233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.617331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.617356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:01.617517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.617545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:01.617610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:01.617631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.617660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:01.617686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.617708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:01.617728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:01.617769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:01.617805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:01.617836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:01.618938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.619012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.619035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:01.619059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:01.619106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.619167Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notif ... 3409548 2025-04-07T14:11:01.746481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:11:01.746585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:01.746793Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:11:01.747301Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2025-04-07T14:11:01.747687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:01.747785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-07T14:11:01.748154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:01.748254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-07T14:11:01.748717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:01.749300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:01.749370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:01.749454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:01.751152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:11:01.751202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:11:01.751319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-07T14:11:01.751398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-07T14:11:01.751497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:01.751578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:01.751606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:01.751671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.753708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:01.753741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:01.753824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:01.753842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:01.753881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:11:01.753898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:11:01.753972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-07T14:11:01.754223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:01.754250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:01.754478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:01.754529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:01.754611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:01.755270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-04-07T14:11:01.755504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:01.755539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:11:01.755625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:01.755642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T14:11:01.755738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:01.755758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:01.756145Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:01.756244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:01.756298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.756323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:638:2542] 2025-04-07T14:11:01.756475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.756492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: satisfy waiter [1:638:2542] 2025-04-07T14:11:01.756545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:01.756584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.756602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:638:2542] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:01.756990Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.757111Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 169us result status StatusPathDoesNotExist 2025-04-07T14:11:01.757261Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:01.757560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.757655Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 102us result status StatusPathDoesNotExist 2025-04-07T14:11:01.757724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:01.757975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.758128Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" 
took 108us result status StatusSuccess 2025-04-07T14:11:01.758369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Restart >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:01.380378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:01.380452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.380477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:01.380500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:01.380535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:01.380552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:01.380593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.380838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:01.422697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T14:11:01.422741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:01.429657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:01.429748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:01.429902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:01.433477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:01.433602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:01.433950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.434083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:01.435145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.435932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.435969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.436042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:01.436074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.436099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:01.436196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.440355Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:01.510463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.510653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.510792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:01.510919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.510958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.512833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.512913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T14:11:01.513016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.513065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:01.513092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:01.513134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:01.514578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.514622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:01.514653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:01.515826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.515855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.515877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.515922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.521874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:01.523162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:01.523283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:01.523947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.524090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.524140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.524371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:01.524416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.524562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.524630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:01.526101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.526129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.526235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.526257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:01.526436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.526462Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:01.526591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:01.526614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.526642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:01.526665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.526685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:01.526701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:01.526749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:01.526773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:01.526791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:01.527900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.527957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.527980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:01.528001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:01.528033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.528093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
6233409548, at schemeshard: 72057594046678944 2025-04-07T14:11:01.792735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:11:01.793274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:01.793336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:01.794204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.794291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.794424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.794447Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:01.794519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:11:01.794542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:01.794570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:11:01.794617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:631:2562] message: TxId: 102 2025-04-07T14:11:01.794653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:01.794677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:01.794695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:01.794755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:01.795653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.795687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:632:2563] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-04-07T14:11:01.797263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.797397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.797483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T14:11:01.798619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 
PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-04-07T14:11:01.798701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-04-07T14:11:01.799038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.799191Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 129us result status StatusSuccess 2025-04-07T14:11:01.799450Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.799792Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.799902Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 117us result status StatusSuccess 2025-04-07T14:11:01.800103Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" 
Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.800520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.800595Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 81us result status StatusSuccess 2025-04-07T14:11:01.800779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.801102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:01.801208Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 83us result status StatusSuccess 2025-04-07T14:11:01.801366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:01.738822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:01.738912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.738951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:01.738991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:01.739037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:01.739062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:01.739143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.739451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:01.804122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:01.804176Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:01.813179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:01.813321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:01.813484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:01.817871Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:01.818030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:01.818521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.818691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:01.820124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.821208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.821262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.821372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:01.821422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.821460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:01.821606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.826885Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:01.945515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.945684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.945871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:01.946056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.946118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.947817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.947925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:01.948058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.948107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:01.948140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-07T14:11:01.948177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:01.949605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.949649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:01.949679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:01.950901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.950939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.950969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.951030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.954467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:01.955902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:01.956043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:01.956916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.957002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:01.957037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.957205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:01.957237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:01.957372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.957430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:01.958582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.958612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.958726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.958762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:01.958924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.958950Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:01.959008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:01.959027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.959053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:01.959078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:01.959118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:01.959136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:01.959178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:01.959206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:01.959227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:01.960326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.960395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:01.960417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:01.960442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:01.960477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:01.960543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
-07T14:11:01.996638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:01.996829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:01.997058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:01.997933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.997967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:01.998033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:01.998092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.998134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:01.998155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T14:11:01.998274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.998294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:01.998353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:01.998373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:01.998401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:01.998426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:01.998446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:01.998475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:01.998511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:01.998548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:01.998571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:11:01.998588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:11:01.998996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.999048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2025-04-07T14:11:01.999067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:01.999113Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:11:01.999145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:01.999543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.999585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:01.999603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:01.999623Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:01.999656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:01.999691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:02.001417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.001468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:11:02.001599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:02.001627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:02.001875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:02.001925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.001954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:328:2320] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:02.002297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.002396Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 111us result status StatusSuccess 2025-04-07T14:11:02.002648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.002950Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.003067Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 115us result status StatusSuccess 2025-04-07T14:11:02.003305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.003600Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-07T14:11:02.003719Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 101us result status StatusSuccess 2025-04-07T14:11:02.003861Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:01.808768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:01.808869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.808905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:01.808945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:01.808995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:01.809018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:01.809070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:01.809397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:01.875504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:01.875559Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:01.885297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:01.885425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:01.885588Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:01.890572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:01.890756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:01.891330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.891486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:01.892942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.894051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:01.894105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:01.894215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:01.894267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:01.894310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:01.894449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.899844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:01.995997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:01.996138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.996283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:01.996424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:01.996465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.997950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:01.998054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:01.998149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.998184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T14:11:01.998206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:01.998236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:01.999405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:01.999439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:01.999458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.000482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.000509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.000540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.000584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.002748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.003879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.003979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.004551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.004639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.004688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.004873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.004901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.004994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.005038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:02.006205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.006237Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.006329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.006351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.006513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.006541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:02.006607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.006630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.006653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.006685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.006709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.006727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.006773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.006797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.006814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.007870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.007932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.007952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.007975Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.008008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.008065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
txId: 101 at step: 5000003 2025-04-07T14:11:02.043796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.043848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.043873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:02.044019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:02.044064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:02.044190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:02.044227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:02.044253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:02.045388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.045414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:02.045490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:02.045552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.045577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:02.045601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:02.045774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.045804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:02.045851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:02.045878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:02.045902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:02.045920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:02.045936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:02.045950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:02.045979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:02.045998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:02.046013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:11:02.046030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:11:02.046556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.046611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.046631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:02.046648Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:11:02.046664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:02.047177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.047223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.047239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:02.047255Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:02.047274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:02.047311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:02.049548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.049633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-07T14:11:02.049806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:02.049831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:11:02.049889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send 
EvNotifyTxCompletion 2025-04-07T14:11:02.049901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:02.050227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:02.050286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:02.050308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.050329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:325:2317] 2025-04-07T14:11:02.050419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.050440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:325:2317] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:02.050693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.050804Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 138us result status StatusSuccess 2025-04-07T14:11:02.051050Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.051414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.051540Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 121us result status StatusSuccess 2025-04-07T14:11:02.051727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:58.216250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:58.216340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.216366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:58.216391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:58.216425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:58.216452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:58.216504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:58.216828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:58.265258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:58.265299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:58.273169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:58.273277Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:58.273426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:58.277761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:58.277899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:58.278264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.278393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:58.279965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.281030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.281072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.281148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:58.281185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.281225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:58.281368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.287315Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:58.368601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:58.368787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.368955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:58.369093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:58.369136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.370964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.371068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:58.371204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.371241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:58.371267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:58.371300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:58.372570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.372636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:58.372683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:58.373951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.373985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.374017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.374064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.376603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:58.377791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:58.377901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:58.378726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:58.378853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:58.378891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.379077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:58.379125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:58.379243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.379298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:58.380600Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:58.380632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:58.380772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:58.380800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:58.381007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:58.381036Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:58.381129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:58.381161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.381191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:58.381218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:58.381240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:58.381261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:58.381304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:58.381331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:58.381352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:58.382488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.382565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:58.382588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:58.382613Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:58.382648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:58.382706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
us: COMPLETE TxId: 104 Step: 10100 OrderId: 104 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 435 } } 2025-04-07T14:11:01.933088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:01.933119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-07T14:11:01.933194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:01.933224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-07T14:11:01.934396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.934438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-07T14:11:01.934467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:01.934491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-07T14:11:01.934543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-07T14:11:01.934617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-07T14:11:01.934695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-07T14:11:01.934735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:01.936142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.936311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:01.937170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-07T14:11:01.937280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 1 
2025-04-07T14:11:01.937347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-07T14:11:01.937514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-07T14:11:01.937607Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-07T14:11:01.937660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-07T14:11:01.938327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:01.938387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:01.938420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:01.938451Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-07T14:11:01.938475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-07T14:11:01.938958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:01.939015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:01.939031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:01.939046Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:01.939075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-07T14:11:01.939126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:11:01.940567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:01.940602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-07T14:11:01.940868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:01.940983Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:11:01.941014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:01.941042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:11:01.941082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:552:2488] message: TxId: 104 2025-04-07T14:11:01.941108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:01.941130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:11:01.941152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:11:01.941209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-07T14:11:01.941589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:01.941626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:01.942560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:01.942857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:01.943136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:01.943178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-07T14:11:01.943242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:11:01.943264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:742:2662] 2025-04-07T14:11:01.943732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-07T14:11:01.944288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-07T14:11:01.944407Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 134us result status StatusSuccess 2025-04-07T14:11:01.944629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 
SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:53.438944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:53.439044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.439081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:53.439117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:53.439600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:53.439630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:53.439694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:53.440723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:53.493372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:53.493405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:53.501161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:53.501289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:53.501414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:53.507026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:10:53.508187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:53.509434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.510120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:53.513747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.520267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:53.520322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:53.520369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:53.520512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.525494Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:53.612686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:53.614162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.614835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:53.616332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:53.616402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.618966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.620383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:53.620565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.620615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:53.620715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:53.620763Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:53.622491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.622555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:53.622585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:53.624111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.624159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.624193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.624266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.628056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:53.629372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:53.630466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:53.632060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:53.632169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:53.632211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.632416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:53.632465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:53.632602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.632658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:53.634295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:53.634336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:10:53.634493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:53.634546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:53.634813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:53.634854Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:53.634987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:53.635025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.635082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:53.635126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:53.635173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:53.635198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:53.635259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:53.635300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:53.635327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:53.637042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.637133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:53.637163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:53.637190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:53.637238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:53.637320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
plete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.178212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:477:2437] sender: [1:751:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:477:2437] sender: [1:754:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:477:2437] sender: [1:755:2058] recipient: [1:753:2678] Leader for TabletID 72057594046678944 is [1:756:2679] sender: [1:757:2058] recipient: [1:753:2678] 2025-04-07T14:11:02.207307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.207373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.207414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.207438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.207461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.207479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.207513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.207776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.219624Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.220816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.220935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.221018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.221037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.221228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.221664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:02.221725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:02.221752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:02.221792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.221838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:11:02.222183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:11:02.222333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:02.222540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:02.222551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:02.222612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.222893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-07T14:11:02.223154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.223325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.223736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.223826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.224980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.225033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.231142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.231192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.231293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.231323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.231344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.231509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:756:2679] sender: [1:809:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.263162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.263329Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 147us result status StatusSuccess 2025-04-07T14:11:02.263751Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.252401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.252486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.252516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.252549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.252586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.252607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.252647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.252964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.298080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.298142Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.305249Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.305366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.305521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:02.309682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.309840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.310262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.310415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.311653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.312560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.312605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.312718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.312759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.312793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.312910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.317544Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.395422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.395604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.395750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:02.395934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.395986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.397573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.397688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:02.397802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T14:11:02.397847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.397875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.397918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:02.399290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.399330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.399353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.400355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.400393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.400436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.400487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.402957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.404109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.404261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.404992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.405125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.405162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.405357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.405393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.405515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.405597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-07T14:11:02.406892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.406926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.407051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.407079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.407285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.407345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:02.407418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.407446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.407484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.407514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.407545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.407569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.407625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.407654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.407679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.408970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.409056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.409091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.409124Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.409165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.409245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:02.411456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:02.411791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-07T14:11:02.412256Z node 1 :TX_PROXY DEBUG: actor# 
[1:264:2256] Bootstrap 2025-04-07T14:11:02.423580Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:11:02.425121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.425316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.425375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.426469Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:02.427783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.427900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-07T14:11:02.428530Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-07T14:11:02.428696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:02.428724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-07T14:11:02.428984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:02.429039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.429061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:279:2271] TestWaitNotification: OK eventTxId 100 2025-04-07T14:11:02.429429Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.429542Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 134us result status StatusPathDoesNotExist 2025-04-07T14:11:02.429658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 
LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.326633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.326702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.326731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.326755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.326797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.326831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.326871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.327118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.370795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.370859Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.378586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.378700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.378840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:02.382663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.382806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.383196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.383333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.384522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:11:02.385430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.385472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.385559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.385605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.385638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.385749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.390889Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.469698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.469851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.470008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:02.470165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.470209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.472062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.472186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:02.472303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.472345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.472389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.472442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:02.473832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.473875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.473903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.475117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.475148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.475176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.475230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.477437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.478805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.478926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.479803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.479909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.479972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.480155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.480201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.480321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.480392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:02.482106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.482136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.482241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.482277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.482479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.482514Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-07T14:11:02.482581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.482601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.482631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.482663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.482694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.482719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.482769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.482796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.482814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.484035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.484107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.484168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.484202Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.484244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.484310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d: 107, at schemeshard: 72057594046678944 2025-04-07T14:11:02.621165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.621196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:559:2514] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-07T14:11:02.623058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.623212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.623330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:02.623451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.623481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.624969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:02.625091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-07T14:11:02.625204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.625257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.625308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.625331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-04-07T14:11:02.626407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.626441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.626464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-04-07T14:11:02.627771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.627815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.627852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-04-07T14:11:02.627889Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-07T14:11:02.627991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.629113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-07T14:11:02.629232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-04-07T14:11:02.629433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.629500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.629535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-07T14:11:02.629678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-04-07T14:11:02.629707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-07T14:11:02.629809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:02.629870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:02.631231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.631285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:02.631451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.631490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 108, path id: 2 FAKE_COORDINATOR: Erasing txId 108 2025-04-07T14:11:02.631896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.631947Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-07T14:11:02.632022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-07T14:11:02.632043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:11:02.632069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is 
published: false 2025-04-07T14:11:02.632092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:11:02.632121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-07T14:11:02.632160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-07T14:11:02.632228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:02.632251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-04-07T14:11:02.632272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-04-07T14:11:02.632565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:11:02.632628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:11:02.632654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-04-07T14:11:02.632717Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-07T14:11:02.632747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:02.632801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-04-07T14:11:02.634402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-07T14:11:02.634590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-07T14:11:02.634615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-07T14:11:02.634938Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-07T14:11:02.634989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.635018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:584:2539] TestWaitNotification: OK eventTxId 108 2025-04-07T14:11:02.635424Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.635541Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 135us result status StatusSuccess 2025-04-07T14:11:02.635790Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: 
"USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.730082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.730156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.730188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.730218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.730245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.730271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.730307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.730535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.786517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.786563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.795890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.796026Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.796198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:02.800835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.801002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.801534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.801702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.803230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.804295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.804350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.804457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.804505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.804548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.804726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.810341Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.923412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.923567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.923694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:02.923822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.923866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:02.925652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.925727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.925764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:02.927170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.927214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.927242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.928592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.928626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.928656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.928741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.931579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.932703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.932805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.933365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.933445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.933476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.933641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.933669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.933776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.933840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:02.935180Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.935218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.935366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.935414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.935632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.935660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:02.935720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.935739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.935764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.935789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.935809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.935829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.935871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.935896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.935917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.937015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.937088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.937117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.937141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.937180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.937245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.980714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:02.980759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.980783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:02.980854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-07T14:11:02.980924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.980964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:02.981118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.981384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.982253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.982286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.982340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:02.982415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.982435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:02.982458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:02.982617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.982644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-07T14:11:02.982683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:02.982702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:02.982727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:02.982751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:02.982780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:02.982799Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:02.982834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:02.982857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:02.982876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:11:02.982894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:11:02.983286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.983348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.983372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:02.983394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:11:02.983415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.984019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.984063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:02.984077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:02.984114Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:02.984136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:02.984180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:02.984284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:02.984307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:02.984379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:02.984691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:02.984720Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:02.984774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.985761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.987085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:02.987178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:02.987234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:11:02.987352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:02.987373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:02.987594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:02.987640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:02.987678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2326] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:02.987973Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.988087Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 127us result status StatusPathDoesNotExist 2025-04-07T14:11:02.988197Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:02.988517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.988604Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 85us result status StatusSuccess 
2025-04-07T14:11:02.988824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.617341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.617404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.617438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.617461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.617551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.617571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.617608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.617884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.670340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.670381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.676851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.676959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.677086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:02.680458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.680583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.680963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.681091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.682154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.682938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.682978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.683050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.683085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.683137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.683242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.687253Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.765271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.765481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.765686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:02.765861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.765905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.767779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.767916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:02.768058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.768110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.768144Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.768188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:02.769787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.769835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.769863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.771206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.771247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.771282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.771348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.773702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.775262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.775436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.776329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.776452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.776495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.776767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.776815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.776975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.777060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:02.778693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.778746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.778903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.778972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.779228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.779267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:02.779344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.779377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.779410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.779452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.779481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.779506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.779575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.779614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.779639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.781309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.781411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.781446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.781476Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.781529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.781628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-07T14:11:03.036849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:03.038100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.038134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:03.038228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:03.038317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.038344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:03.038369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T14:11:03.038431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.038459Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:03.038527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:03.038553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:03.038578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:03.038600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:03.038626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:03.038644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:03.038764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-07T14:11:03.038797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-04-07T14:11:03.038823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-04-07T14:11:03.038846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:11:03.039520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:03.039600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:03.039626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 
2025-04-07T14:11:03.039657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-07T14:11:03.039686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:03.040299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:03.040365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:03.040388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:03.040409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:03.040440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-07T14:11:03.040504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-07T14:11:03.040535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:910:2744] 2025-04-07T14:11:03.042417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:03.042481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:03.042536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:03.042553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:911:2745] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:03.042900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.043044Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 142us result status StatusSuccess 2025-04-07T14:11:03.043303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.043631Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.043755Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 135us result status StatusSuccess 2025-04-07T14:11:03.043960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.044344Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.044436Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 106us result status StatusSuccess 2025-04-07T14:11:03.044626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.934304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.934409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.934456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.934484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.934516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.934536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.934581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.934819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.981037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.981080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.988498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.988589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.988745Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:02.992499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.992642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.993046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.993171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.994414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.995309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.995359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.995449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.995497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.995561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.995731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.000391Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:03.085591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:03.085791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.085953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:03.086109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:03.086146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.087853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.087958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:03.088066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.088109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-04-07T14:11:03.088151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:03.088187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:03.089541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.089603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:03.089638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:03.090926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.090954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.090984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.091033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.093920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:03.095008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:03.095154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:03.095796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.095917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.095963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.096158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:03.096192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.096311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.096377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:03.097618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:11:03.097666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.097818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.097861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:03.098149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.098197Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:03.098291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:03.098316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.098350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:03.098379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.098416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:03.098441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:03.098492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:03.098520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:03.098542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:03.103977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.104103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.104145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:03.104190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:03.104262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.104364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
944 2025-04-07T14:11:03.131559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:03.131588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-07T14:11:03.131879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:11:03.132143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:11:03.133599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.133640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.133682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:03.133716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-07T14:11:03.133801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:03.134814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:11:03.134906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:11:03.135149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.135218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.135250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:03.135389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:11:03.135424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:03.135528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.135576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:03.135609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:11:03.136908Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.136937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.137035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:03.137107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.137146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:11:03.137176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T14:11:03.137309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.137366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T14:11:03.137436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:11:03.137470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:11:03.137507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:11:03.137554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:11:03.137581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:11:03.137603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:11:03.137644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:03.137671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-07T14:11:03.137695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:11:03.137716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:11:03.138168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:03.138228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:03.138253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:11:03.138287Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:11:03.138321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:03.138676Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:03.138721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:03.138739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:11:03.138757Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:03.138774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:03.138808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-07T14:11:03.140911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:11:03.141057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-07T14:11:03.141306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:03.141338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:11:03.141409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:03.141423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:03.141698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:03.141779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:03.141808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:306:2298] 2025-04-07T14:11:03.141896Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:03.141965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:03.142006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:03.142257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.142383Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 131us result status StatusSuccess 
2025-04-07T14:11:03.142648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:02.636621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.636711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.636741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.636763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.636793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.636812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.636855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.637105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.683335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.683369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.690049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.690153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.690341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T14:11:02.694744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.694912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.695442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.695614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:02.697151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.698235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.698292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.698402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.698468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.698513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.698637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.702676Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.789613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:02.789813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.789997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:02.790192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:02.790251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.792184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.792309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:02.792417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.792453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:02.792479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:02.792511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:02.793743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.793778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:02.793799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:02.795129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.795166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.795205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.795280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.798521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:02.799675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:02.799791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:02.800404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:02.800521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.800560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.800781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:02.800827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:02.800945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.801000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:02.802166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.802193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.802298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.802340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:02.802578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.802631Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:02.802711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:02.802739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.802775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:02.802814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:02.802849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:02.802875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:02.802951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:02.802990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:02.803018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:02.804625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.804737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:02.804768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:02.804800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:02.804853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:02.804929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
chemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.879220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:449:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:452:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:453:2058] recipient: [1:451:2404] Leader for TabletID 72057594046678944 is [1:454:2405] sender: [1:455:2058] recipient: [1:451:2404] 2025-04-07T14:11:02.904112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:02.904164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.904194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:02.904230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:02.904257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:02.904283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:02.904320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:02.904512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:02.915449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:02.916481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:02.916618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:02.916808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:02.916839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:02.916920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:02.917506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:02.917580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:02.917659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.917727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.917967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:11:02.918207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.918381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.918479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.918589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:02.918641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:02.918669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:02.918687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:02.918770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.918886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.919071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-07T14:11:02.919399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.919488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.919862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.919940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.920937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.921048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.921096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.921135Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:02.925922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:02.925963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:02.925991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:02.926038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:454:2405] sender: [1:511:2058] recipient: [1:15:2062] 2025-04-07T14:11:02.968096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.968268Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 200us result status StatusSuccess 2025-04-07T14:11:02.968567Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:02.969067Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:02.969201Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 128us result status StatusSuccess 2025-04-07T14:11:02.969495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SchemeQuotas
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice
>> TSchemeShardSubDomainTest::CreateDropNbs
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable
>> TSchemeShardSubDomainTest::RestartAtInFly
>> TSchemeShardSubDomainTest::RmDir
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD]
Test command err: 2025-04-07T14:10:54.848844Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2025-04-07T14:10:54.850812Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:56.296869Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2025-04-07T14:10:56.300017Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:57.734945Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2025-04-07T14:10:57.737389Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:10:59.177489Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 4 2025-04-07T14:10:59.179595Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in 
StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:11:00.652613Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 5 2025-04-07T14:11:00.654802Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-07T14:11:02.141322Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e35/r3tmp/tmpAgBGGa//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 6 2025-04-07T14:11:02.143677Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingMultiColumn
2025-04-07 14:11:01,847 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-07 14:11:02,050 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid     rss    ref    pdirt
290110  75.5M  74.9M  21.2M  test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/000f62/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk171/testing_out_stuff/test_tool.args
290320  2.1G   2.1G   1.6G   └─ ydb-core-kqp-ut-olap --trace-path-append /home/runner/.ya/build/build_root/u6pr/000f62/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk171/ytest.report
Test command err: Trying to start YDB, gRPC: 19780, MsgBus: 18943 2025-04-07T14:01:03.743858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490570985675834043:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:03.760065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f62/r3tmp/tmp6uUe4y/pdisk_1.dat 2025-04-07T14:01:04.300650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:01:04.335988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:01:04.336070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:01:04.354346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19780, node 1 2025-04-07T14:01:04.596609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:01:04.596629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:01:04.596636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:01:04.596739Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18943 TClient is connected to server localhost:18943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:01:05.350332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:01:05.365063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:01:05.439270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:01:05.550162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.550471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.550723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.550878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.550994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.551109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.551213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.551357Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.551473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.551577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.551692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.551795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.595743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.595801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.596208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.596351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.596436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.596555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.596636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.596774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:01:05.596901Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:01:05.597005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:01:05.597129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:01:05.597250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490570994265769165:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:01:05.624418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:01:05.624476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:01:05.624698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:01:05.624824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:01:05.624935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:01:05.625051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:01:05.625141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:01:05.625266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
HARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.233675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.233772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.296764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.296953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.333724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.333962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.334146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.334296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.397254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.397511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.433967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.434142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.434383Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.434482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.497593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.497846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.534368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.534658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.534866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.535038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.597860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.598097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.634701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.635014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.635176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.635340Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.698178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.698388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.734983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.735198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.735483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.735646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490570994265769177:2293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.798620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.798910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490570994265769163:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2025-04-07T14:10:49.835342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; Timing: checkTable took 41 seconds WAIT_COMPACTION: 9 2025-04-07T14:10:49.835673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490570994265769164:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 11 WAIT_COMPACTION: 11 WAIT_COMPACTION: 11 WAIT_COMPACTION: 11 WAIT_COMPACTION: 11 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: count: 14000 2025-04-07T14:10:57.051040Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035056000, txId: 18446744073709551615] shutting down Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/000f62/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk171/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/000f62/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk171/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:03.308456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:03.308525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.308554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:03.308578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:03.308605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:03.308622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:03.308658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.308905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:03.352692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:03.352734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:03.359765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:03.359862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:03.360012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T14:11:03.363991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:03.364135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:03.364499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.364643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:03.365850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.366672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.366713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.366796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:03.366835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.366862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:03.366968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.372263Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:03.445875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:03.446045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.446182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:03.446317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:03.446349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.448033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.448125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:03.448229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.448273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:03.448305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:03.448334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:03.449612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.449648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:03.449668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:03.450643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.450669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.450692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.450738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.452972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:03.454098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:03.454222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:03.454825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.454909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.454945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.455124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:03.455153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.455281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.455330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:03.456401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.456426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.456519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.456544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:03.456719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.456746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:03.456942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:03.456962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.456986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:03.457014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.457038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:03.457058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:03.457112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:03.457137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:03.457167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:03.458273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.458332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.458352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:03.458373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:03.458421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.458488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
7968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:11:03.772255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-07T14:11:03.772399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409552 Forgetting tablet 72075186233409546 2025-04-07T14:11:03.775511Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:11:03.775687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.775813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T14:11:03.776257Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-07T14:11:03.776517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:03.776561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:11:03.776611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:03.776647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:03.776687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:03.777944Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:11:03.779345Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-07T14:11:03.779572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:03.779722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409551 2025-04-07T14:11:03.781327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:11:03.781453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 Forgetting tablet 
72075186233409549 2025-04-07T14:11:03.782135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:03.782265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:03.782648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:03.782860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:03.782995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:03.783278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:03.783767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:03.783837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:03.783892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:03.783992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:03.786362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:11:03.786403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:11:03.786474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-07T14:11:03.786502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-07T14:11:03.786568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:03.786587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:03.786655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:03.786857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:03.786899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:03.786954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.789520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:03.789581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:03.789647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 
2025-04-07T14:11:03.789665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:11:03.789720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:03.789739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:03.789797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:03.789834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:03.789914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:03.791259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T14:11:03.791454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:11:03.791491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:11:03.792036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:11:03.792117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:03.792145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:778:2667] TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:03.792524Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.792642Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 146us result status StatusPathDoesNotExist 2025-04-07T14:11:03.792777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:03.793030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:03.793155Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 92us result status StatusSuccess 
2025-04-07T14:11:03.793359Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:03.958099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:03.958175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.958206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:03.958230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:03.958264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:03.958289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:03.958340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.958567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.000216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.000255Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-07T14:11:04.007410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.007513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.007664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.011058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.011212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.011632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.011760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.012853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.013662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.013709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.013792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.013831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.013861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.013973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.018443Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.093441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.093620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.093772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.093924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.093962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.095599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.095711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.095837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.095887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.095925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.095959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.097170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.097207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.097231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.098182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.098210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.098235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.098306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.104735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.106058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.106193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.106963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.107078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.107136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.107334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.107372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.107497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.107558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.108831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.108860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.108977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.109008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.109192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.109222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.109286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.109307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.109338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.109369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.109403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.109427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.109488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.109526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.109548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.110884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.110963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.110989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.111012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.111051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.111130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.147984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.148009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:04.148033Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:04.148056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:04.148117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-07T14:11:04.149238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:04.149348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-07T14:11:04.149676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.149751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.149783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:04.149860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.149885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:04.149958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-07T14:11:04.150087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.150129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:04.150674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:04.151365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:04.152419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:11:04.152461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.152542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:04.152650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.152698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:04.152740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:04.152776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.152802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-07T14:11:04.152839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:04.152862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:04.152892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:04.152914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:04.152937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:04.152960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:04.153018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:04.153048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:04.153071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:11:04.153104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:11:04.153833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.153889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.153915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:04.153943Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:11:04.153972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.154598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.154648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.154667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:04.154691Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:04.154713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:04.154760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:04.154997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.155049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.155134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:04.155453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.155483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.155526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.156627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:04.157907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:04.157970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:04.158026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:11:04.158207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:04.158237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:04.158542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:04.158608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.158637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: satisfy waiter [1:333:2325] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:04.158987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.159188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 172us result status StatusPathDoesNotExist 2025-04-07T14:11:04.159323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.070107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.070187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.070217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.070239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.070270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.070286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.070322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.070536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.111748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.111781Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.118390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.118473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.118612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.121930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.122066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.122457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.122583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.123725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.124517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.124554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.124641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.124700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.124742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.124854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.128828Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.203270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.203420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.203551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.203699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.203733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.205240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.205338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.205438Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.205475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.205495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.205530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.206631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.206665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.206684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.207788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.207816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.207857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.207911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.210021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.211114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.211234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.211896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.211993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.212030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.212199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.212231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.212348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.212411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.213611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.213660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.213761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.213785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.213947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.213973Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.214033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.214058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.214087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.214110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.214139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.214164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.214223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.214260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.214316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.215437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.215508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.215536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.215561Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.215595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.215667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
chemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.253107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:04.253122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:11:04.253138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.253720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.253765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.253782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:04.253797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:04.253844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:11:04.253874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-04-07T14:11:04.253890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:271:2263] 2025-04-07T14:11:04.255398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.255426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.255448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.255470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.255482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.255493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.256149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:04.256252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:04.256285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.256314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:272:2264] 2025-04-07T14:11:04.256432Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2025-04-07T14:11:04.256614Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-07T14:11:04.256709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-07T14:11:04.256902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T14:11:04.257047Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-07T14:11:04.257183Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-04-07T14:11:04.257235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.257346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:04.257455Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-07T14:11:04.257564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:04.257644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:04.257751Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-07T14:11:04.257887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:11:04.257982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:04.258194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:04.258289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:04.258542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:04.258628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:04.258879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.258909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 
72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.259001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:04.259486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.259514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.259548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.260030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:11:04.261160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:04.261262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:04.261302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:11:04.262700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:04.262764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:04.262828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:04.262921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:04.263318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.263449Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 132us result status StatusPathDoesNotExist 2025-04-07T14:11:04.263561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:04.263799Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.263890Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 99us result status StatusSuccess 
2025-04-07T14:11:04.264110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.349794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.349864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.349888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.349907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.349934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.349951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.349988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.350251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.392466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.392509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.399004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T14:11:04.399133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.399271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.402884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.403016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.403394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.403533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.404609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.405399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.405438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.405522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.405563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.405602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.405708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.409613Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.483386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.483546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.483694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.483844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.483886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.485269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.485365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.485457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.485492Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.485526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.485560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.486652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.486688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.486709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.487670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.487698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.487721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.487835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.493571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.494773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.494885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.495492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.495576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.495612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.495794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.495823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.495940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.495995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.497127Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.497161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.497256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.497280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.497447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.497477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.497533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.497569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.497594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.497619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.497649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.497670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.497717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.497743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.497761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.498898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.498959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.498982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.499004Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.499042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.499113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
72057594046678944 is [1:127:2152] sender: [1:446:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:447:2058] recipient: [1:445:2398] Leader for TabletID 72057594046678944 is [1:448:2399] sender: [1:449:2058] recipient: [1:445:2398] 2025-04-07T14:11:04.582151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.582206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.582245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.582273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.582308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.582329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.582365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.582547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.592421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.593214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.593313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.593512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.593544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.593598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.593997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.594124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:11:04.594469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 
2025-04-07T14:11:04.594762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:04.594789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:04.594803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:04.594874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.594915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.595943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.596448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.600988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.601033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.601179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.601215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.601244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-04-07T14:11:04.601289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-04-07T14:11:04.642768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:04.642810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:448:2399] sender: [1:507:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.643272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:04.643335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.643360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:505:2444] TestWaitNotification: OK eventTxId 100 2025-04-07T14:11:04.643646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.643781Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 162us result status StatusSuccess 2025-04-07T14:11:04.644032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.644332Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.644422Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 100us result status StatusSuccess 2025-04-07T14:11:04.644604Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.215933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.216008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.216039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.216071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.216103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.216124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.216164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.216419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.260759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.260811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.267679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.267833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.267975Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.271608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.271752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.272175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.272310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.273578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.274416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.274457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.274546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.274594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.274623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.274727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.279230Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.343405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.343579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.343735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.343886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.343931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.345539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.345661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.345782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.345829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T14:11:04.345854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.345906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.346990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.347024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.347051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.347990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.348020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.348062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.348111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.350288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.351321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.351438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.352056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.352143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.352194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.352376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.352409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.352522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.352580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.353799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.353830Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.353965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.353993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.354171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.354202Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.354269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.354302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.354328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.354358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.354382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.354412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.354481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.354517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.354540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.355710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.355805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.355837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.355871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.355926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.356017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
thOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.534553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.534571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:04.534588Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:11:04.534622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:04.535208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.535249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:04.535263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:04.535278Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:04.535294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:04.535333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:11:04.535841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.535883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.535913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.535930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:04.536585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:04.537014Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T14:11:04.537894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.538077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:04.538343Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 
2025-04-07T14:11:04.538439Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-07T14:11:04.538899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:04.539018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2025-04-07T14:11:04.539634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:04.539795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:04.539929Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2025-04-07T14:11:04.540547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:04.540645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:04.540920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:04.541313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.541347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:04.541392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:04.541604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.541633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.541703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:04.542020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:04.543422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:04.543469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:04.543599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:04.543615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:04.543882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:04.543900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:04.543944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:04.543968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:04.544730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:04.544794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:04.544820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:04.544862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.545007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:04.545830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:11:04.546009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:04.546040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:04.546379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:04.546433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.546457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:533:2490] TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:04.558041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.558230Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 214us result status StatusPathDoesNotExist 2025-04-07T14:11:04.558391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:04.558996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.559151Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 157us result status StatusPathDoesNotExist 2025-04-07T14:11:04.559271Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.318853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.318921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.318946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.318967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.318997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.319015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.319047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.319256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.358507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.358541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.365183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.365269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.365415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.368697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.368825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.369158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.369274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.370352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.371152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.371190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.371262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.371298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.371330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.371435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.375805Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.442942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.443106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.443235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.443368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.443402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.445028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.445119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.445216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.445253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.445275Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.445309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.446387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.446421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.446440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.447387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.447411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.447435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.447485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.449484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.450501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.450681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.451443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.451532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.451563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.451714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.451742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.451838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.451894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.452996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.453020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.453114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.453136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.453284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.453308Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.453362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.453383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.453408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.453430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.453451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.453468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.453509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.453537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.453562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.454630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.454696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.454729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.454753Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.454784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.454843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
nerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:11:04.691391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-07T14:11:04.693117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.693177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:04.693191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:04.693208Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:04.693224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:11:04.693259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-07T14:11:04.693412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 818 } } 2025-04-07T14:11:04.693429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-07T14:11:04.693507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 818 } } 2025-04-07T14:11:04.693568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 818 } } 2025-04-07T14:11:04.694041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 619 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:11:04.694069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-07T14:11:04.694143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 619 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 
2025-04-07T14:11:04.694169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:11:04.694231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 619 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:11:04.694290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.694314Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.694335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-07T14:11:04.694365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:11:04.696330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:04.696431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:04.696581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.696698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.696845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.696872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:04.696958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:04.696986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:04.697013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-07T14:11:04.697065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:270:2262] message: TxId: 101 2025-04-07T14:11:04.697092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:04.697124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:04.697145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:04.697206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:04.698200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.698230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:271:2263] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:04.698518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.698651Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 139us result status StatusSuccess 2025-04-07T14:11:04.698919Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.699348Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.699484Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 114us result status StatusSuccess 2025-04-07T14:11:04.699729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.414802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.414869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.414900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.414927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.414957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.414974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.415008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.415250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.456024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.456058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.463106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.463236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.463390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.467719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.467894Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.468337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.468478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.470204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.471269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.471320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.471408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.471457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.471489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.471613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.476471Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.555402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.555585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.555728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.555888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.555935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.557470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.557564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.557682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.557723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.557749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.557782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-07T14:11:04.558841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.558886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.558910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.559934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.559966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.559996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.560043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.562365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.563381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.563507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.564156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.564243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.564282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.564475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.564508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.564618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.564695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.565923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.565955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.566058Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.566085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.566263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.566293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.566363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.566388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.566434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.566467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.566492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.566513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.566557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.566587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.566610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.567913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.567997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.568031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.568057Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.568095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.568174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.720041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.720072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:04.720104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-07T14:11:04.720207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.721190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-07T14:11:04.721268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-07T14:11:04.721480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.721556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.721592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:04.721807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-07T14:11:04.721853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-07T14:11:04.721973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.722031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:11:04.722063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:11:04.723133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.723168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.723313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:04.723397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.723426Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-07T14:11:04.723458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-07T14:11:04.723500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.723530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-07T14:11:04.723611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-07T14:11:04.723637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:11:04.723683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-07T14:11:04.723717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-07T14:11:04.723744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-07T14:11:04.723769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-07T14:11:04.723939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-07T14:11:04.723970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-07T14:11:04.723997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:11:04.724017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:11:04.724602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:04.724691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:04.724724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:11:04.724761Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:11:04.724787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.725232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:04.725279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-07T14:11:04.725303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-07T14:11:04.725324Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:04.725344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:11:04.725393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-07T14:11:04.725433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:572:2483] 2025-04-07T14:11:04.727011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:11:04.727597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-07T14:11:04.727672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.727692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:573:2484] TestWaitNotification: OK eventTxId 100 2025-04-07T14:11:04.727977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.728082Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 116us result status StatusSuccess 2025-04-07T14:11:04.728368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:11:04.729918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-07T14:11:04.730010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.730089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T14:11:04.731323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.731440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:55.135352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:55.135440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.135471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:55.135497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:55.135532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:55.135566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:55.135610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:55.135865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:55.182709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:55.182752Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-07T14:10:55.190245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:55.190351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:55.190488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:55.194913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:55.195090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:55.195495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.195625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:55.196871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.197716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.197757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.197831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:55.197867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.197894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:55.197996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.203138Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:55.284241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:55.284392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.284534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:55.284684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:55.284727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.286286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.286381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:55.286484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.286529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:55.286558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:55.286586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:10:55.287924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.287956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:55.287986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:55.289179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.289209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.289234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.289281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.291451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:55.292617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:55.292758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:55.293389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:55.293484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:55.293517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.293702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:55.293732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:55.293838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.293889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:55.295198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:55.295231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:55.295328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:55.295367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:55.295649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:55.295692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:55.295792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:55.295824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.295863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:55.295903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:55.295936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:55.295964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:55.296028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:55.296067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:55.296096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:55.297715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.297801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:55.297833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:55.297866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:55.297918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:55.297997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
dId: 72075186233409548 CpuTimeUsec: 490 } } 2025-04-07T14:11:04.793407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T14:11:04.793448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-07T14:11:04.793552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 4294969696 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-07T14:11:04.793599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-07T14:11:04.793926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.793974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T14:11:04.794034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:04.794068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-07T14:11:04.794126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T14:11:04.794217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-07T14:11:04.794312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:04.794362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:04.796171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.797320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.797521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.797555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:04.797686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:04.797824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.797867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T14:11:04.797906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 
103, path id: 3 2025-04-07T14:11:04.798193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.798239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:11:04.798299Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.798329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-07T14:11:04.798364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-07T14:11:04.799108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:04.799193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:04.799222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:04.799258Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-07T14:11:04.799293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:04.800209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:04.800274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:04.800296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:04.800320Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:11:04.800343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:11:04.800393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-07T14:11:04.802521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.802570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.802833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:11:04.802999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:11:04.803030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:04.803070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-07T14:11:04.803136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2372] message: TxId: 103 2025-04-07T14:11:04.803173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:04.803209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:11:04.803237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:11:04.803305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:04.803722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.803753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:04.804287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:04.805337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:04.806224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.806267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-07T14:11:04.806545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:04.806581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1337:3267] 2025-04-07T14:11:04.807076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:04.810615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:04.810777Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 188us result status StatusSuccess 2025-04-07T14:11:04.811170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefine >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.144738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.144825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.144857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.144881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.144909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.144927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.144964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.145183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.186823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.186861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.193661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.193767Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.193920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.197813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.197946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.198341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.198473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.199686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.200488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.200529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.200634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.200690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.200721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.200823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.205111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.282893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.283040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.283213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.283397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.283440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.285084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.285250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.285353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.285389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.285414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.285447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.286668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.286701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.286721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.287745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.287769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.287796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.287845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.289976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.290970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.291100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.291701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.291789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.291821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.292018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.292047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.292160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.292223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.293348Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.293382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.293475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.293516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.293659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.293685Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.293748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.293773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.293798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.293822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.293845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.293861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.293905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.293931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.293951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.295103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.295164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.295186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.295219Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.295253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.295331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
72057594046678944 2025-04-07T14:11:05.403329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:05.403759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:05.404538Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-07T14:11:05.404744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-07T14:11:05.404938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T14:11:05.405118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:05.405248Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:11:05.405308Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:11:05.405648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.405771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:05.406152Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-07T14:11:05.406744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:05.406837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-04-07T14:11:05.407624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:11:05.407737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-04-07T14:11:05.408241Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409551 2025-04-07T14:11:05.408683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:05.408782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:05.408938Z 
node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-04-07T14:11:05.409960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:05.410086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-07T14:11:05.410360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:05.410388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:05.410465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:05.410760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:05.410788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:05.410823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.411808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:11:05.411849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:11:05.412474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-07T14:11:05.413049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:05.413086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:05.413145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:05.413160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:05.413225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:11:05.413243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:11:05.413305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-07T14:11:05.414882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:05.414914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:05.414973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:05.415017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:05.415199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, 
left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:05.415265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-07T14:11:05.415499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-07T14:11:05.415534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-07T14:11:05.415612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:05.415630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:05.415935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-07T14:11:05.416040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.416071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:618:2525] 2025-04-07T14:11:05.416218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:05.416292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.416313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:618:2525] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:05.416650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.416852Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 183us result status StatusPathDoesNotExist 2025-04-07T14:11:05.417033Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:05.417351Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.417471Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 125us result status StatusSuccess 
2025-04-07T14:11:05.417805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.101497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.101591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.101635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.101671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.101714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.101735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.101785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.102055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.164284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.164331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.174041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T14:11:05.174166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.174335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.178805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.178968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.179484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.179661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.181133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.182162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.182213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.182319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.182376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.182413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.182547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.187728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.256604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.256756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.256881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.257032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.257072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.258729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.258813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.258909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.258940Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.258966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.258996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.260162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.260200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.260220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.261148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.261174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.261199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.261259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.263448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.264438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.264553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.265241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.265326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.265359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.265528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.265564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.265679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.265745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.266894Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.266938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.267063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.267103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.267280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.267313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.267392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.267417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.267442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.267467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.267490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.267508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.267616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.267643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.267673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.268830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.268902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.268928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.268976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.269013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.269075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
22Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409550 shardIdx# 72057594046678944:5 at schemeshard# 72057594046678944 2025-04-07T14:11:05.414628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.426590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2025-04-07T14:11:05.426689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2025-04-07T14:11:05.426723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-07T14:11:05.426752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-07T14:11:05.426781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-07T14:11:05.428657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.428799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.428835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.428869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:05.428925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T14:11:05.429031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.430235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:05.430348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T14:11:05.430593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.430670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.430708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:05.430973Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:05.431016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:05.431153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.431198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:11:05.431239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:05.432580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.432615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.432768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:05.432862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.432910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:05.432949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:05.433001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.433033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:05.433128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:05.433171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:05.433210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:05.433241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:05.433279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:05.433311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:05.433475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-07T14:11:05.433509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-04-07T14:11:05.433547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:11:05.433575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:11:05.434508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:05.434587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:05.434619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:05.434648Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:11:05.434679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.435181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:05.435249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:05.435274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:05.435295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:05.435316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:11:05.435366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-07T14:11:05.435414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:563:2473] 2025-04-07T14:11:05.437366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:05.438183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:05.438253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.438276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:564:2474] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:05.438699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.438842Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 152us result status StatusSuccess 2025-04-07T14:11:05.439206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.157932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.157990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.158027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.158047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.158076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.158093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.158136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.158329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.198746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.198784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.205107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.205198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.205335Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.208311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.208437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.208808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.208933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.210097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.210852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.210890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.210968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.211035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.211060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.211174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.214807Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.290884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.291033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.291172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.291315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.291350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.292750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.292838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.292951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.292987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-04-07T14:11:05.293010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.293036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.294033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.294069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.294089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.294892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.294917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.294940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.294995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.297434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.298346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.298453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.299021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.299109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.299139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.299290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.299317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.299413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.299472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.300465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:11:05.300492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.300581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.300604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.300771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.300795Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.300848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.300872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.300900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.300923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.300944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.301009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.301052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.301077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.301095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.302033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.302091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.302111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.302133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.302174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.302224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.764719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.764741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T14:11:05.764761Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-07T14:11:05.764785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-07T14:11:05.764831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/3, is published: true 2025-04-07T14:11:05.765756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 761 } } 2025-04-07T14:11:05.765798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-04-07T14:11:05.765892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 761 } } 2025-04-07T14:11:05.765976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 761 } } 2025-04-07T14:11:05.766490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 601 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-07T14:11:05.766525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-04-07T14:11:05.766607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 601 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-07T14:11:05.766650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:11:05.766704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 601 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-07T14:11:05.766739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, datashard: 
72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.766769Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-07T14:11:05.766789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:11:05.766811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:2 129 -> 240 2025-04-07T14:11:05.769705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.769779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.769821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.769882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.770141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.770179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-04-07T14:11:05.770257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-04-07T14:11:05.770287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-07T14:11:05.770338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-04-07T14:11:05.770515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.772167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.772264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-07T14:11:05.772349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-07T14:11:05.772568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-07T14:11:05.772601Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-04-07T14:11:05.772652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-04-07T14:11:05.772703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-07T14:11:05.772761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-04-07T14:11:05.772813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:476:2428] message: TxId: 107 2025-04-07T14:11:05.772852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-07T14:11:05.772885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T14:11:05.772910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T14:11:05.772996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:11:05.773026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-04-07T14:11:05.773045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-04-07T14:11:05.773065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:11:05.773081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-04-07T14:11:05.773094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-04-07T14:11:05.773144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T14:11:05.774870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.774903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:531:2483] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-07T14:11:05.777862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.778251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-04-07T14:11:05.778332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-07T14:11:05.778392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-07T14:11:05.779986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.780115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-07T14:11:05.780417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-07T14:11:05.780472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-07T14:11:05.780945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-07T14:11:05.781015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.781056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:721:2643] TestWaitNotification: OK eventTxId 108 >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:10:56.087243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:10:56.087334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.087370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:10:56.087398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:10:56.087436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:10:56.087459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:10:56.087507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:10:56.087800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:10:56.145950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:10:56.146005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:56.152590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:10:56.152708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:10:56.152850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:10:56.156653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:10:56.156816Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:10:56.157160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.157288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:10:56.158429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.159284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.159325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.159401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:10:56.159436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.159462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:10:56.159563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.163584Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:10:56.262043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:10:56.262219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.262381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:10:56.262553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:10:56.262601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.264563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.264693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:10:56.264837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.264880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:10:56.264909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:10:56.264943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2025-04-07T14:10:56.266398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.266466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:10:56.266507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:10:56.267895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.267932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.267962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.268059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.270643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:10:56.271809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:10:56.271950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:10:56.272652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:10:56.272770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:10:56.272810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.273007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:10:56.273040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:10:56.273158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.273216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:10:56.274446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:10:56.274479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:10:56.274590Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:10:56.274621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:10:56.274834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:10:56.274918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:10:56.274983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:10:56.275018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.275051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:10:56.275080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:10:56.275106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:10:56.275144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:10:56.275192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:10:56.275220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:10:56.275244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:10:56.276447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.276527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:10:56.276553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:10:56.276584Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:10:56.276626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:10:56.276721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
atency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 415 } } 2025-04-07T14:11:05.750909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 4294969997 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-07T14:11:05.750933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-04-07T14:11:05.751011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 4294969997 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-07T14:11:05.751049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-07T14:11:05.752396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.752433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T14:11:05.752471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:05.752495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-07T14:11:05.752543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T14:11:05.752608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-07T14:11:05.752911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:05.752996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:11:05.754258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.754418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.755528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.755574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:05.755695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:11:05.755785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.755825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-07T14:11:05.755857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-04-07T14:11:05.755899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.755934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:11:05.756000Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.756035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:11:05.756065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-07T14:11:05.756921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.756991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.757015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T14:11:05.757042Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-04-07T14:11:05.757070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:05.757576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.757643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-07T14:11:05.757681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-07T14:11:05.757718Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:11:05.757746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:11:05.757787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-07T14:11:05.759294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.759354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.759613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:11:05.759712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#107:0 progress is 1/1 2025-04-07T14:11:05.759738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:11:05.759778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-07T14:11:05.759804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-07T14:11:05.759845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-07T14:11:05.759870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-07T14:11:05.759929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:11:05.760293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.760315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:05.761269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.761380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-07T14:11:05.762112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.762148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-07T14:11:05.762595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-04-07T14:11:05.762996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-07T14:11:05.763028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-07T14:11:05.763371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-07T14:11:05.763427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.763454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:992:2923] TestWaitNotification: OK eventTxId 107 2025-04-07T14:11:05.763882Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.764000Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 131us result status StatusSuccess 2025-04-07T14:11:05.764393Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.038557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.038649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.038683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.038716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.038747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.038766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.038811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.039034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.083069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:04.083124Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.090478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.090607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.090764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.094720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:11:04.094851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.095205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.095331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.096577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.097382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.097456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.097535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.097569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.097612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.097713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.102202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.176655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.176827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.176968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.177102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.177147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.178885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.178986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:04.179133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.179177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.179202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.179235Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.180424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.180477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.180503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.181544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.181570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.181594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.181644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.187811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.189199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.189346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.189969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.190053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.190113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.190287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.190318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.190429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.190475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.191910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.191938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:11:04.192034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.192089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.192271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.192299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.192356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.192377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.192451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.192484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.192505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.192522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.192565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.192590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.192610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.193908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.194005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.194041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.194075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.194133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.194228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.736384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.736482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2025-04-07T14:11:05.736916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-04-07T14:11:05.736994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-04-07T14:11:05.737050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-04-07T14:11:05.737121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-04-07T14:11:05.737154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2025-04-07T14:11:05.737589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.737649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.737767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-04-07T14:11:05.737818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-04-07T14:11:05.739866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-04-07T14:11:05.740003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-07T14:11:05.740205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.740249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:05.740432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-04-07T14:11:05.740526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.740581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1018:2890], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-04-07T14:11:05.740633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:1018:2890], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-04-07T14:11:05.741136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.741215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-04-07T14:11:05.741415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-07T14:11:05.742230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-07T14:11:05.742325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-07T14:11:05.742375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-07T14:11:05.742430Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-04-07T14:11:05.742474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-04-07T14:11:05.743816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-07T14:11:05.743869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-07T14:11:05.743890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-07T14:11:05.743914Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-04-07T14:11:05.743936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-07T14:11:05.743984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-04-07T14:11:05.745562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-04-07T14:11:05.745705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-04-07T14:11:05.745747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-07T14:11:05.746097Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-07T14:11:05.746297Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-04-07T14:11:05.746398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-07T14:11:05.746436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-07T14:11:05.746567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-07T14:11:05.746614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-07T14:11:05.746686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-07T14:11:05.746767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2025-04-07T14:11:05.747644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-07T14:11:05.749313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-07T14:11:05.750555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.750678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.750729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#137:0 ProgressState at tabletId# 72057594046678944 2025-04-07T14:11:05.750807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-04-07T14:11:05.751160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 968 RawX2: 4294970146 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-04-07T14:11:05.754270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-04-07T14:11:05.754417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TSchemeShardSubDomainTest::CreateDropSolomon >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.611958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.612033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.612068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.612097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.612129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.612155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.612209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.612469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.657509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T14:11:05.657558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.664409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.664518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.664649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.668169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.668338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.668777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.668908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.670156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.670983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.671027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.671133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.671171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.671203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.671303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.675607Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.742116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.742288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.742418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.742548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.742582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.743950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.744043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T14:11:05.744140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.744172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.744208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.744241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.745320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.745354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.745390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.746303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.746333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.746365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.746435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.755400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.756638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.756783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.757469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.757562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.757612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.757802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.757839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.757949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.758001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.759276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.759305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.759399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.759421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.759598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.759627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.759688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.759708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.759736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.759765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.759802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.759825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.759871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.759910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.759931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.761048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.761114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.761134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.761160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.761209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.761272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
-04-07T14:11:05.908968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.909025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2025-04-07T14:11:05.961060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:05.961111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 Leader for TabletID 72057594046678944 is [1:543:2477] sender: [1:601:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.961770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:05.961858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.961892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:599:2521] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:05.962334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.962492Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 179us result status StatusPathDoesNotExist 2025-04-07T14:11:05.962646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:05.963629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:543:2477] sender: [1:605:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:543:2477] sender: [1:608:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:543:2477] sender: [1:609:2058] recipient: [1:607:2526] Leader for TabletID 72057594046678944 is [1:610:2527] sender: [1:611:2058] recipient: [1:607:2526] 2025-04-07T14:11:05.986881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.986950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.986986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.987009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-04-07T14:11:05.987032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.987049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.987095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.987285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.997408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.998473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.998586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.998657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.998717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.998866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.999290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.999963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.000942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 
2025-04-07T14:11:06.001208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.001645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.005697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.005778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.006004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.006081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.006117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.007590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:610:2527] sender: [1:668:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.038988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.039146Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 183us result status StatusPathDoesNotExist 2025-04-07T14:11:06.039238Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:06.039611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.039733Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 100us result status 
StatusSuccess 2025-04-07T14:11:06.039952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.162700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.162789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.162821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.162847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.162880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.162902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.162942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.163183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.212378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.212424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.219164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.219267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.219398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.223899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.224138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.224757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.224993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.226550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.227452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.227494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.227592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.227632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.227662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.227770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.232165Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.301605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.301759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.301894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.302056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.302104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.303875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.303970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.304065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.304116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.304142Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.304181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.305240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.305275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.305298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.306166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.306195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.306220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.306270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.308485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.309472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.309586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.310462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.310600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.310657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.310899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.310942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.311047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.311133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.312268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.312298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.312397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.312423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.312590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.312618Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.312699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.312724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.312753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.312784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.312808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.312866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.312916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.312956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.312979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.314164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.314243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.314267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.314293Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.314330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.314388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ting tablet 72075186233409564 Forgetting tablet 72075186233409569 Forgetting tablet 72075186233409568 Forgetting tablet 72075186233409573 Forgetting tablet 72075186233409577 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409581 2025-04-07T14:11:05.914369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-07T14:11:05.914407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-07T14:11:05.914486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-07T14:11:05.914505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-07T14:11:05.914610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-07T14:11:05.914650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-07T14:11:05.914748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-07T14:11:05.914769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-07T14:11:05.915138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-07T14:11:05.915165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-07T14:11:05.915238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-07T14:11:05.915257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-07T14:11:05.915461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-07T14:11:05.915485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-07T14:11:05.915588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:05.915609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:05.915660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-07T14:11:05.915677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-07T14:11:05.915713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-04-07T14:11:05.915731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-04-07T14:11:05.915776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-04-07T14:11:05.915800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-04-07T14:11:05.915892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-07T14:11:05.915918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-07T14:11:05.915968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-07T14:11:05.915999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-04-07T14:11:05.918397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-04-07T14:11:05.918435Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:25 tabletId 72075186233409570 2025-04-07T14:11:05.918506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-04-07T14:11:05.918519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-04-07T14:11:05.918570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-04-07T14:11:05.918586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-04-07T14:11:05.918627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-04-07T14:11:05.918639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-04-07T14:11:05.918719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:05.918746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:05.918796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-04-07T14:11:05.918816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-04-07T14:11:05.918876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-07T14:11:05.918899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-07T14:11:05.918969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:11:05.918991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:11:05.922337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-07T14:11:05.922366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-07T14:11:05.922467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-07T14:11:05.922491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-07T14:11:05.922544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-07T14:11:05.922558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-07T14:11:05.922594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-07T14:11:05.922606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-07T14:11:05.922673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-07T14:11:05.922691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-07T14:11:05.922838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-07T14:11:05.922851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-07T14:11:05.922953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-07T14:11:05.922972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-07T14:11:05.922998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted 
shardIdx 72057594046678944:1 2025-04-07T14:11:05.923018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:05.923060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-07T14:11:05.923100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-07T14:11:05.923225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:05.923283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:05.923341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:05.923371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:05.923428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.925200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T14:11:05.925357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:11:05.925389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:11:05.925716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:11:05.925777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:05.925804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2055:3660] TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:05.926149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.926279Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 148us result status StatusPathDoesNotExist 2025-04-07T14:11:05.926407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:05.926788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:05.926932Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 152us result status StatusPathDoesNotExist 2025-04-07T14:11:05.927023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::Delete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.856681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.856745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.856770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.856796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.856830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.856848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.856899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.857116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.897346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.897391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.904283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.904383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:11:05.904537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.908436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.908576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.908977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.909134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.910342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.911222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.911262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.911338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.911382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.911410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.911513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.915960Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.991523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.991681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.991809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.991947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.991991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.993586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.993674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.993772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.993809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.993833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.993876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.995051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.995099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.995121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.996033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.996060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.996082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.996131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.001870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.003148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.003273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.003977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.004089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.004123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.004308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.004344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.004451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.004507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:06.005643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:11:06.005669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.005782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.005805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:06.005968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.005993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:06.006050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:06.006070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.006095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:06.006122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.006144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:06.006162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:06.006207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.006237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:06.006255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:06.007398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.007469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.007489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:06.007514Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:06.007550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.007606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
2025-04-07T14:11:06.306160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T14:11:06.306232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.307405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:11:06.307532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2025-04-07T14:11:06.307765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.307837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.307873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T14:11:06.308074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:11:06.308105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T14:11:06.308209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.308248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-07T14:11:06.308282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:11:06.309450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.309477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.309574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:06.309629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.309650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:11:06.309690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-07T14:11:06.309916Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.309953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:06.310030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:11:06.310054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:06.310090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:11:06.310119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:06.310142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:06.310161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:06.310288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-07T14:11:06.310316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 1 2025-04-07T14:11:06.310336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-07T14:11:06.310353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:11:06.310682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:06.310740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:06.310762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:06.310785Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-07T14:11:06.310808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.311172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:06.311215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:06.311229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:06.311244Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:06.311281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-07T14:11:06.311324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-04-07T14:11:06.311350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:572:2483] 2025-04-07T14:11:06.312904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:06.313600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:06.313681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:06.313713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:960:2784] TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:06.314086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.314198Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 116us result status StatusSuccess 2025-04-07T14:11:06.314557Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.315003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.315144Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 146us result status StatusSuccess 2025-04-07T14:11:06.315331Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.223411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.223520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.223559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.223597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.223649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.223679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.223749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.224086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:06.276752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:06.276792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:06.283041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:06.283169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:06.283329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T14:11:06.286868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:06.287012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:06.287414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.287546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:06.288716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.289509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.289553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.289628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.289662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.289689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.289807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.294082Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.363455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:06.363619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.363741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:06.363868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:06.363897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.365295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.365374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:06.365464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.365517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:06.365551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:06.365581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:06.366649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.366692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:06.366711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:06.367834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.367865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.367896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.367956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.371007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.372330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.372448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.373217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.373293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.373326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.373494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.373524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.373630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.373679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:06.374788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.374816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.374940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.374969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:06.375141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.375167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:06.375222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:06.375241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.375268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:06.375296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.375340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:06.375358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:06.375400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.375426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:06.375455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:06.376639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.376724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.376752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:06.376778Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:06.376812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.376882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
025-04-07T14:11:06.419933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409547 2025-04-07T14:11:06.419942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409548 2025-04-07T14:11:06.447258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-07T14:11:06.447396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-04-07T14:11:06.447437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-07T14:11:06.447476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-04-07T14:11:06.448326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:11:06.448399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-04-07T14:11:06.448455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-07T14:11:06.448477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-04-07T14:11:06.449103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-04-07T14:11:06.449161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-04-07T14:11:06.449177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-07T14:11:06.449194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-07T14:11:06.449211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-07T14:11:06.451811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.451988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.452359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.452438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.452460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.452482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:06.452512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T14:11:06.452590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.453618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:06.453691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-07T14:11:06.453890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.453972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.454003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:06.454156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:06.454200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:06.454297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:06.454340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:06.455664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.455690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:06.455803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.455833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:06.455878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.455917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:06.455982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:06.456014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-04-07T14:11:06.456040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:06.456063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:06.456097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:06.456121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:06.456228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:06.456254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-04-07T14:11:06.456274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-07T14:11:06.456875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:06.456928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:06.456947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:06.456967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:11:06.457001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:06.457094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-07T14:11:06.457138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:301:2293] 2025-04-07T14:11:06.458912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:06.458967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:06.458991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:06.459378Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.459523Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 121us result status StatusSuccess 2025-04-07T14:11:06.459758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.437074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.437140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.437167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.437187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.437225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.437247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.437294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.437528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:06.477467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:06.477512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:06.484166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:06.484272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:06.484429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:06.487760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:11:06.487899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:06.488257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.488373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:06.489517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.490296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.490336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.490410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.490458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.490485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.490597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.494639Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.559583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:06.559729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.559852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:06.559996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:06.560034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.561412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.561492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:06.561576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.561611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:06.561636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:06.561668Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:06.562740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.562773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:06.562793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:06.563645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.563669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.563702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.563748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.569409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.570694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.570830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.571498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.571593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.571624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.571801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.571838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.571954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.572001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:06.573232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.573258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:11:06.573353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.573376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:06.573614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.573641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:06.573697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:06.573716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.573740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:06.573771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.573791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:06.573810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:06.573856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.573879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:06.573898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:06.574997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.575068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.575106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:06.575127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:06.575160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.575220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:06.654832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:06.654850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:06.654866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:06.654883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:06.654918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:06.656538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:06.656571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:06.656595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:06.657143Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T14:11:06.657802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.657985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:06.658193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:06.658244Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:11:06.658425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:06.658528Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-07T14:11:06.659063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:06.659175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-07T14:11:06.659509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:06.659590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:06.659772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:06.659839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:06.659909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:06.660267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:06.660312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:06.660350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.661645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:06.661680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:06.661875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:06.661893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:06.662911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:06.662938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:06.663036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:06.663101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:11:06.663222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:06.663257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:11:06.663442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:06.663486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:06.663505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:483:2438] TestWaitNotification: OK eventTxId 101 2025-04-07T14:11:06.663755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.663864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 118us result status StatusPathDoesNotExist 2025-04-07T14:11:06.663959Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:06.664227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.664319Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 105us result status StatusSuccess 2025-04-07T14:11:06.664487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-07T14:11:06.664768Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:11:06.666300Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-07T14:11:06.666344Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-07T14:11:06.667366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:06.667507Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 135us result status StatusSuccess 
2025-04-07T14:11:06.667715Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.772481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.772563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.772592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.772616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.772646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.772683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.772764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.773064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:06.817405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:06.817441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:06.824073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:06.824158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:11:06.824317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:06.827444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:06.827567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:06.827953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.828082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:06.829220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.830055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.830104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.830182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.830220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.830247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.830344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.834704Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.914801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:06.914929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.915051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:06.915222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:06.915266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.916846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.916937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:06.917038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.917098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:06.917127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:06.917156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:06.918176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.918211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:06.918239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:06.919177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.919201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.919234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.919280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.921366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.922532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.922651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.923314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.923396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.923427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.923594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.923622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.923719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.923770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:06.924832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:11:06.924866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.924983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.925008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:06.925192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.925221Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:06.925281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:06.925299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.925329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:06.925367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.925395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:06.925414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:06.925457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.925481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:06.925498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:06.926819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.926893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.926914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:06.926947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:06.926984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.927054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:07.016342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:07.016480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:11:07.017452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.017477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.017526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:07.017584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.017602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:11:07.017618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:11:07.017694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.017727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-07T14:11:07.017754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:11:07.017772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:07.017820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:11:07.017863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:07.017893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:07.017943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:07.018079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:07.018112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:11:07.018158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:11:07.018186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:11:07.018677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:07.018714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:07.018729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:07.018766Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:11:07.018802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.019238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:07.019297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:07.019316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:07.019331Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:07.019357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:07.019400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:11:07.021030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.021071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.021111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.021812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:07.022256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:11:07.022340Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:11:07.022561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.022770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:07.022983Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-07T14:11:07.023519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:07.023691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 
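As an aside for readers working with this output: every entry in the trace above follows the same shape (ISO timestamp, node id, component tag, severity, message). Below is a minimal parsing sketch in Python; it is not part of the YDB tooling, and the regex and function name are illustrative assumptions.

# A minimal sketch (not part of the test output): parsing one log entry of the
# form seen above, e.g.
#   "2025-04-07T14:11:07.021030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard ..."
# The regex and helper name are illustrative assumptions, not YDB tooling.
import re

ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node\s+(?P<node>\d+)\s+"
    r":(?P<component>\w+)\s+"
    r"(?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT)\s*:\s*"
    r"(?P<message>.*)"
)

def parse_entry(line):
    """Return the fields of a single log entry as a dict, or None if it does not match."""
    m = ENTRY_RE.match(line.strip())
    return m.groupdict() if m else None

# Example with an entry copied from the log above:
entry = parse_entry(
    "2025-04-07T14:11:07.022340Z node 1 :HIVE INFO: "
    "[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 "
    "ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546"
)
print(entry["component"], entry["level"])  # HIVE INFO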
2025-04-07T14:11:07.024520Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-07T14:11:07.024850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.024964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-07T14:11:07.025863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.025905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.026006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:07.026475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.026504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.026541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.027693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:07.027725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:07.027969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:07.027999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:07.028040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:07.028064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:07.029001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.029053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:11:07.029242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:07.029273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:07.029522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:07.029583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.029603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [1:515:2470] TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:07.029934Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.030036Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 116us result status StatusPathDoesNotExist 2025-04-07T14:11:07.030142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.606119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.606214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.606255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.606290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.606334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.606359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.606416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.606699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:06.648174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:06.648210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:06.655498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:06.655582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:06.655729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:06.659313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:06.659450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:06.659800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.659934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:06.661150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.661959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.661998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.662070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.662104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.662131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.662235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.666174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.736173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:06.736349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.736480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:06.736613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:06.736642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.738573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.738666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:06.738762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.738802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:06.738824Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:06.738857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:06.740070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.740109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:06.740132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:06.741125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.741151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.741178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.741227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.743257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.744339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.744470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.745077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.745154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.745185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.745372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.745401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.745497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.745545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:06.746703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.746728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.746827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.746862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:06.747070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.747113Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:06.747168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:06.747188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.747217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:06.747244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.747266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:06.747287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:06.747331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:06.747360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:06.747394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:06.748483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.748556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:06.748579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:06.748600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:06.748629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.748713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
.026076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:07.026121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-07T14:11:07.026387Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:11:07.026896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:07.026992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-04-07T14:11:07.027387Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-07T14:11:07.027558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409551 2025-04-07T14:11:07.028132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-07T14:11:07.028274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:07.028534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-07T14:11:07.029203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.029299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:07.030055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:07.030158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:07.030623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.030653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.030734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:07.031218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-07T14:11:07.031250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-07T14:11:07.031359Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-07T14:11:07.031376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-07T14:11:07.032645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:07.032700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:07.032749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.032807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.032840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.032884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.032981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:07.032995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:07.033096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-07T14:11:07.033131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-07T14:11:07.034356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:07.034383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:07.034430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:07.034454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:07.034607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.035559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-07T14:11:07.035734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:07.035772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T14:11:07.035842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:07.035860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:07.036216Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:07.036332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:07.036377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.036407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:731:2616] 2025-04-07T14:11:07.036530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.036552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:731:2616] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-07T14:11:07.036949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.037091Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 155us result status StatusPathDoesNotExist 2025-04-07T14:11:07.037249Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:07.037579Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.037755Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 133us result status StatusPathDoesNotExist 2025-04-07T14:11:07.037879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:07.038247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.038381Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 139us result status StatusSuccess 
2025-04-07T14:11:07.038679Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:07.157250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:07.157321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:07.157349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:07.157370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:07.157397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:07.157414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:07.157466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:07.157685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:07.200797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:07.200839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:07.207569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T14:11:07.207690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:07.207858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:07.212557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:07.212692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:07.213038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.213156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:07.214273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.215008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.215044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.215134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:07.215171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.215199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:07.215314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.219441Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:07.296190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:07.296348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.296477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:07.296610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:07.296651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.298290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.298378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:07.298477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.298514Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:07.298537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:07.298572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:07.299865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.299907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:07.299933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:07.301000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.301037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.301083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.301138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.303245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:07.304276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:07.304386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:07.305010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.305094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:07.305128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.305299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:07.305327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.305451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.305525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:07.306678Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.306705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.306799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.306821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:07.306986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.307018Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:07.307110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:07.307131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.307155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:07.307182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.307216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:07.307234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:07.307275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.307302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:07.307324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:07.312294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.312366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.312388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:07.312412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:07.312447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.312511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-07T14:11:07.335787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:07.335843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:07.336905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.336947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.336971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:07.337010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T14:11:07.337085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:07.338245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:07.338328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-07T14:11:07.338539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.338597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:07.338623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:07.338793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:07.338838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-07T14:11:07.338969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.339012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:07.339043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.340348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.340383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.340453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:07.340494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.340511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:07.340541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:07.340691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.340722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:07.340783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:07.340834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:07.340868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:07.340899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:07.340947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:07.340977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:07.341036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:07.341069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:07.341098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:11:07.341121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:11:07.341470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:07.341519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:07.341547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:07.341573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:11:07.341601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.342128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
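The entries just above show one full scheme-board publication cycle for txId 101: the announced publication count ("Publication still in progress, tx: 101, publications: 2"), one AckPublish per path (versions 5 and 3), and the final "Publication complete, notify & remove". Below is a sketch, under the same one-entry-per-line assumption, that cross-checks those three kinds of messages; all names are illustrative.

# A minimal sketch (assumption: one log entry per line, as produced by a
# splitter like the parser shown earlier). It checks that each announced
# publication count is matched by that many AckPublish entries and a final
# "Publication complete" entry for the same txId.
import re
from collections import defaultdict

EXPECTED_RE = re.compile(r"Publication still in progress, tx: (\d+), publications: (\d+)")
ACK_RE = re.compile(r"AckPublish, at schemeshard: \d+, txId: (\d+)")
COMPLETE_RE = re.compile(r"Publication complete, notify & remove, .* txId: (\d+)")

def check_publications(messages):
    expected, acks, complete = {}, defaultdict(int), set()
    for msg in messages:
        if m := EXPECTED_RE.search(msg):
            expected[m.group(1)] = int(m.group(2))
        elif m := ACK_RE.search(msg):
            acks[m.group(1)] += 1
        elif m := COMPLETE_RE.search(msg):
            complete.add(m.group(1))
    # True for a txId when every announced publication was acked and completion was logged.
    return {tx: (acks[tx] == n and tx in complete) for tx, n in expected.items()}

# For txId 101 above: publications: 2, two AckPublish entries, then
# "Publication complete, notify & remove" -> {'101': True}.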
2025-04-07T14:11:07.342176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:07.342193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:07.342218Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:07.342234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:07.342276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:07.344221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:07.344267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:11:07.346029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:07.346162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.346270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.347540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.347643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-07T14:11:07.347826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:11:07.347855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T14:11:07.347934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:11:07.347947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:11:07.348239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:11:07.348297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.348317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2298] 2025-04-07T14:11:07.348406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:11:07.348467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.348481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:04.742325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:04.742389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.742413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:04.742436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:04.742474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:04.742493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:04.742531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:04.742769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:04.784116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-07T14:11:04.784151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:04.790345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:04.790429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:04.790555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:04.794048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:04.794171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:04.794570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.794683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:04.795714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.796542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.796581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.796657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:04.796708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.796736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:04.796834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.800775Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:04.872713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:04.872897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.873076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:04.873264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:04.873321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.874908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.875001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-04-07T14:11:04.875129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.875176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:04.875211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:04.875250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:04.876905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.876956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:04.876994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:04.878015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.878042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.878069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.878114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.880216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:04.881249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:04.881361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:04.881969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:04.882061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:04.882095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.882262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:04.882295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:04.882399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.882447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:04.883551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:04.883578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:04.883673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:04.883697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:04.883888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:04.883913Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:04.883987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:04.884007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.884037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:04.884062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:04.884081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:04.884099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:04.884140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:04.884167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:04.884189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:04.885322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.885383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:04.885405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:04.885428Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:04.885467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:04.885536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
let# 72075186233409546 2025-04-07T14:11:07.390368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:07.390399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-07T14:11:07.390459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-07T14:11:07.390534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-07T14:11:07.390600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-07T14:11:07.390635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:07.390907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:07.390933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-07T14:11:07.391014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:07.391048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2025-04-07T14:11:07.391143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:07.391188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-07T14:11:07.392978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:07.393811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:07.393843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:07.393971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-07T14:11:07.394159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:07.394194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:07.394224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-07T14:11:07.394275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-07T14:11:07.394795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:07.394843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-07T14:11:07.394919Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:07.394953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-07T14:11:07.394982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-07T14:11:07.395666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:07.395755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:07.395799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:07.395833Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-07T14:11:07.395870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-07T14:11:07.396593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:07.396655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:07.396692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:07.396731Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:07.396757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-07T14:11:07.396809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:11:07.398560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:07.398605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-07T14:11:07.398955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:07.399132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 
2025-04-07T14:11:07.399161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:07.399220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:11:07.399270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:552:2488] message: TxId: 104 2025-04-07T14:11:07.399318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:07.399348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:11:07.399386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:11:07.399475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-07T14:11:07.399881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:07.399914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:07.400464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:07.401252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:07.401983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:07.402013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-07T14:11:07.402073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.402099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:811:2732] 2025-04-07T14:11:07.402496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-07T14:11:07.403475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-07T14:11:07.403647Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 166us result status StatusSuccess 2025-04-07T14:11:07.403913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:05.782023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:05.782106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.782136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:05.782161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:05.782189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:05.782209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:05.782244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:05.782463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:05.826009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:05.826045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:05.832468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:05.832580Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:05.832719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:05.836004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:05.836133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:05.836471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.836581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:05.837702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.838444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.838480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.838585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:05.838623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.838650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:05.838782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.842734Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:05.913213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:05.913382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.913526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:05.913675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:05.913726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.915122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.915213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:05.915316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.915358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:05.915383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:05.915413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:05.916493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.916532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:05.916562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:05.917554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.917580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.917607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.917653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.923658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:05.924914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:05.925026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:05.925668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:05.925753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:05.925804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.925973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:05.926002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:05.926113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.926168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:05.927402Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:05.927430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:05.927534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:05.927562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:05.927738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:05.927770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:05.927831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:05.927849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.927883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:05.927917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:05.927942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:05.927959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:05.928001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:05.928032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:05.928093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:05.929254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.929315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:05.929337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:05.929360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:05.929395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:05.929457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
erId: 72057594046678944, cookie: 139 2025-04-07T14:11:07.981852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-04-07T14:11:07.981870Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-04-07T14:11:07.981889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-04-07T14:11:07.982760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-07T14:11:07.982830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-07T14:11:07.982860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-04-07T14:11:07.982878Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:07.982906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:07.982950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-04-07T14:11:07.984008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.984044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.984060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.984073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.984087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.984388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-07T14:11:07.985490Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:11:07.985882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.986088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-07T14:11:07.987218Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2025-04-07T14:11:07.987431Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2025-04-07T14:11:07.989062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2025-04-07T14:11:07.989203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 2025-04-07T14:11:07.989708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-07T14:11:07.989860Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:11:07.990069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-04-07T14:11:07.990208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 Forgetting tablet 72075186233409555 2025-04-07T14:11:07.991034Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2025-04-07T14:11:07.992975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.993092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:07.993321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-04-07T14:11:07.993428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-07T14:11:07.993617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-07T14:11:07.993951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.993997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-07T14:11:07.994048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:07.994246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.994284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.994364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:07.995447Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:07.995487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:07.995570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-07T14:11:07.995583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-04-07T14:11:07.995616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-04-07T14:11:07.995640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-04-07T14:11:07.997566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:07.997596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:07.997675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-07T14:11:07.997720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-04-07T14:11:07.997908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.998003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.998097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.998136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.998223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.999787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-04-07T14:11:08.000541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-04-07T14:11:08.000586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-04-07T14:11:08.001751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-04-07T14:11:08.001853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-04-07T14:11:08.001894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2086:3889] TestWaitNotification: OK eventTxId 139 2025-04-07T14:11:08.003480Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:08.003662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 233us result status 
StatusSuccess 2025-04-07T14:11:08.004034Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.843693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.843767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.843798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.843823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.843854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.843872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.843923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.844167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:06.890473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:06.890521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:06.897437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:06.897568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:06.897700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-07T14:11:06.901559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:06.901696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:06.902080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.902210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:06.903405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.904237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:06.904280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:06.904370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:06.904408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:06.904438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:06.904544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.908881Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:06.982112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:06.982279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.982438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:06.982586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:06.982619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.984152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.984251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:06.984367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.984415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:06.984441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:06.984494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:06.985615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.985651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:06.985673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:06.986868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.986909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:06.986947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.987029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:06.994964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:06.996853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:06.997056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:06.998017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:06.998150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:06.998203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.998454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:06.998503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:06.998660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:06.998738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:07.000564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.000609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.000775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.000819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:07.001066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.001107Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:07.001196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:07.001230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.001289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:07.001333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.001365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:07.001427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:07.001495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.001537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:07.001570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:07.003325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.003425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.003457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:07.003510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:07.003562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.003674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
xAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:11:07.731575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.731614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.731732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:07.731888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.731922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-07T14:11:07.731974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:11:07.732322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.732365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-04-07T14:11:07.732474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:11:07.732525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:07.732571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:11:07.732603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:07.732634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:11:07.732696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:11:07.732816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:07.732855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-07T14:11:07.732889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:11:07.732933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:11:07.733505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:11:07.733567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:11:07.733594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:11:07.733631Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:11:07.733666Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.734232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:11:07.734350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:11:07.734386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:11:07.734413Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:07.734436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:07.734512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T14:11:07.735925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.735973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:07.737202Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-07T14:11:07.738049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.738260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:07.738443Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-07T14:11:07.739116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-04-07T14:11:07.739537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-07T14:11:07.739716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:07.740073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:11:07.740490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.740533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.740623Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:11:07.740929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:07.740968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:11:07.741024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.742697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:11:07.742746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:11:07.743618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:11:07.743682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-07T14:11:07.744741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:07.744846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:11:07.745169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:11:07.745212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:11:07.745742Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:11:07.745818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:11:07.745855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2093:3700] TestWaitNotification: OK eventTxId 104 2025-04-07T14:11:07.753015Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.753247Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 201us result status StatusPathDoesNotExist 2025-04-07T14:11:07.753405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:11:07.754111Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:07.754255Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 147us result status StatusPathDoesNotExist 2025-04-07T14:11:07.754382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:08.118394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:08.118471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:08.118498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:08.118523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:08.118556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:08.118572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:08.118608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:08.118838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:08.162557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:08.162600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:08.169358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:08.169450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:08.169580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-07T14:11:08.173397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:08.173523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:08.173872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:08.173990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:08.175071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:08.175860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:08.175907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:08.175984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:08.176017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:08.176047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:08.176151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.181135Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:08.258857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:08.259008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.259170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:08.259299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:08.259349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.260918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:08.261024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:08.261134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.261176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:08.261211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:08.261246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:08.262306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.262341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:08.262362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:08.263387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.263414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.263436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:08.263488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:08.266154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:08.267262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:08.267396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:08.268042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:08.268131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:08.268165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:08.268326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:08.268356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:08.268457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:08.268505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:08.269658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:08.269684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:08.269778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:08.269814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:08.269976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.270003Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:08.270063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:08.270085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:08.270110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:08.270141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:08.270165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:08.270185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:08.270246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:08.270275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:08.270292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:08.271484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:08.271548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:08.271569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:08.271592Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:08.271630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:08.271718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
lete, at schemeshard: 72057594046678944 2025-04-07T14:11:08.306200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:08.306223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T14:11:08.306326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.306376Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:08.306421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-07T14:11:08.306510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:08.306916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.306985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.307007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:08.307029Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:11:08.307049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:08.308137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.308193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.308210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:08.308226Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:11:08.308241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:08.308283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-07T14:11:08.309241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-07T14:11:08.309363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-07T14:11:08.309692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:08.309756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:08.309797Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:08.309868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-07T14:11:08.309995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:11:08.310028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:08.310520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:08.311093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-07T14:11:08.312012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:08.312034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:08.312104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:08.312143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:08.312171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-07T14:11:08.312196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-07T14:11:08.312233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.312255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:11:08.312315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:08.312336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:08.312363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:08.312400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:08.312430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-07T14:11:08.312456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:11:08.312490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:08.312511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:11:08.312533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:11:08.312548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:11:08.313280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.313336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.313357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:08.313391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:11:08.313413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:08.313982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.314036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:08.314054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:08.314082Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:11:08.314099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:08.314142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:08.315881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:11:08.316190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:11:08.318643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:08.318894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:08.318962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-04-07T14:11:08.319061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-04-07T14:11:08.320458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:08.320596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2025-04-07T14:07:04.650116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:07:04.650433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:07:04.650680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018b6/r3tmp/tmpSpMYY2/pdisk_1.dat 2025-04-07T14:07:04.980645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.016452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:05.069252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:05.069379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:05.080489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:07:05.190267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:07:05.220380Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:07:05.221133Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:07:05.221437Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:07:05.221614Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:07:05.262333Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:07:05.263108Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:07:05.263215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:07:05.264856Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:07:05.264956Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:07:05.265034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:07:05.265397Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:07:05.286403Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:07:05.286558Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:07:05.286651Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:07:05.286688Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:07:05.286712Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:07:05.286738Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.287035Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.287076Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.287230Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:07:05.287298Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:07:05.287591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.287614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.287642Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:07:05.287685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.287733Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:07:05.287778Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.287807Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:07:05.287831Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:07:05.287854Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:07:05.287887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:07:05.287976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:07:05.288010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:07:05.288088Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:07:05.288322Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:07:05.288361Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:07:05.288435Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:07:05.288477Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:07:05.288504Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:07:05.288530Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:07:05.288550Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.288759Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:07:05.288786Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:07:05.288812Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:07:05.288842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.288880Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:07:05.288906Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:07:05.288932Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:07:05.288953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.288972Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:07:05.289909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:07:05.289946Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:07:05.300486Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:07:05.300546Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:07:05.300579Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:07:05.300628Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:07:05.300711Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:07:05.480079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.480134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:07:05.480169Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:07:05.480527Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:07:05.480577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:07:05.480710Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:07:05.480752Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:07:05.480793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:07:05.480829Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:07:05.485049Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:07:05.485117Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:07:05.485986Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.486037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:07:05.486089Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:07:05.486126Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:07:05.486159Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:07:05.486198Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:10:48.466020Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:48.515624Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:10:48.515844Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:10:48.527755Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:10:48.643402Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:10:48.958220Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:708:2591], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:48.958368Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:719:2596], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:48.958477Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:48.965360Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:10:49.173773Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:722:2599], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:10:49.448565Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89dvfv7vr8bh2k4tf7xyym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=MjE5ZDVhYjYtNmQzNjM1Y2EtYzg0NDlkYi0xOTAxNzYwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:53.288239Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:10:53.288621Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:10:53.288712Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018b6/r3tmp/tmpRONmHO/pdisk_1.dat 2025-04-07T14:10:53.617838Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:10:53.650042Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:10:53.699798Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:10:53.699991Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:10:53.711817Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:10:53.825279Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:10:54.182949Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:704:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:54.183060Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:715:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:54.183155Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:10:54.188021Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:10:54.388979Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:718:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:10:54.758576Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89e0k57yvw08sar1yjt3em, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MmJmODVlZDctYTg4YTljZGEtZDJhNzc1NzUtZTBmZjgwMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:55.255299Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89e1933v0ek4vnarhdza2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTA1OGE4ODUtODQyOTFhZGYtMjQwMWZjZWMtYjU4OWZhMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:55.682158Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89e1qd02334vc7d3sgxgr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YmQ4ZGU1NDEtOGNmNTVkOTgtOTFiYzJmYzktZmE2MzBhOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:56.074666Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89e22y2wcd1phx4aypcmah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OTEwYzczY2QtYmE2OGVjMjctYzdlNGVhYWUtY2M2NjhmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:56.398354Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89e2f92tskr4qh5pyaf5zz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OWRkYzQzMWEtY2ZiMzQ5NTQtZmVhNDU3NGUtMjU4M2E2ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:56.669068Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89e2sffs54p5cyhpsf8f2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTkyNTcyYWQtZGQ3NmU2NC1mYTVlNjBjMC0xNzcwNzZlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:56.977156Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr89e3231kq3fn5rsqaexrrn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NWQxYjhjMzMtM2MyMWRkZWItODczMjYwYjktZmZiNTVlMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:57.275758Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89e3bm324mg8gxbc76zscq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NjNjMGU2ZWQtYTU4MWIzNjktMWNlMDRiYzMtMTliN2Y1MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:57.543986Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr89e3mr6zz5x5v68ycq0ck9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZTQ4MTQ2Y2ItZjBiNTkzMzktNjdmOTJiOC1mYWZjMDg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:10:57.814833Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89e3xd623tkq4w655t9zf5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NWEwNjNlZWYtYTVkZjllMzAtZTc0YjllZGItYWJkNGQyNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2025-04-07T14:11:00.978616Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:11:00.978695Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1550 LastUpdateTime: 1550 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9776 Memory: 17425232 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1550 LastUpdateTime: 1550 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5432 Memory: 124716 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1550 LastUpdateTime: 1550 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5432 Memory: 124716 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T14:11:07.410164Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr89eda7ftm3tq7efkkmtd2e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OTA5Y2M2OGMtZGI3YTliYWYtYWRhMDMzNi03ZmQxODc5ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 27233, MsgBus: 25394 2025-04-07T14:07:08.753394Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572550359140923:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:08.753468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001042/r3tmp/tmpZ8K4VJ/pdisk_1.dat 2025-04-07T14:07:09.067253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27233, node 1 2025-04-07T14:07:09.134766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:09.134788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:09.134795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:09.134895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:09.149023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:09.149136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:09.150985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25394 TClient is connected to server localhost:25394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:09.595343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.609801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.762783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:07:09.877790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.937125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:11.196500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572563244044528:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.196574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.427849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.451769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.470262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.494191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.518524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.547960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.619386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572563244045023:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.619477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.619480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572563244045028:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.622543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:11.630652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572563244045030:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:12.530203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.553314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.574941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.599186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.621715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.724985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.787639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.809826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.835009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.859615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.880997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.906539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.929977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.438298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-07T14:07:13.472580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.495375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.522283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.544738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.571393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.599761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.627126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T14:07:13.656 ... 72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:57.012426Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:57.012471Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:57.012632Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:57.012689Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:57.012833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:57.012870Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:57.013030Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:57.013065Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:57.013172Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:10:57.013199Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:10:57.013553Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:57.013586Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:57.013651Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:57.013687Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:57.013848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:57.013885Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:57.013962Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:57.013987Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:57.014033Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:57.014055Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:57.014080Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:57.014106Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:57.014348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:57.014381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:57.014524Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:57.014554Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:57.014664Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:57.014701Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:57.014848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:57.014875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:57.014955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:10:57.014984Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:10:57.021111Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:57.021157Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:57.021225Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:57.021257Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:57.021383Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:57.021410Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:57.021479Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:57.021507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:57.021559Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:57.021584Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:57.021613Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:57.021639Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:57.021882Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:57.021922Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:57.022094Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:57.022124Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:57.022233Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:57.022261Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:57.022398Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:57.022426Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:57.022514Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:10:57.022541Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TargetDiscoverer::Negative >> TargetDiscoverer::Dirs >> TargetDiscoverer::IndexedTable >> TargetDiscoverer::Basic >> KqpScan::ScanDuringSplit10 >> KqpScan::RemoteShardScan >> KqpScan::ScanRetryRead >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> TBackupTests::BackupUuidColumn[Zstd] >> TBackupTests::BackupUuidColumn[Raw] >> TargetDiscoverer::Negative [GOOD] >> TargetDiscoverer::Basic [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TargetDiscoverer::Dirs [GOOD] >> TargetDiscoverer::IndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-04-07T14:11:10.308105Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573591335672803:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:10.308149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ad5/r3tmp/tmpSwh7ct/pdisk_1.dat 2025-04-07T14:11:10.546365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:10.645280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:10.645423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:10.646963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61530 TServer::EnableGrpc on GrpcPort 28973, node 1 2025-04-07T14:11:10.802779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:10.802801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:10.802818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:10.802927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:11.153491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:11.196965Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-07T14:11:11.197018Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSchemeShardLoginTest::BasicLogin |94.2%| [TA] $(B)/ydb/core/kqp/ut/olap/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-04-07T14:11:10.308078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573590783765300:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:10.308124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001aa8/r3tmp/tmpG4eMGp/pdisk_1.dat 2025-04-07T14:11:10.547956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:10.644821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:10.644938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:10.646396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8140 TServer::EnableGrpc on GrpcPort 26542, node 1 2025-04-07T14:11:10.802799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:10.802828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:10.802835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:10.802934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:11.170155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:11.180033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:11.287872Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744035071219, tx_id: 1 } } } 2025-04-07T14:11:11.287904Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-07T14:11:11.301816Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071289, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-07T14:11:11.301838Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-07T14:11:12.125031Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071289, tx_id: 281474976710658 } } } 2025-04-07T14:11:12.125060Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-07T14:11:12.125075Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-04-07T14:11:10.308121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573589746236604:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:10.308174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ab0/r3tmp/tmprUdNjY/pdisk_1.dat 2025-04-07T14:11:10.568986Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:10.669052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:10.669142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:10.670771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29269 TServer::EnableGrpc on GrpcPort 17759, node 1 2025-04-07T14:11:10.802850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:10.802874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:10.802882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:10.802997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:11.154010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:11:11.181786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:11.287142Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744035071212, tx_id: 1 } } } 2025-04-07T14:11:11.287203Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-07T14:11:11.300267Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744035071226, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-07T14:11:11.300302Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-07T14:11:11.305384Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071289, tx_id: 281474976715659 } }] } } 2025-04-07T14:11:11.305415Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-04-07T14:11:12.076781Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071289, tx_id: 281474976715659 } } } 2025-04-07T14:11:12.076805Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-04-07T14:11:12.076818Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> 
TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-04-07T14:11:10.308058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573592154953853:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:10.308105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a96/r3tmp/tmpqwRETw/pdisk_1.dat 2025-04-07T14:11:10.556269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:10.632735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:10.633354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:10.634949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15451 TServer::EnableGrpc on GrpcPort 64905, node 1 2025-04-07T14:11:10.802924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:10.802960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:10.802967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:10.803074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:11.153876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:11.170557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:11.480915Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744035071205, tx_id: 1 } } } 2025-04-07T14:11:11.480948Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-07T14:11:11.486843Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071429, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-07T14:11:11.486865Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-07T14:11:12.080986Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744035071429, tx_id: 281474976710658 } } } 2025-04-07T14:11:12.081015Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-07T14:11:12.081031Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-04-07T14:11:12.081110Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> TWebLoginService::AuditLogLogout >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TSchemeShardLoginTest::BasicLogin [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TWebLoginService::AuditLogLogout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:03.712962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:03.713043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.713081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:03.713110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:03.713146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:03.713163Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:03.713199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:03.713405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:03.752635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:03.752686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:03.759386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:03.759488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:03.759626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:03.763298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:03.763433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:03.763822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.763946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:03.765147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.766035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.766074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.766170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:03.766213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.766238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:03.766488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.770601Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:03.839772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:03.839971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.840199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:03.840415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:03.840481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.842624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.842755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:03.842913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.842971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:03.843004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:03.843048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:03.844599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.844638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:03.844686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:03.845988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.846023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.846056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.846127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.849343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:03.850812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:03.850980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:03.851875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:03.851998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:03.852037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.852261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-07T14:11:03.852302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:03.852443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.852523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:03.853969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:03.854006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:03.854153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:03.854193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:03.854465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:03.854502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:03.854573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:03.854611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.854648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:03.854680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:03.854709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:03.854737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:03.854790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:03.854826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:03.854852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:03.861078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.861197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:03.861229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:03.861258Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:03.861306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:03.861401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 186233409546 2025-04-07T14:11:12.262961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:12.262994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-07T14:11:12.263980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:11:12.264030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-07T14:11:12.264444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.264544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 375 RawX2: 4294969642 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:12.264603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2025-04-07T14:11:12.264759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:12.264833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-07T14:11:12.264992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:12.265040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:12.266418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:12.266453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:11:12.266789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.266819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:12.266928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:12.266970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:11:12.267078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.267118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T14:11:12.267146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-07T14:11:12.267166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 
2025-04-07T14:11:12.267500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.267540Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-07T14:11:12.267624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-07T14:11:12.267654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:12.267693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-07T14:11:12.267725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-07T14:11:12.267772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-07T14:11:12.267800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-07T14:11:12.267903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:11:12.267942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-07T14:11:12.267971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-04-07T14:11:12.267996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-07T14:11:12.268646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:12.268739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:12.268771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:12.268806Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:11:12.268840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:11:12.269421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:11:12.269471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:11:12.269523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:11:12.269760Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-07T14:11:12.269969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:12.270029Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-07T14:11:12.270053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-07T14:11:12.270075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-04-07T14:11:12.270096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:12.270146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-07T14:11:12.270422Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-07T14:11:12.271078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:11:12.271359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:11:12.273477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:12.273569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:11:12.274996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:11:12.275096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:11:12.275180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T14:11:12.275514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:11:12.275555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:11:12.275967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:11:12.276044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:11:12.276078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:715:2638] TestWaitNotification: OK eventTxId 103 2025-04-07T14:11:12.659600Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:12.659857Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 303us result status StatusSuccess 2025-04-07T14:11:12.660221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:12.780898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:12.780984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.781012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:12.781041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:12.781799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:12.781823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:12.781888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.782824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:12.834021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:12.834075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:12.842179Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:12.842281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:12.842392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:12.847871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:12.847990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:12.850040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.850790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:12.855381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:12.862603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:12.862750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:12.862899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.868860Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:12.957696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:12.959466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.960733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:12.962108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:12.962180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.964370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.965077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:12.965219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-07T14:11:12.965264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:12.965386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:12.965420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:12.967014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.967087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:12.967126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:12.968335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.968363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.968396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.968440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.976233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:12.977733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:12.977857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:12.978910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.979247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:12.979297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.979542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:12.979593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.979738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.979845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-07T14:11:12.981700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.981743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:12.981941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.981993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:12.982239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.982282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:12.982367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:12.982391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.982431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:12.982473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.982497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:12.982518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:12.982560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:12.982586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:12.982610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:12.984052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.984127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.984152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:12.984188Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:12.984218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.984283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.268598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:11:13.268719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T14:11:13.269526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.269635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.269690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:13.269861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:11:13.269967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.527243Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:402:2375], attempt# 0 2025-04-07T14:11:13.548759Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:402:2375], sender# [1:401:2374] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:11:13.552861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.552921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:13.553220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.553285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:11:13.553636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.553712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:62615 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE1D4AFA-5F69-44CD-A7AA-F964533103012025-04-07T14:11:13.554575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: 
binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-07T14:11:13.554706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:13.554745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:13.554800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:13.554846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:13.554926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:11:13.555349Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:62615 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9AAE963E-5C05-409A-841C-BB234F450689 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T14:11:13.567882Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2025-04-07T14:11:13.568949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:62615 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C5CA4B07-E1D9-45E7-83BE-3D7C3A0CB13E amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-07T14:11:13.571091Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-07T14:11:13.571202Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:401:2374] 2025-04-07T14:11:13.571319Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:402:2375], sender# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:62615 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F6413FC8-2DC5-4AA0-B10F-5CEC42068140 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-04-07T14:11:13.573628Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-04-07T14:11:13.573688Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:402:2375], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T14:11:13.573821Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T14:11:13.579645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.579688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:11:13.579840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.579914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.579965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.579996Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.580021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:11:13.580061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:11:13.580182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.581607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.581723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.581759Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:13.581826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-07T14:11:13.581854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.581891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:11:13.581938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:11:13.581967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.581990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:13.582010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:13.582096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.583522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:13.583567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:386:2360] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:13.654830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:13.654904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.654932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:13.654957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:13.654991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:13.655012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:13.655053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.655309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:13.703104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:13.703169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:13.711500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:13.711600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:13.711698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:13.717597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-07T14:11:13.717824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:13.720547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.720803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:13.725397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:13.732704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.732802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:13.732939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.738233Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:13.825054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.825197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.825384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:13.825582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:13.825628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.827432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.827637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:13.827805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.827853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:13.827897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:13.827931Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:13.829475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.829529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:13.829564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:13.830951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.830992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.831028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.831067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.834940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.836491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:13.836654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:13.837584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.837687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.837732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.837965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:13.838013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.838192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.838266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:13.839883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.839974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:11:13.840099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.840142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:13.840348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.840388Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:13.840478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:13.840512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.840552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:13.840588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.840629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:13.840680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:13.840731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:13.840777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:13.840815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:13.842504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.842599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.842679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:13.842721Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:13.842762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.842853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:13.845365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:13.845797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:11:13.847837Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:11:13.862867Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:11:13.865096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser 
{ User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.865262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Login authentication is disabled, at schemeshard: 72057594046678944 2025-04-07T14:11:13.869563Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:13.871283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Login authentication is disabled" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.871441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Login authentication is disabled, operation: CREATE USER, path: /MyRoot 2025-04-07T14:11:13.872439Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-07T14:11:13.873338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T14:11:13.873380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T14:11:13.984428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Login authentication is disabled", at schemeshard: 72057594046678944 2025-04-07T14:11:13.984557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.984621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.984808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.984852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T14:11:13.985381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-04-07T14:11:13.985750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:13.985916Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 160us result status StatusSuccess 2025-04-07T14:11:13.986324Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvofG7+lhUYl4QIFb1x9N\nZ4zc5Rw0UgUYA4oqKylL7JaDAZWiid1IqkZEjTR8flV5t7Sw0AYrdJJEhRKqZtR1\nKbkgGNHU3md/H++1SkzB4sfX+YfhhpL8ijjKLlvM/EAZpntztSTJxC/drrVwEIxg\nw3qAZgFLVTf67978sCJyPODAMZkRHeQbdJOhjtIzhFbk2I39KKAXSIvjpVooBV9U\n2SEWCJWqeMxm+9Kq6Zh/xv3FKKj7wQ1KTlhQ/Mpup37wYd2/+TbBk2PcO5as2jpq\ne/P4VCwaR7XG3JBewhog5ndqtqFN0m1r40/qNaLJsop42dPQNPoxvEHNMFbUKpMi\nTwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744121473982 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T14:11:14.018822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:14.018884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:14.018907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:14.018929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:14.018956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:14.018977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:14.019012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:14.019228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:14.064188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:14.064244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:14.076006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:14.076103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:14.076261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:14.082641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-07T14:11:14.082914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:14.083474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:14.083656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:14.085690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:14.086639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:14.086681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:14.086782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:14.086811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:14.086837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:14.086984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.091132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T14:11:14.160239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:14.160400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.160542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:14.160720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:14.160753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.162385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:14.162529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:14.162695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.162736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:14.162759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:14.162779Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:14.164207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.164246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:14.164268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:14.165357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.165387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.165416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:14.165451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:14.167611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:14.168760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:14.168881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:14.169460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:14.169534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:14.169559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:14.169721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:14.169754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:14.169884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:14.169931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:14.171469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:14.171530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:11:14.171710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:14.171749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:14.172063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:14.172109Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:14.172215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:14.172248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:14.172287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:14.172340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:14.172373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:14.172400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:14.172454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:14.172485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:14.172513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:14.174164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:14.174264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:14.174368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:14.174401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:14.174433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:14.174517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:14.177222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:14.177563Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:11:14.179427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:14.182508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, 
opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:14.182589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:14.182614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:14.182657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:14.182688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:11:14.182717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:14.182738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:14.182764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-07T14:11:14.182790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T14:11:14.182966Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T14:11:14.193155Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:273:2265]) 2025-04-07T14:11:14.193942Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:14.195795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:14.195930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-07T14:11:14.196090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:14.196113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:14.196197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:14.196236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:14.197063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:14.197148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:14.197173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:14.197194Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T14:11:14.197216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:14.197277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:14.197627Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:11:14.198558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-07T14:11:14.198877Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T14:11:14.205311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T14:11:14.205360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T14:11:14.347361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4Mjc0LCJpYXQiOjE3NDQwMzUwNzQsInN1YiI6InVzZXIxIn0.E-BCRQn8cHWE6bMCA9ORfslzRqTDG68v_HvAbzeEVqpkSOpwEI1oQQiC1m54ufhOBooapWaMNkoEbpZR9WD3XDxoy645AFZfJN3eBSU-4q1jfTfEGXTQiqukZXoJzao5PtrSY9b15FWPdfxsqbdDy68jmXG7i1O1tPT_5gxcsouHiddKWkNlhqE7ZPZxeVCZJmw7tljrO0K46xl0SjsAKhK78U4xbT6q1Y0Fuk_m6vywR6v3SJLjPvKnjd3bRyaYLEDBudwt-R068Zm6etfMrFJxYvBNfw12T-OgDuJtcDqZ4J0S-SiR14QoX9Yg9ZUuRwxGccmr-_-KIOw_-8T6YQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4Mjc0LCJpYXQiOjE3NDQwMzUwNzQsInN1YiI6InVzZXIxIn0.**", at schemeshard: 72057594046678944 2025-04-07T14:11:14.348097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:14.348150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:14.348323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:14.348358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T14:11:14.349493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-07T14:11:14.350235Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:14.350397Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 161us result status StatusSuccess 2025-04-07T14:11:14.350781Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr9zj1Ql3nFH+po3z9N/K\ndUAdsznO21Rw6IZfMOoVlEk60zMCeVQlFXAbWROhXBMYBwbQlvyP6tpGJRJq+cpN\ntKzLz8kZmK6Skku3woRrQkX5YrZ3h6GbdZGLD3pxt97OR6adWdmjxPuqbbmlmaZf\nk7iq/oxUMQu0dTzKmZUxunjBvKyD2MSadMs8jEC74BqHMWtWiy+ml8qEF3NV0wPj\n3Ls2BrVVCDrL/FZ8RSPPIMtQL0Y0nua8m+FTyqDG8a9BFGZTGIK7o5UScs840i0U\nU3jygfIZ0FQqb87BtQUvO0zriqyC/VOHLEnW3Myl8P4JTp1T+tdM217FF09lahEl\nBwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744121474337 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:14.351247Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-07T14:11:14.351295Z node 1 :HTTP ERROR: Logout: No ydb_session_id cookie 2025-04-07T14:11:14.352701Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-07T14:11:14.358959Z node 1 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2025-04-07T14:11:14.359055Z node 1 :HTTP ERROR: Logout: Token is not in correct format 2025-04-07T14:11:14.359463Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-04-07T14:11:14.162493Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:11:14.195893Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2025-04-07T14:11:14.347910Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4Mjc0LCJpYXQiOjE3NDQwMzUwNzQsInN1YiI6InVzZXIxIn0.** 2025-04-07T14:11:14.360364Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4Mjc0LCJpYXQiOjE3NDQwMzUwNzQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-04-07T14:11:14.360364Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4Mjc0LCJpYXQiOjE3NDQwMzUwNzQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: 
[1:107:2139] 2025-04-07T14:11:13.649461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:13.649530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.649558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:13.649582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:13.650257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:13.650281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:13.650326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.651270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:13.703275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:13.703313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:13.711260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:13.711330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:13.711436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:13.718789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:13.719055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:13.720558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.720747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:13.725568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.732740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:13.732772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.732808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:13.732920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.737738Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T14:11:13.821296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.822197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.822963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:13.824481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:13.824585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.826970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.827671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:13.827796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.827856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:13.827926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:13.827947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:13.829194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.829230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:13.829251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:13.830598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.830630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.830660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.830700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.833610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.834715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:13.835655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:13.836312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.836384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.836420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.836620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:13.836655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.836839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.836900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:13.838363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.838392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.838512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.838539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:13.838731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.838761Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:13.838822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:13.838844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.838873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:13.838906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.838932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:13.838954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:13.838987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:13.839009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:13.839029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-07T14:11:13.840109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.840178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.840205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:13.840264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:13.840290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.840379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:13.842198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:13.843099Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.845297Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T14:11:13.858320Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:275:2267]) 2025-04-07T14:11:13.858495Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T14:11:13.859416Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:31777, port: 31777 2025-04-07T14:11:13.872880Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:11:13.895464Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2025-04-07T14:11:13.936890Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2025-04-07T14:11:13.937259Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:31777. Invalid credentials 2025-04-07T14:11:13.938811Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-04-07T14:11:13.939067Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:13.941046Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-07T14:11:13.827630Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:11:13.937980Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:31777. 
Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-07T14:11:13.937980Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:31777. Invalid credentials, login_user=user1@ldap, sanitized_token={none} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BasicLogin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:13.656217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:13.656292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.656332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:13.656363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:13.656400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:13.656421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:13.656469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.656757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:13.718486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:13.718534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:13.727732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:13.727834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:13.727968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:13.732489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:13.732638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:13.733147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.733303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:13.734786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.735759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.735818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-07T14:11:13.735923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:13.735968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.736007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:13.736133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.740978Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:13.847503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.847675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.847848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:13.848067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:13.848123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.850030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.850147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:13.850277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.850319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:13.850347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:13.850378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:13.851884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.851931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:13.851961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:13.853295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.853337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.853369Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.853416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.856721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.858264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:13.858435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:13.859288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.859409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.859452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.859656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:13.859695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.859844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.859919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:13.861510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.861673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.861806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.861856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:13.862062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.862096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:13.862221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:13.862248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:11:13.862284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:13.862314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.862343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:13.862370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:13.862432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:13.862480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:13.862527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:13.864303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.864435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.864470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:13.864500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:13.864579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.864681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:13.867012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:13.867454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:11:13.867826Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:11:13.882001Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:11:13.884162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.888947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:13.889066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:11:13.889096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:13.889170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:13.889220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, 
ready parts: 1/1, is published: false 2025-04-07T14:11:13.889254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:11:13.889286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:11:13.889326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-07T14:11:13.889386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-07T14:11:13.890610Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:13.892379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.892516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-07T14:11:13.893070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.893110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.893324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.893363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-07T14:11:13.894110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:13.894184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:11:13.894219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:11:13.894266Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-07T14:11:13.894301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.894373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:11:13.894718Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:11:13.895890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-07T14:11:13.896203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2025-04-07T14:11:13.896233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-07T14:11:13.953373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin 
DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4MjczLCJpYXQiOjE3NDQwMzUwNzMsInN1YiI6InVzZXIxIn0.cb8zYOMy1F_dOtVFD3ZFBYl5AHPFOYXuZsa2D1i9oYdMr2a-UUGwI7qA398sAXeSmmQVYIMb6YHyMKQ8NFa-q9UQ6gB07K8dSYl0iAJ7RBMI6F51GXodH8reUxb6hDvjjC8ullJgc8hOVkHy4fQ7ItTbgZ7TpBiHLOTTTf9b8ytSdUm9CLPK4o8IVmXWQW1wUYfrH2pqYRq5s1otUbuOAq-e8J5pl9m_fklf5W41TJHsa69qHE9sj79jEn8in3JYbyEzXZMAHsmHEb7owTkikRFhfw8XQlgSt985Jl5TgqYpn4HHh6luj2zR2LKe3muLdlWLMH08E1UYyG9451dFkA" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MDc4MjczLCJpYXQiOjE3NDQwMzUwNzMsInN1YiI6InVzZXIxIn0.**", at schemeshard: 72057594046678944 2025-04-07T14:11:13.953547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.953574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.953695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.953724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-07T14:11:13.954030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-07T14:11:13.954643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:11:13.954790Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 120us result status StatusSuccess 2025-04-07T14:11:13.955762Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtthLu59JiRb00VADT/or\nOMlLVegPx33uhcH4/En2d4avKUK8qp6gofH8ICZ6a9AJ+2Ie5Xzh+zlSXMI0Jp4c\nfcLdaI5x/PDVnDD/1774+Nk2dUrY2vqI/R8xJlsVYmgvJRWX60OHK/xgt239sPib\nb01EUMFKuGEsaBG0692bbvolhLJkA4CtRde4BYGrPXe0EhsdoCyPi6RrLm3xmC5a\n8Ttly33GZA3uZ4WsRxme5zfQix5z0WOsQjblI23ieB2AaCI7jV56q1p/+AnYglye\nnQ6iBdWkHwnLjsCV9HOImqpRBZwcqNB2sW+Azicdm/51hgAXfqqkICjeCSu9l7mt\nbwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744121473946 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:12.780903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:12.780983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.781013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:12.781050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:12.781804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:12.781836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:12.781878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.782868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:12.833171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:12.833240Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:12.842683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:12.842757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:12.842836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:12.847436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:12.847854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:12.850040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.850835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:12.855412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862279Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.862562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:12.862620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:12.862685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:12.862885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.869374Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:12.960980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:12.961121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.961306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:12.962112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:12.962167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.964633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.965056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:12.965262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.965308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:12.965341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:12.965362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:12.966934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.966974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:12.967003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:12.968187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.968217Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.968243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.968289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.971398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:12.972623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:12.973527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:12.974467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.974554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:12.974585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.974750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:12.974783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.974922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.974999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:12.976513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.976545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:12.976693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.976729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:12.976890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.976937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:12.977038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#1:0 progress is 1/1 2025-04-07T14:11:12.977058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.977083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:12.977133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.977175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:12.977200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:12.977237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:12.977259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:12.977282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:12.978657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.978800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.978837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:12.978895Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:12.978941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.979041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.266157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:11:13.266243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T14:11:13.266982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:11:13.267373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.456594Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:402:2375], attempt# 0 2025-04-07T14:11:13.474969Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:402:2375], sender# [1:401:2374] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:11:13.479338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.479397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:13.479669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.479716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:11:13.480000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.480046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:13.480634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:13.480731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:11:13.480761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:13.480791Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:13.480820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:13.480879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:11:13.483042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:13468 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BDB1B469-B9C3-4AA3-89CD-D1297ACE0F59 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-07T14:11:13.485251Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:13468 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D8AAD136-A45A-4E3C-9A2E-4EF280B1AC2A amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T14:11:13.489486Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:13468 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 17ECAF62-E6DF-43A9-9A60-E2B3355CE06E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-07T14:11:13.494522Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-07T14:11:13.494598Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:401:2374] 2025-04-07T14:11:13.494685Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:402:2375], sender# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13468 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F4924001-6879-4436-95D3-CC381A695689 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-07T14:11:13.497002Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:402:2375], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-07T14:11:13.497054Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:402:2375], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T14:11:13.497192Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:401:2374], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T14:11:13.510903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:11:13.510989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:11:13.511391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:11:13.511602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-07T14:11:13.511706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.511787Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.511872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:11:13.511955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:11:13.512264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.514322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.514758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.514835Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:13.514984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-07T14:11:13.515034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.515102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:11:13.515257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:11:13.515329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.515385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:13.515433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:13.515603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.517416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:13.517496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:386:2360] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:12.781130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:12.781219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.781253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:12.781288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:12.781838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:12.781884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:12.781963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:12.782889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:12.848884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:12.848944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:12.858768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:12.858888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:12.859015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:12.864014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:11:12.864183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:12.864790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.864968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:12.866547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.867675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.867722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.867859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:12.867917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:12.867977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:12.868136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.873860Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:12.977216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:12.977432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.977675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:12.977899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:12.977954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.979870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.980102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:12.980257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.980310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:12.980348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:12.980558Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:12.982218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.982270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:12.982302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:12.983807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.983842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.983874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.983925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.987446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:12.988908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:12.989080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:12.990054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:12.990157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:12.990197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.990416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:12.990456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:12.990593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.990717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:12.992425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:12.992461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:11:12.992622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:12.992766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:12.993017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:12.993065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:12.993149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:12.993189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.993226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:12.993273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:12.993304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:12.993328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:12.993376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:12.993406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:12.993435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:12.995148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.995242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:12.995272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:12.995326Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:12.995358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:12.995433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.266610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:11:13.266698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-07T14:11:13.267082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-07T14:11:13.267335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-07T14:11:13.267424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.494868Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:403:2376], attempt# 0 2025-04-07T14:11:13.509569Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:403:2376], sender# [1:402:2375] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:11:13.512289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.512340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:11:13.512696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.512744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:11:13.513029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.513073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: 2025-04-07T14:11:13.513941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:61407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DA60E15D-15D3-404A-80D4-A0266BEFA9E6 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 2025-04-07T14:11:13.514037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-07T14:11:13.514068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:11:13.514107Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:11:13.514134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:11:13.514196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:11:13.515100Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:403:2376], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-07T14:11:13.518103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:61407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 370F19F2-1045-46F7-A998-25D2AEDC139D amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2025-04-07T14:11:13.519161Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:403:2376], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:61407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0E2F523-ABEB-4B3B-BD00-2A8380CC4E74 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-07T14:11:13.521899Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:403:2376], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-07T14:11:13.521974Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:402:2375] 2025-04-07T14:11:13.522633Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:403:2376], sender# [1:402:2375], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:61407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4F23A8D4-BF8A-4523-B5D9-A6A006118BFE amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-04-07T14:11:13.524818Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:403:2376], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-04-07T14:11:13.524873Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:403:2376], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-07T14:11:13.525021Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:402:2375], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T14:11:13.530826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.530867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:11:13.531007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.531089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-07T14:11:13.531167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.531207Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.531261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:11:13.531326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:11:13.531505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.532879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.533074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.533108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:13.533172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-04-07T14:11:13.533203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.533232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:11:13.533308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2025-04-07T14:11:13.533352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:13.533390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:13.533420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:13.533522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:13.534810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:13.534842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:388:2362] TestWaitNotification: OK eventTxId 102 |94.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> TPopulatorTest::MakeDir |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |94.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/olap/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] |94.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:06.937405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:06.937496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.937545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:06.937569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:06.937597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:06.937617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:06.937667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:06.937912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:07.000428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:07.000528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:07.009967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:07.010093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:07.010260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:07.015223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:07.015425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:07.015955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.016134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:07.017726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.018880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.018932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.019035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:07.019100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.019137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:07.019296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.025106Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:07.126965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:07.127124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.127277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:07.127410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:07.127453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.128966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.129099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:07.129219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.129263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:07.129292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:07.129312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:07.130484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.130517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:07.130546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:07.131599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.131628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.131664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.131701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.133832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:07.135046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:07.135190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:07.135780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:07.135872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:07.135927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.136093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:07.136123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:07.136223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.136306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:07.137462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:07.137490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:07.137586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:07.137612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:07.137794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:07.137822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:07.137890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:07.137914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.137940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:07.137974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:07.137998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 1:0 2025-04-07T14:11:07.138020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:07.138052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:07.138076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:07.138105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:07.139284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.139353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:07.139378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:07.139411Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:07.139437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:07.139506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... cy: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 548 } } 2025-04-07T14:11:16.637754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:16.637797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-07T14:11:16.637913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 518 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-07T14:11:16.637947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-07T14:11:16.638299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.638342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-07T14:11:16.638376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:11:16.638410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-07T14:11:16.638474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-07T14:11:16.638568Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-07T14:11:16.638667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-07T14:11:16.638713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:16.640328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.641222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.641431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:16.641462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:16.641598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-07T14:11:16.641740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:16.641772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-07T14:11:16.641818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-07T14:11:16.642054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.642094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-07T14:11:16.642173Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.642213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-07T14:11:16.642257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-07T14:11:16.643283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:16.643363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:16.643393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:16.643422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-04-07T14:11:16.643453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-07T14:11:16.644005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: 
Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:16.644055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-07T14:11:16.644072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-07T14:11:16.644091Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:11:16.644110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-07T14:11:16.644150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:11:16.646533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-07T14:11:16.646577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-07T14:11:16.646926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-07T14:11:16.647078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:11:16.647107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:16.647153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-07T14:11:16.647198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:552:2488] message: TxId: 104 2025-04-07T14:11:16.647230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:11:16.647260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:11:16.647293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:11:16.647356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-07T14:11:16.647683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:11:16.647711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:11:16.648113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:16.648915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-07T14:11:16.649809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:11:16.649853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2394], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-07T14:11:16.650254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:11:16.650287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1419:3331] 2025-04-07T14:11:16.650572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-07T14:11:16.653621Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-07T14:11:16.653771Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 181us result status StatusSuccess 2025-04-07T14:11:16.654078Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-04-07T14:11:16.783564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:16.783646Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-07T14:11:16.868068Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2025-04-07T14:11:16.868142Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-07T14:11:16.868967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.869021Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.869038Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.869213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2025-04-07T14:11:16.869236Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-07T14:11:16.869288Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-07T14:11:16.869318Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-07T14:11:16.869352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-07T14:11:16.870818Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.870845Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.870883Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.871035Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2025-04-07T14:11:16.871092Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2025-04-07T14:11:16.871112Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-07T14:11:16.871154Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-07T14:11:16.871193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-07T14:11:16.871215Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-07T14:11:16.871443Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2025-04-07T14:11:16.871545Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2025-04-07T14:11:16.872607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2025-04-07T14:11:16.872630Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-07T14:11:16.872679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] 
Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2025-04-07T14:11:16.872696Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-07T14:11:16.876016Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2025-04-07T14:11:16.876045Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-07T14:11:16.876112Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876197Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876583Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2025-04-07T14:11:16.876614Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-07T14:11:16.876698Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-04-07T14:11:16.876724Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-04-07T14:11:16.876743Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-04-07T14:11:16.876809Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2025-04-07T14:11:16.876878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876898Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2025-04-07T14:11:16.876971Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2025-04-07T14:11:16.876986Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-07T14:11:16.877190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2025-04-07T14:11:16.877209Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-07T14:11:16.877259Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-07T14:11:16.877278Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-07T14:11:16.878499Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2025-04-07T14:11:16.878553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2025-04-07T14:11:16.878577Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-07T14:11:16.878700Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2025-04-07T14:11:16.878715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> LocalPartitionReader::Simple |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> LocalPartitionReader::Booting [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact |94.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |94.3%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:107:2139] 2025-04-07T14:11:19.038455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:19.038514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:19.038534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:19.038555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:19.038583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:19.038600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:19.038635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:19.038830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:19.084307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:19.084349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:19.094316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:19.094387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:19.094515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:19.099589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:19.099820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:19.100392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:19.100527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:19.102579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:19.103730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:11:19.103764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:19.103890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:19.103921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:19.103945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:19.104047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.108630Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-07T14:11:19.173940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:19.174100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.174239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:19.174395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:19.174426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.176166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:19.176270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:19.176375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.176417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:19.176440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:19.176458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:19.177690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.177725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:19.177747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:19.178929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.178960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.178986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:19.179023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:19.181026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:19.182291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:19.182455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:19.183045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:19.183116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:19.183155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:19.183314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:19.183349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:19.183468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:19.183523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:19.184730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:19.184762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:19.184890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:19.184917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:11:19.185123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:19.185155Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:19.185216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:19.185237Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:19.185262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:19.185295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:19.185318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:19.185336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:19.185371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:19.185392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:19.185408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:19.186670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:19.186740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:19.186766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:19.186786Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:19.186846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:19.186902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:11:19.188839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:11:19.189162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:19.190130Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Bootstrap 2025-04-07T14:11:19.199968Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] Become StateWork (SchemeCache [1:275:2267]) 2025-04-07T14:11:19.200154Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-07T14:11:19.200443Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15160, port: 15160 2025-04-07T14:11:19.201154Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:11:19.207064Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15160. 
Invalid credentials 2025-04-07T14:11:19.207953Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-04-07T14:11:19.208184Z node 1 :TX_PROXY DEBUG: actor# [1:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:19.209996Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-07T14:11:19.176244Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-07T14:11:19.207725Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15160. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-07T14:11:19.207725Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15160. Invalid credentials, login_user=user1@ldap, sanitized_token={none} |94.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-07T14:06:10.709988Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.710028Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.710048Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 
2025-04-07T14:06:10.711365Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.711414Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.711458Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.712419Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007717s 2025-04-07T14:06:10.712793Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.712813Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.712825Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.712853Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007253s 2025-04-07T14:06:10.713139Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:06:10.713158Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.713187Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:06:10.713219Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007936s 2025-04-07T14:06:10.725356Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744034770725331 2025-04-07T14:06:11.049735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572306160645892:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.049843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.077616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490572304810099880:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:06:11.085068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:06:11.261811Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:06:11.276451Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e49/r3tmp/tmpMe9Z9I/pdisk_1.dat 2025-04-07T14:06:11.455749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.455857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.463320Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:06:11.464481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.478005Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29982, node 1 2025-04-07T14:06:11.508155Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.508181Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:06:11.509670Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:06:11.509742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:06:11.514692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:06:11.609274Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001e49/r3tmp/yandexkFgkPx.tmp 2025-04-07T14:06:11.609299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001e49/r3tmp/yandexkFgkPx.tmp 2025-04-07T14:06:11.609449Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001e49/r3tmp/yandexkFgkPx.tmp 2025-04-07T14:06:11.609575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:06:11.786036Z INFO: TTestServer started on Port 15625 GrpcPort 29982 TClient is connected to server localhost:15625 PQClient connected to localhost:29982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:06:12.040689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:06:14.101018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572317695002073:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.101105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490572317695002081:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.101191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.101962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319045548767:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.102061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572319045548775:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.102120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:06:14.105714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:06:14.118659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490572317695002087:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.125660Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T14:06:14.125814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572319045548781:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:06:14.420421Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490572319045548884:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420731Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490572317695002132:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:06:14.420917Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjU4NzdkOTEtY2VhNDhmODUtYmQ2NjE5OS1iZGMyNGI3NQ==, ActorId: [2:7490572317695002071:2280], ActorState: ExecuteState, TraceId: 01jr895f2h7xqvnbfa6zxz54cq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.424199Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.426754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQyZjEyMjUtMjlmZjI5NTUtNDgxZTdmYjQtYmRkN2QyZDg=, ActorId: [1:7490572319045548764:2303], ActorState: ExecuteState, TraceId: 01jr895f2c4v4d9ge02pp9s7mw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:06:14.427106Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:06:14.465108Z node 1 :FLAT_ ... rtition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2025-04-07T14:11:18.232777Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2025-04-07T14:11:18.232794Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2025-04-07T14:11:18.233442Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1744035078233 2025-04-07T14:11:18.233631Z node 17 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T14:11:18.236365Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:11:18.236435Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236503Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T14:11:18.236554Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236607Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-07T14:11:18.236642Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236698Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T14:11:18.236733Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236817Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-07T14:11:18.236872Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236922Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-07T14:11:18.236948Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.236985Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-07T14:11:18.237004Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.237026Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-07T14:11:18.237044Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T14:11:18.237067Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-07T14:11:18.237084Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.237105Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-07T14:11:18.237118Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:11:18.237151Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-07T14:11:18.237158Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T14:11:18.237210Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:11:18.237298Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:11:18.237330Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T14:11:18.237428Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-07T14:11:18.237456Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T14:11:18.237598Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T14:11:18.237617Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:11:18.237664Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744035078231 queuesize 0 startOffset 0 2025-04-07T14:11:18.237779Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 1 } 2025-04-07T14:11:18.237830Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 1 2025-04-07T14:11:18.237871Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 2 2025-04-07T14:11:18.237886Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 3 2025-04-07T14:11:18.237902Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 4 2025-04-07T14:11:18.237917Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 5 2025-04-07T14:11:18.237932Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 6 2025-04-07T14:11:18.237950Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 7 2025-04-07T14:11:18.237969Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 8 2025-04-07T14:11:18.237987Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 9 2025-04-07T14:11:18.238003Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: acknoledged message 10 2025-04-07T14:11:18.238223Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:11:18.238263Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session will now close 2025-04-07T14:11:18.238310Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: aborting 2025-04-07T14:11:18.238693Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:11:18.239174Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0 grpc read done: success: 0 data: 2025-04-07T14:11:18.239196Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0 grpc read failed 2025-04-07T14:11:18.239288Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0] Write session: destroy 2025-04-07T14:11:18.239511Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0 2025-04-07T14:11:18.239537Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|912406aa-42246ad0-30131e29-efeda511_0 is DEAD 2025-04-07T14:11:18.239884Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:11:18.240033Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:11:18.240092Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7490573624356862374:2606] destroyed 2025-04-07T14:11:18.240134Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. |94.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem |94.4%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAffectedConfigs |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2025-04-07T14:06:08.696695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:08.696752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:08.762337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:09.734264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:09.734307Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:09.773151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:10.545026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:10.545075Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:10.578935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:11.459471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:11.459521Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:11.524183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:12.532997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:12.533063Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:12.571987Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:13.556768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:13.556837Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:13.596845Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:14.609969Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:14.610047Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:14.654281Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:15.437100Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:15.437165Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:15.474601Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944 2025-04-07T14:06:16.289845Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:16.289918Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:16.327466Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:17.771909Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:17.771965Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:17.807990Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:19.236185Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:19.236289Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:19.283572Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:20.741453Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:20.741521Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:20.786729Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:22.203693Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:22.203772Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:22.252723Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:23.903603Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:23.903653Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:23.954650Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:25.608683Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:25.608756Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:25.650609Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:27.205398Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:27.205489Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:27.247031Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:28.919692Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:28.919781Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:28.964570Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944 2025-04-07T14:06:30.650212Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:30.650306Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:30.696749Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:31.128889Z node 18 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1 2025-04-07T14:06:31.820110Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:31.820214Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:31.869954Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:32.550394Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:32.550498Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:32.637319Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-07T14:06:33.197903Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:33.197997Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:33.242171Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:34.000717Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:34.000840Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:34.098480Z node 20 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[20:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-07T14:06:34.724639Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:34.724747Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:34.769494Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:37.445713Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:37.445784Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:37.490976Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:40.207446Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:40.207535Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:40.253025Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:41.392649Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:41.392766Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:41.438496Z node 24 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:42.512296Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:06:42.512377Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:06:42.560309Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:06:47.310483Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:47.310580Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:51.178710Z node 25 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-04-07T14:09:52.078769Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:09:52.078864Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:52.124636Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:09:56.836729Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:09:56.836841Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:21.476850Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:21.476918Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:21.512351Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:11:22.573256Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:22.573349Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:22.616293Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.5%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScheme::QueryWithAlter >> KqpConstraints::CreateTableSerialTypeForbidden >> KqpOlapScheme::CreateTableWithoutTtl >> KqpScheme::CreateExternalTable >> KqpScheme::AlterCompressionLevelInColumnFamily >> KqpScheme::CreateTableWithDefaultSettings >> KqpScheme::ChangefeedAwsRegion >> KqpOlapScheme::DropTable >> KqpConstraints::SerialTypeNegative1 >> KqpOlapScheme::AddColumnWithTtl >> KqpAcl::ReadSuccess >> KqpScheme::SchemaVersionMissmatchWithRead >> KqpScheme::CreateFamilyWithCompressionLevel >> KqpOlapScheme::AddColumnLongPk >> KqpScheme::AlterIndexImplTable >> KqpScheme::CreateAndAlterTableWithPartitionBy >> KqpScheme::DropKeyColumn >> KqpScheme::CreateTableWithReadReplicasUncompat >> KqpOlapScheme::WithoutDefaultColumnFamily >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> KqpScan::ScanRetryReadRanges [GOOD] >> Secret::ValidationQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-04-07T14:11:15.241560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:11:15.252462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:15.252865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:15.253035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.253746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.253784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a8/r3tmp/tmp8aLahS/pdisk_1.dat 2025-04-07T14:11:15.505886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:15.632423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:11:15.734444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.734529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.737046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.737096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.750248Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:11:15.750722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:15.751064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:16.092910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:16.670703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1326:2797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:16.670775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1337:2802], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:16.670850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:16.674666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:11:17.161968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1340:2805], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:11:18.057988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89ephxasbx05z2qjkm570d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQzYTE5MzEtZDkwMWNlOGMtNjkxMjRlNjEtODcwZGEyYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-04-07T14:11:18.651187Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89eqy773zq77hq3aawcr24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNhNDljMjYtYmY1NWYwNjEtMzUzMDI4NDItMzQ5YTViOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1542:2928] -> [2:1498:2402] -- EvScanData from [2:1546:2409]: pass 2025-04-07T14:11:19.129730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89eqy773zq77hq3aawcr24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNhNDljMjYtYmY1NWYwNjEtMzUzMDI4NDItMzQ5YTViOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-07T14:11:19.131509Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-07T14:11:23.538922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:436:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:23.539141Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:23.539341Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:23.540537Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:433:2130], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:23.540700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:23.540758Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016a8/r3tmp/tmpNZH8OX/pdisk_1.dat 2025-04-07T14:11:23.726345Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:23.839783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:11:23.933299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:23.933401Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:23.935586Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:23.935642Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:23.947706Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:11:23.947980Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:23.948280Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:24.269443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:24.801892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1335:2803], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:24.801981Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1346:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:24.802061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:24.806414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:11:25.291944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1349:2811], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:11:25.704542Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89eyg0540qx0dqrh1ze44x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmMyNDk0NzItZTdhMzAyYTItYmNiNzNkY2ItYWU2ZTY4NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2025-04-07T14:11:26.375664Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89ezcx0zzrg4rd12qz5kmm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTEwMTYyODItM2Q3N2JiMTQtOTJjNzVjODctNzVmOTk4MmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1551:2934] -> [4:1505:2405] -- EvScanData from [4:1555:2412]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2025-04-07T14:11:26.390512Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2025-04-07T14:08:35.795950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:08:35.796257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:08:35.796477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002197/r3tmp/tmptkGsgA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21262, node 1 TClient is connected to server localhost:3587 2025-04-07T14:08:36.311742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:08:36.353164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:08:36.357139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:08:36.357214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:08:36.357249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:08:36.357442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:08:36.405728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:08:36.405831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:08:36.417449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T14:08:49.148016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:713:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:49.148134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:724:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:49.148236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:08:49.158827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:08:49.199174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:727:2602], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:08:49.287780Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:787:2642], status: GENERIC_ERROR, issues:
:1:20: Error: Unexpected token '-' : syntax error... 2025-04-07T14:08:49.289431Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjlmNDlkNGQtNTY3MjA2MS0zMTIzNDI4My03YTE0YzBhZA==, ActorId: [1:711:2592], ActorState: ExecuteState, TraceId: 01jr89a6fm0td5ssw90w2swfq3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-07T14:08:59.762822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-07T14:09:00.515439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:09:01.076487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-07T14:09:01.960800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-07T14:09:03.124961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:09:03.881285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:09:04.568305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:09:05.473708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:09:08.610452Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZiNTQxNmMtZDg5NzkzMzQtYzM4ZjViY2ItNWE4ZjBmMTM=, ActorId: [1:811:2658], ActorState: ExecuteState, TraceId: 01jr89agnefz56gkxtmv60r15n, Create QueryResponse for error on request, msg: 2025-04-07T14:09:08.612201Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jr89agnefz56gkxtmv60r15n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZiNTQxNmMtZDg5NzkzMzQtYzM4ZjViY2ItNWE4ZjBmMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-07T14:09:09.247568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:09:09.247626Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T14:09:44.855004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-07T14:09:45.848256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-04-07T14:09:47.303065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715733:0, at schemeshard: 72057594046644480 2025-04-07T14:09:48.016128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715738:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-04-07T14:10:01.047953Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmNlYjcwZDQtOGU1M2ZlMjMtNmExMWI2M2YtOTE0YzlhOTQ=, ActorId: [1:3262:4541], ActorState: ExecuteState, TraceId: 01jr89cc5k5wh7ask2vqv35wab, Create QueryResponse for error on request, msg: 2025-04-07T14:10:01.048954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr89cc5k5wh7ask2vqv35wab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNlYjcwZDQtOGU1M2ZlMjMtNmExMWI2M2YtOTE0YzlhOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-07T14:10:13.354291Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3666:4839], TxId: 281474976715768, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmMyMjQ3NWQtYTQxMTM1My1hMTBmMGM0My00YzRhNDc=. TraceId : 01jr89crgr48wxd8z4h88639bw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:10:13.362640Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3667:4840], TxId: 281474976715768, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmMyMjQ3NWQtYTQxMTM1My1hMTBmMGM0My00YzRhNDc=. CustomerSuppliedId : . TraceId : 01jr89crgr48wxd8z4h88639bw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:3663:4767], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:10:13.364400Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmMyMjQ3NWQtYTQxMTM1My1hMTBmMGM0My00YzRhNDc=, ActorId: [1:3566:4767], ActorState: ExecuteState, TraceId: 01jr89crgr48wxd8z4h88639bw, Create QueryResponse for error on request, msg: 2025-04-07T14:10:13.373601Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr89cr9k80jbcvh57ch35m5y" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MmMyMjQ3NWQtYTQxMTM1My1hMTBmMGM0My00YzRhNDc=" tx_control { tx_id: "01jr89cr9k80jbcvh57ch35m5y" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-07T14:10:25.342804Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ2YmQ4NzMtZjg5ODdjY2UtOWZhNTM2Y2EtODMwOGRkZGM=, ActorId: [1:3878:4995], ActorState: ExecuteState, TraceId: 01jr89d3xg5vnr6hbsbe9t2sfn, Create QueryResponse for error on request, msg: 2025-04-07T14:10:25.344206Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715781. Ctx: { TraceId: 01jr89d3xg5vnr6hbsbe9t2sfn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ2YmQ4NzMtZjg5ODdjY2UtOWZhNTM2Y2EtODMwOGRkZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-07T14:10:36.783588Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4201:5240], for# root@builtin, access# DescribeSchema 2025-04-07T14:10:36.783698Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4201:5240], for# root@builtin, access# DescribeSchema 2025-04-07T14:10:36.785269Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4198:5237], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:10:36.786736Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTczMmQwYjUtNTU5ZTZlNGUtODZlYWYxMTgtZTQyNDNiM2E=, ActorId: [1:4193:5233], ActorState: ExecuteState, TraceId: 01jr89dfk57ycgc24ayzd9egfm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T14:10:48.197764Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-04-07T14:10:48.825907Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGY2NTMzNjEtNDc4ZjkyZDUtZDRhZWEzYS0yMTU1ZTg4Ng==, ActorId: [1:4460:5434], ActorState: ExecuteState, TraceId: 01jr89dtr582smj5nndk2yz7qh, Create QueryResponse for error on request, msg: 2025-04-07T14:10:48.826688Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715805. Ctx: { TraceId: 01jr89dtr582smj5nndk2yz7qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2NTMzNjEtNDc4ZjkyZDUtZDRhZWEzYS0yMTU1ZTg4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-07T14:11:00.786473Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjQ1NjUxYjEtNzY0YmIxNjAtY2UwMWJlMTAtOTk1ZjdhNTk=, ActorId: [1:4855:5730], ActorState: ExecuteState, TraceId: 01jr89e6ghcn6c2gy2x76hp8fc, Create QueryResponse for error on request, msg: 2025-04-07T14:11:00.787393Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715821. Ctx: { TraceId: 01jr89e6ghcn6c2gy2x76hp8fc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQ1NjUxYjEtNzY0YmIxNjAtY2UwMWJlMTAtOTk1ZjdhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-07T14:11:25.807124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715853. Ctx: { TraceId: 01jr89ez88ahvajhsjb89yzekp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNhNzAxOS00NDgxN2QwZC04NGM2ZWM0Ny04NTJhYzkxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> KqpOlapScheme::AddColumnLongPk [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert >> KqpOlapScheme::WithoutDefaultColumnFamily [GOOD] >> KqpOlapTypes::Decimal >> KqpOlapScheme::CreateTableWithoutTtl [GOOD] >> KqpOlapScheme::DropColumn >> KqpOlapScheme::DropTable [GOOD] >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert |94.5%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateTableWithWrongPartitionAtKeys >> KqpConstraints::SerialTypeNegative1 [GOOD] >> KqpConstraints::SerialTypeForNonKeyColumn >> KqpScheme::CreateFamilyWithCompressionLevel [GOOD] >> KqpScheme::CreateTableStoreNegative >> KqpScheme::CreateExternalTable [GOOD] >> KqpScheme::CreateExternalTableCheckPrimaryKey >> KqpConstraints::CreateTableSerialTypeForbidden [GOOD] >> KqpConstraints::CreateTableWithDefaultForbidden >> KqpScheme::CreateTableWithReadReplicasUncompat [GOOD] >> KqpScheme::CreateTableWithReadReplicasCompat >> KqpScheme::AlterCompressionLevelInColumnFamily [GOOD] >> KqpScheme::AddDropColumn >> KqpScheme::ChangefeedAwsRegion [GOOD] >> KqpScheme::ChangefeedRetentionPeriod >> KqpAcl::ReadSuccess [GOOD] >> KqpAcl::WriteSuccess >> KqpScheme::DropKeyColumn [GOOD] >> KqpScheme::DropIndexDataColumn >> KqpScheme::SchemaVersionMissmatchWithRead [GOOD] >> KqpScheme::SchemaVersionMissmatchWithWrite >> KqpScheme::CreateAndAlterTableWithPartitionBy [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat >> KqpScheme::CreateTableWithDefaultSettings [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyUncompat >> KqpScheme::AlterTableAlterVectorIndex >> KqpScheme::AlterIndexImplTable [GOOD] >> KqpScheme::AlterIndexImplTableUsingPublicAPI >> KqpScheme::QueryWithAlter [GOOD] >> KqpScheme::RenameTable >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::DropColumnErrors >> KqpScheme::CreateTableStoreNegative [GOOD] >> KqpScheme::CreateResourcePool >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::AddColumnWithStore >> KqpOlapScheme::AddColumnWithTtl [GOOD] >> KqpOlapScheme::AddPgColumnWithStore >> KqpScheme::CreateTableWithWrongPartitionAtKeys [GOOD] >> KqpScheme::CreateUserWithPassword >> KqpOlapScheme::DropColumn [GOOD] >> KqpOlapScheme::DropColumnAfterAdd >> KqpScheme::CreateTableWithReadReplicasCompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysUuid >> KqpAcl::WriteSuccess [GOOD] >> KqpAcl::RecursiveCreateTableShouldSuccess >> 
KqpConstraints::CreateTableWithDefaultForbidden [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultValue >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat >> KqpScheme::AddDropColumn [GOOD] >> KqpScheme::AlterGroup >> KqpScheme::CreateTableWithCompactionPolicyUncompat [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyCompat >> KqpConstraints::SerialTypeForNonKeyColumn [GOOD] >> KqpConstraints::SerialTypeSerial2 >> KqpScheme::SchemaVersionMissmatchWithWrite [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexRead >> KqpScheme::CreateExternalTableCheckPrimaryKey [GOOD] >> KqpScheme::CreateExternalTableValidation >> KqpScheme::ChangefeedRetentionPeriod [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning >> KqpScheme::AlterTableAlterVectorIndex [GOOD] >> KqpScheme::AlterTableAlterMissedIndex >> KqpOlapTypes::Decimal [GOOD] >> KqpOlapTypes::Decimal35 >> KqpScheme::DropIndexDataColumn [GOOD] >> KqpScheme::DropExternalTable >> KqpOlapScheme::DropColumnErrors [GOOD] >> KqpOlapScheme::DropColumnTableStoreErrors >> KqpScheme::RenameTable [GOOD] >> KqpScheme::ResourcePoolsValidation >> KqpScheme::CreateUserWithPassword [GOOD] >> KqpScheme::CreateUserWithoutPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 16006, MsgBus: 17064 2025-04-07T14:07:08.619752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490572552721521387:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:07:08.619823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00103a/r3tmp/tmpywtTfT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16006, node 1 2025-04-07T14:07:08.925458Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:07:08.925475Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:07:08.928411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:07:08.966280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:07:08.966303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:07:08.966321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:07:08.966388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:07:09.021697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:07:09.021786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:07:09.023560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17064 TClient is connected to server localhost:17064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:07:09.425570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.447341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.539177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:07:09.654722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:07:09.717080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.016222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572565606424995:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.016316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.192575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.215891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.241201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.265763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.289267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.324894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:07:11.356540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572565606425487:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.356605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.356691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490572565606425492:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:07:11.358998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:07:11.365646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490572565606425494:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:07:12.337880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:07:12.430673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:07:12.430862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:07:12.431132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:07:12.431249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:07:12.431351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:07:12.431509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:07:12.431626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:07:12.431727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:07:12.431832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:07:12.431931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:07:12.432041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:07:12.432140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7490572569901393327:2473];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:07:12.432954Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7490572569901393338:2484];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:07:12.433001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7490572569901393338:2484];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:07:12.433161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7490572569901393338:2484];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:07:12.433263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7490572569901393338:2484];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:07:12.433350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:749057256 ... d=16; 2025-04-07T14:11:20.251104Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:20.251306Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:20.251363Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:20.251554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:20.251579Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:11:20.251693Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:20.251722Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:20.251865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:20.251894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:20.251979Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:20.252002Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:20.255583Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:20.255644Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:20.255720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:20.255748Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:20.255892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:20.255937Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:20.256035Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:20.256072Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:20.256142Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:20.256165Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:20.256194Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:20.256219Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:20.256464Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:20.256501Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:20.256656Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:20.256707Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:11:20.256827Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:20.256867Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:20.257020Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:20.257053Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:20.257171Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:20.257212Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:20.262089Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:20.262160Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:20.262263Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:20.262302Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:20.262521Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:20.262569Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:20.262678Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:20.262714Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:20.262783Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:20.262833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:20.262897Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:20.262931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:20.263352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:20.263431Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:20.314702Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:20.314769Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:11:20.314875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:20.314905Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:20.315056Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:20.315084Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:20.315188Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:20.315220Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpScheme::CreateResourcePool [GOOD] >> KqpScheme::CreateResourcePoolClassifier >> KqpOlapScheme::DropColumnAfterAdd [GOOD] >> KqpOlapScheme::CreateWithoutColumnFamily >> KqpScan::ScanPg [GOOD] >> KqpScheme::AlterIndexImplTableUsingPublicAPI [GOOD] >> KqpScheme::AlterResourcePool >> KqpScheme::CreateTableWithPartitionAtKeysUuid [GOOD] >> KqpScheme::CreateTableWithStoreExternalBlobs >> KqpAcl::RecursiveCreateTableShouldSuccess [GOOD] >> KqpConstraints::AddSerialColumnForbidden >> KqpOlapScheme::AddColumnWithStore [GOOD] >> KqpOlapScheme::AddColumnFamily >> KqpScheme::ChangefeedTopicAutoPartitioning [GOOD] >> KqpScheme::ChangefeedAttributes >> KqpOlapScheme::AddPgColumnWithStore [GOOD] >> KqpOlapScheme::AlterCompressionLevel >> KqpOlapScheme::CreateTableWithTtl >> KqpScheme::CreateExternalTableValidation [GOOD] >> KqpScheme::CreateExternalTableWithSettings >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] >> 
KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat >> KqpScheme::AlterGroup [GOOD] >> KqpScheme::AlterColumnTableTtl >> KqpScheme::CreateTableWithCompactionPolicyCompat [GOOD] >> KqpScheme::CreateTableWithDefaultFamily >> KqpConstraints::SerialTypeSerial2 [GOOD] >> KqpConstraints::SerialTypeSerial >> KqpConstraints::AlterTableAddColumnWithDefaultValue [GOOD] >> KqpConstraints::DefaultValuesForTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-04-07T14:11:15.156967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:11:15.167775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:15.168118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:15.168241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.168804Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.168842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016b6/r3tmp/tmpyfEApu/pdisk_1.dat 2025-04-07T14:11:15.422924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:15.552398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:11:15.650718Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:15.653420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.653508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.654424Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:11:15.654836Z node 2 :TX_PROXY DEBUG: actor# [2:194:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:15.655770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.655821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.656566Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-07T14:11:15.668319Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:11:15.668571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:15.668825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:16.010603Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2025-04-07T14:11:16.010651Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:11:16.011494Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1162:2709] 2025-04-07T14:11:16.076442Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:11:16.077508Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:11:16.077628Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:11:16.077881Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:11:16.078015Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:11:16.078129Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T14:11:16.078332Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:11:16.080108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:16.081986Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:11:16.082034Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2709] txid# 281474976715657 SEND to# [1:1069:2649] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:11:16.152534Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1215:2356] 2025-04-07T14:11:16.152792Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:16.190713Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:16.190881Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:11:16.192517Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:11:16.192580Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:11:16.192611Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:11:16.200153Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:11:16.217872Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:11:16.218012Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:11:16.218093Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1239:2371] 2025-04-07T14:11:16.218118Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:11:16.218139Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:11:16.218170Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:11:16.218726Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:11:16.218795Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:11:16.218899Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:11:16.218929Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:11:16.218955Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:11:16.218983Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:11:16.273335Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1196:2739], serverId# [2:1243:2372], sessionId# [0:0:0] 2025-04-07T14:11:16.273673Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:11:16.273906Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:11:16.273982Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:11:16.275453Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:11:16.287518Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:11:16.287639Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:11:16.542051Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-07T14:11:16.542309Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-07T14:11:16.542433Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:11:16.604435Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-07T14:11:16.604606Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-07T14:11:16.604774Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:11:16.605005Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:11:16.617784Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1273:2761], serverId# [2:1276:2382], sessionId# [0:0:0] 2025-04-07T14:11:16.621755Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 992 RawX2: 4294969883 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:11:16.621825Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:11:16.622393Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:11:16.622429Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2025-04-07T14:11:16.622460Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:11:16.622682Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:11:16.622826Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:11:16.623177Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:11:16.623239Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:11:16.623668Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:11:16.623905Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at ... 74976715664, task: 1. Resume compute actor 2025-04-07T14:11:26.471580Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:11:26.471606Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-07T14:11:26.471628Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-07T14:11:26.471652Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-07T14:11:26.471671Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-07T14:11:26.471691Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-07T14:11:26.471733Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2025-04-07T14:11:26.471761Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1575:2943], TxId: 281474976715664, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==. TraceId : 01jr89ez6sbbzz3p4k1g3szyry. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:11:26.471819Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-04-07T14:11:26.471882Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:11:26.471984Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:11:26.472138Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1572:2903] TxId: 281474976715664. Ctx: { TraceId: 01jr89ez6sbbzz3p4k1g3szyry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1575:2943], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1529 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 399 FinishTimeMs: 1744035086471 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 347 HostName: "ghrun-lqtfcktism" NodeId: 3 StartTimeMs: 1744035086469 } MaxMemoryUsage: 1048576 } 2025-04-07T14:11:26.472191Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jr89ez6sbbzz3p4k1g3szyry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1575:2943] 2025-04-07T14:11:26.472261Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1572:2903] TxId: 281474976715664. Ctx: { TraceId: 01jr89ez6sbbzz3p4k1g3szyry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:11:26.472293Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1572:2903] TxId: 281474976715664. Ctx: { TraceId: 01jr89ez6sbbzz3p4k1g3szyry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:11:26.472327Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1572:2903] TxId: 281474976715664. Ctx: { TraceId: 01jr89ez6sbbzz3p4k1g3szyry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWZkZmU1ZGYtODYyNDVmNDYtNmMxNmQzZDEtZWQ4ZmIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001529s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T14:11:26.473146Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-07T14:11:26.473224Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Handle TEvProposeTransaction 2025-04-07T14:11:26.473255Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] TxId# 0 ProcessProposeTransaction 2025-04-07T14:11:26.473334Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1577:2944] SnapshotReq marker# P0 2025-04-07T14:11:26.474071Z node 3 :TX_PROXY DEBUG: Actor# [3:1579:2944] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-07T14:11:26.474237Z node 3 :TX_PROXY DEBUG: Actor# [3:1579:2944] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-07T14:11:26.474322Z node 3 :TX_PROXY DEBUG: Actor# [3:1577:2944] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-07T14:11:32.788834Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:11:32.798301Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:32.798650Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:32.798794Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:32.800828Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:32.800914Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0016b6/r3tmp/tmpAnErBU/pdisk_1.dat 2025-04-07T14:11:33.089845Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:33.219925Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.321317Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:33.321457Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:33.323607Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:33.323698Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:33.337325Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-07T14:11:33.337704Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:33.337981Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:33.678420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:34.234548Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1326:2797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:34.234643Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1337:2802], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:34.234708Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:34.239470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:11:34.752268Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1340:2805], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:11:35.404780Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89f7psb6vtrf0jay0mkb3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjljMTVjNjQtZmFhOTY5OGItMjA1ZDZiYmQtMzE2MDFjNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:11:36.136779Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89f8waajxwd6qp505t54a3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTEyNWJkNjUtYzRjZTA3MDgtNzZhNGJmYzAtZDc0M2Y4YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:11:36.550681Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89f8waajxwd6qp505t54a3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTEyNWJkNjUtYzRjZTA3MDgtNzZhNGJmYzAtZDc0M2Y4YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:11:36.554074Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> KqpScheme::SchemaVersionMissmatchWithIndexRead [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexWrite >> KqpOlapScheme::DropColumnTableStoreErrors [GOOD] >> KqpOlapScheme::DropColumnAfterInsert >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> KqpScheme::DropExternalTable [GOOD] >> KqpScheme::DropNonExistingExternalDataSource >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> KqpScheme::AlterTableAlterMissedIndex [GOOD] >> KqpScheme::AlterTableRenameIndex >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-04-07T14:11:14.971623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:11:15.012979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:11:15.013366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:11:15.013490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.014001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:15.014043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00169c/r3tmp/tmpw2DXFK/pdisk_1.dat 2025-04-07T14:11:15.387896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:15.552830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:11:15.651328Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:15.653148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.653258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.654571Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:11:15.654976Z node 2 :TX_PROXY DEBUG: actor# [2:194:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:11:15.655948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:15.655999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:15.656855Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-07T14:11:15.668889Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:11:15.669216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:15.669457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:16.011361Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2025-04-07T14:11:16.011426Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:11:16.011547Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1159:2706] 2025-04-07T14:11:16.090146Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:11:16.090762Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:11:16.090862Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:11:16.091142Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:11:16.091288Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:11:16.091374Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T14:11:16.091598Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:11:16.092929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:16.095034Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:11:16.095142Z node 1 :TX_PROXY DEBUG: Actor# [1:1159:2706] txid# 281474976715657 SEND to# [1:1069:2649] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:11:16.177153Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1236:2763] 2025-04-07T14:11:16.177513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:16.211181Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1239:2765] 2025-04-07T14:11:16.211441Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:16.222848Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1243:2768] 2025-04-07T14:11:16.223008Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:16.229819Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:16.230125Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:11:16.231449Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:11:16.231523Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:11:16.231575Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:11:16.231836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:11:16.252297Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:11:16.252485Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 
2025-04-07T14:11:16.252590Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:1333:2814] 2025-04-07T14:11:16.252626Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:11:16.252655Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:11:16.252702Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:11:16.254602Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:11:16.254699Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:11:16.255008Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:11:16.255049Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:11:16.255086Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:11:16.255138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:11:16.255749Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:1251:2771], serverId# [1:1300:2796], sessionId# [0:0:0] 2025-04-07T14:11:16.256067Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:11:16.257983Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:11:16.258108Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:11:16.260646Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:16.261155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:11:16.262098Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-07T14:11:16.262136Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-07T14:11:16.262170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-07T14:11:16.262350Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:11:16.262385Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-04-07T14:11:16.262446Z node 1 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:11:16.262500Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [1:1344:2822] 2025-04-07T14:11:16.262524Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-07T14:11:16.262545Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-04-07T14:11:16.262564Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-07T14:11:16.263938Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-04-07T14:11:16.263991Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-04-07T14:11:16.264147Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-07T14:11:16.264174Z node 1 
:TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:11:16.264213Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-04-07T14:11:16.264247Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-07T14:11:16.264818Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [1:1256:2776], serverId# [1:1318:2808], sessionId# [0:0:0] 2025-04-07T14:11:16.265898Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-07T14:11:16.266052Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:11:16.266123Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037894 2025-04-07T14:11:16.270523Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 720575 ... 67, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Send stats to executor actor [5:1892:2935] TaskId: 1 Stats: CpuTimeUs: 441 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 179 FinishTimeMs: 1744035098473 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 129 HostName: "ghrun-lqtfcktism" NodeId: 5 StartTimeMs: 1744035098472 } MaxMemoryUsage: 1048576 2025-04-07T14:11:38.473653Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1895:3122], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T14:11:38.473703Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1895:3122], 2025-04-07T14:11:38.473915Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1566:2935], seqNo: 1, nRows: 1 2025-04-07T14:11:38.473990Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:11:38.474028Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Poll inputs 2025-04-07T14:11:38.474061Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-07T14:11:38.474091Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-07T14:11:38.474135Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-07T14:11:38.474163Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-07T14:11:38.474211Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:11:38.474371Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1895:3122], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 441 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 179 FinishTimeMs: 1744035098473 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 129 HostName: "ghrun-lqtfcktism" NodeId: 5 StartTimeMs: 1744035098472 } MaxMemoryUsage: 1048576 } 2025-04-07T14:11:38.474406Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1895:3122], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-04-07T14:11:38.474685Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1896:3122] 2025-04-07T14:11:38.474780Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-04-07T14:11:38.474832Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. 
PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-04-07T14:11:38.474871Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-04-07T14:11:38.474934Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:11:38.474962Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-07T14:11:38.474982Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-07T14:11:38.475016Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-07T14:11:38.475051Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-07T14:11:38.475076Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-07T14:11:38.475100Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2025-04-07T14:11:38.475127Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3122], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jr89f9ccf7r8w5vtzj9wfjd2. SessionId : ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:11:38.475232Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-04-07T14:11:38.475307Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:11:38.475428Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2025-04-07T14:11:38.475593Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1895:3122], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1830 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 182 FinishTimeMs: 1744035098475 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 53 BuildCpuTimeUs: 129 HostName: "ghrun-lqtfcktism" NodeId: 5 StartTimeMs: 1744035098472 } MaxMemoryUsage: 1048576 } 2025-04-07T14:11:38.475639Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1895:3122] 2025-04-07T14:11:38.475719Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:11:38.475788Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-07T14:11:38.475841Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2935] TxId: 281474976715667. Ctx: { TraceId: 01jr89f9ccf7r8w5vtzj9wfjd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjMwZjdmNzQtMTdmNGRlNTUtOTM2ZjU0ZTYtZWZkYzM0MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001830s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 576 >> KqpOlapScheme::CreateWithoutColumnFamily [GOOD] >> KqpOlapScheme::CreateWithDefaultColumnFamily >> KqpScheme::CreateUserWithoutPassword [GOOD] >> KqpScheme::DescribeIndexTable |94.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapTypes::Decimal35 [GOOD] >> KqpOlapTypes::DecimalCsv >> KqpScheme::ResourcePoolsValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersValidation >> KqpOlapScheme::CreateTableWithTtl [GOOD] >> KqpOlapScheme::BulkError >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpOlapScheme::AlterCompressionLevel [GOOD] >> KqpOlapScheme::AlterCompressionLevelError >> KqpOlapScheme::AddColumnFamily [GOOD] >> KqpOlapScheme::AddColumnWithColumnFamily >> KqpScheme::AlterResourcePool [GOOD] >> KqpScheme::AlterNonExistingResourcePool >> KqpAcl::FailNavigate >> KqpScheme::AlterColumnTableTtl [GOOD] >> KqpScheme::AlterColumnTableTiering |94.6%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8328;columns=19; -- group by key: 0 2025-04-07T14:10:16.827916Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:10:16.894798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:10:16.908414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:10:16.908473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:10:16.908695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:10:16.914342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:10:16.914491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:10:16.914645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:10:16.914736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:10:16.914817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:10:16.914890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:10:16.914943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:10:16.915000Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:10:16.915077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:10:16.915151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.915228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:10:16.915305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:10:16.928108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:10:16.929614Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:10:16.929697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:10:16.929727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:10:16.929854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.929953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:10:16.930011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:10:16.930045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:10:16.930100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:10:16.930141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:10:16.930162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:10:16.930178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:10:16.930287Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:10:16.930327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:10:16.930353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:10:16.930367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:10:16.930427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:10:16.930459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:10:16.930487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:10:16.930507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:10:16.930562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:10:16.930585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:10:16.930615Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:10:16.930656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:10:16.930676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:10:16.930694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:10:16.930793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-07T14:10:16.930840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=21; 2025-04-07T14:10:16.930887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=18; 2025-04-07T14:10:16.930933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=22; 2025-04-07T14:10:16.931051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:10:16.931086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:10:16.931106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:10:16.931219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:10:16.931242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.931260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:10:16.931378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:10:16.931403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:10:16.931435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-07T14:10:16.931590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:10:16.931623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:10:16.931640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-0 ... 
d=0;TxId=100;ScanGen=0;fline=interval.cpp:31;event=fetched;interval_idx=0; 2025-04-07T14:11:41.028979Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=2052; 2025-04-07T14:11:41.029420Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:131;event=DoExecute;interval_idx=0; 2025-04-07T14:11:41.031156Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-07T14:11:41.031290Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.031329Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-07T14:11:41.031365Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:11:41.031535Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:11:41.031569Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:11:41.031602Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-04-07T14:11:41.031645Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-04-07T14:11:41.031680Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:11:41.031775Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.031809Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-07T14:11:41.031845Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:11:41.032302Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:11:41.032450Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:1;schema=res_min: binary res_max: binary res_some: binary res_count: uint64 yson: binary json: string jsondoc: binary;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.032484Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:11:41.032577Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=7;rows=1; 2025-04-07T14:11:41.032650Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=69;num_rows=1;batch_columns=res_min,res_max,res_some,res_count,yson,json,jsondoc; 2025-04-07T14:11:41.032766Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:431:2449] send ScanData to [54:430:2448] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 69 rows: 1 page faults: 0 finished: 0 pageFault: 0 arrow schema: res_min: binary res_max: binary res_some: binary res_count: uint64 yson: binary json: string jsondoc: binary 2025-04-07T14:11:41.032872Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.032976Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.033063Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce 
result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.033310Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:11:41.033409Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.033516Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-07T14:11:41.033552Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:431:2449] finished for tablet 9437184 2025-04-07T14:11:41.033614Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:431:2449] send ScanData to [54:430:2448] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:11:41.034029Z node 54 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [54:431:2449] and sent to [54:430:2448] packs: 0 txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1744035101021469,"name":"_full_task","f":1744035101021469,"d_finished":0,"c":0,"l":1744035101033647,"d":12178},"events":[{"name":"bootstrap","f":1744035101021624,"d_finished":2052,"c":1,"l":1744035101023676,"d":2052},{"a":1744035101033290,"name":"ack","f":1744035101032278,"d_finished":806,"c":1,"l":1744035101033084,"d":1163},{"a":1744035101033277,"name":"processing","f":1744035101023737,"d_finished":5214,"c":10,"l":1744035101033086,"d":5584},{"name":"ProduceResults","f":1744035101022763,"d_finished":2416,"c":13,"l":1744035101033538,"d":2416},{"a":1744035101033540,"name":"Finish","f":1744035101033540,"d_finished":0,"c":0,"l":1744035101033647,"d":107},{"name":"task_result","f":1744035101023748,"d_finished":4277,"c":9,"l":1744035101031891,"d":4277}],"id":"9437184::2052"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;) 2025-04-07T14:11:41.034105Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:11:41.021106Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-07T14:11:41.034143Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:11:41.034203Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:77;profile=;; 2025-04-07T14:11:41.034277Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:431:2449];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpScheme::CreateExternalTableWithSettings [GOOD] >> KqpScheme::CreateExternalTableWithUpperCaseSettings >> KqpConstraints::DefaultValuesForTable [GOOD] >> KqpScheme::ChangefeedAttributes [GOOD] >> KqpScheme::CreateTableWithDefaultFamily [GOOD] >> KqpScheme::CreateTableWithDecimalColumn >> KqpScheme::ChangefeedOnIndexTable >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 >> KqpConstraints::AddSerialColumnForbidden [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden >> KqpScheme::CreateTableWithPgColumn >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat [GOOD] >> 
KqpScheme::CreateAndAlterTableWithBloomFilterUncompat >> KqpConstraints::SerialTypeSerial [GOOD] >> KqpConstraints::SerialTypeBigSerial >> KqpOlapScheme::BulkError [GOOD] >> KqpOlapScheme::ColumnFamilyWithFieldData >> KqpScheme::DropNonExistingExternalDataSource [GOOD] >> KqpScheme::DropNonExistingResourcePool >> KqpScheme::SchemaVersionMissmatchWithIndexWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat >> KqpOlapScheme::CreateWithDefaultColumnFamily [GOOD] >> KqpOlapScheme::CreateWithColumnFamily >> KqpScheme::AlterTableAddImplicitSyncIndex >> KqpScheme::TouchIndexAfterMoveIndexWrite >> KqpOlapScheme::AlterCompressionLevelError [GOOD] >> KqpOlapScheme::AddColumnWithoutColumnFamily |94.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:11:13.527695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:11:13.527756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.527788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:11:13.527818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:11:13.527864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:11:13.527885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:11:13.527921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:11:13.528113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:11:13.578221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:11:13.578282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:13.585056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:11:13.585130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:11:13.585236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:11:13.589146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:11:13.589285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:11:13.589701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.589847Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:11:13.591027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.591918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.591967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.592052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:11:13.592091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.592116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:11:13.592239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.596933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:11:13.671753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:11:13.671930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.672130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:11:13.672273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:11:13.672314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.674422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.674537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:11:13.674674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.674724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:11:13.674766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:11:13.674788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:11:13.676122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.676162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:11:13.676191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:11:13.677306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.677339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.677363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.677399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.679998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:11:13.681081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:11:13.681211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:11:13.681885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:13.681959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:11:13.681987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.682161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:11:13.682205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:11:13.682347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.682413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:11:13.683754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:11:13.683802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:13.683977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:11:13.684023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-04-07T14:11:13.684275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:11:13.684353Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:11:13.684453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:11:13.684484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.684510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:11:13.684545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:11:13.684571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:11:13.684589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:11:13.684629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:11:13.684653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:11:13.684697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:11:13.685829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.685887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:11:13.685909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:11:13.685942Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:11:13.685973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:11:13.686056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
vFeed: self# [1:3446:5412] 2025-04-07T14:11:42.719666Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30564 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2E2B72AB-B6FF-498C-ACF4-DD0909541F15 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-04-07T14:11:42.722136Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3447:5413], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-04-07T14:11:42.722315Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T14:11:42.722473Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30564 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F05A0D7C-4C4E-4AA0-9B6A-A95E20881013 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-04-07T14:11:42.724632Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3447:5413], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-04-07T14:11:42.724955Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3446:5412] 2025-04-07T14:11:42.725043Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3447:5413], sender# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30564 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5CDFBE54-31ED-495A-872D-17D63211EDA9 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-04-07T14:11:42.727277Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3447:5413], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-07T14:11:42.727325Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3447:5413], success# 1, error# , multipart# 1, uploadId# 1 2025-04-07T14:11:42.733004Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3447:5413], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30564 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
023A7334-69A3-441A-ACC5-5404E3ABF27B amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-04-07T14:11:42.743812Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3447:5413], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-04-07T14:11:42.744256Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3446:5412], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-07T14:11:42.759980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T14:11:42.760062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:11:42.760222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T14:11:42.760349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 302 RawX2: 4294969586 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-07T14:11:42.760415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:11:42.760457Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:42.760493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:11:42.760537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:11:42.760742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:11:42.764657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:42.765328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:11:42.765385Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:11:42.765500Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:11:42.765539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:42.765589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:11:42.765671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 102 2025-04-07T14:11:42.765745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:11:42.765789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:11:42.765821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:11:42.765944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:11:42.769941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:11:42.769996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3432:5399] TestWaitNotification: OK eventTxId 102 |94.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapScheme::AddColumnWithColumnFamily [GOOD] >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec >> KqpScheme::DescribeIndexTable [GOOD] >> KqpScheme::CreatedAt >> KqpScheme::AlterTableRenameIndex [GOOD] >> KqpScheme::AlterTableReplaceIndex |94.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpAcl::FailNavigate [GOOD] >> KqpAcl::FailResolve >> KqpScheme::ResourcePoolClassifiersValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation >> KqpOlapTypes::DecimalCsv [GOOD] >> KqpOlapTypes::AttributeNegative >> KqpOlapScheme::ColumnFamilyWithFieldData [GOOD] >> KqpOlapScheme::CrateWithWrongCodec >> KqpOlapScheme::TtlRunInterval >> KqpScheme::AlterNonExistingResourcePool [GOOD] >> KqpScheme::AlterResourcePoolClassifier >> KqpScheme::CreateResourcePoolClassifier [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless >> KqpOlapScheme::CreateWithColumnFamily [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 [GOOD] >> KqpConstraints::AlterTableAddNotNullWithDefault >> KqpScheme::ChangefeedOnIndexTable [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSigned >> KqpConstraints::AddColumnWithDefaultForbidden [GOOD] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError >> KqpOlapScheme::DropColumnAfterInsert [GOOD] >> KqpOlapScheme::DropColumnAndResetTtl >> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD] |94.6%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScheme::CreateTableWithDecimalColumn [GOOD] >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat [GOOD] >> KqpConstraints::SerialTypeBigSerial [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn >> KqpOlapScheme::AddColumnWithoutColumnFamily [GOOD] >> KqpOlapScheme::AddExsitsColumnFamily >> KqpScheme::DropNonExistingResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateWithColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 64118, MsgBus: 23186 2025-04-07T14:11:26.146270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660102619802:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.146751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001489/r3tmp/tmpSJXeCX/pdisk_1.dat 2025-04-07T14:11:26.449024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64118, node 1 2025-04-07T14:11:26.515770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.515848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.532630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.594159Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.594177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.594182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.594257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23186 TClient is connected to server localhost:23186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.113427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:28.295899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668692554947:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.296003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.585009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.627230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.627363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.627567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.627665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.627757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.627842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.627923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.628037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.628131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.628188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.628246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.628296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573668692555023:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:28.630079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:28.630130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:28.630203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:28.630223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:28.630333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:28.630350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:28.630396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:28.630412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:28.630453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:28.630478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:28.630520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:28.630542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:28.631028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:28.631089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:28.631251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:28.631280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:28.631411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:28.631435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:28.631578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:28.631606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:28.631676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:28.631693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Exe ... Actor;event=undelivered;self_id=[6:7490573736295337337:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:44.074846Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001489/r3tmp/tmp5hiNBd/pdisk_1.dat 2025-04-07T14:11:44.156920Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:44.176620Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:44.176714Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:44.178319Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8934, node 6 2025-04-07T14:11:44.212788Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:44.212810Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:44.212822Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:44.212921Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62135 TClient is connected to server localhost:62135 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:44.659692Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family2, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=3), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:46.732561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573744885272553:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.732656Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.743711Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.795325Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:46.795434Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:46.795708Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:46.795827Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:46.795932Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:46.796012Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:46.796091Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:46.796176Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:46.796261Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:46.796351Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:46.796434Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:46.796525Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573744885272599:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:46.799625Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:46.799695Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:46.799815Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:46.799854Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:46.800054Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:46.800092Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:46.800208Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:46.800245Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:46.800335Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:46.800372Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:46.800429Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:46.800478Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:46.800928Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:46.800978Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:46.801186Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:46.801231Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:46.801397Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:46.801437Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:46.801641Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:46.801683Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:46.801818Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:46.801852Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpScheme::CreateTableWithPgColumn [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexReadReplace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD] Test command err: Trying to start YDB, gRPC: 14023, MsgBus: 19208 2025-04-07T14:11:26.144520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573657724119615:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.144593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014bb/r3tmp/tmpe8RxKH/pdisk_1.dat 2025-04-07T14:11:26.494614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14023, node 1 2025-04-07T14:11:26.548528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.548809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.553711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.597891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.597909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.597944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.598011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19208 TClient is connected to server localhost:19208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.114486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.126788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:11:27.136831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.246445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.370979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.423834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.376466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573666314055910:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.376572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.602866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.623034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.644480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.667525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.702942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.734882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573666314056410:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.734935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.735096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573666314056415:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.737453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.744543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573666314056417:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.466560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.472679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5468, MsgBus: 12082 2025-04-07T14:11:29.937688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573672074949547:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:29.937747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014bb/r3tmp/tmpWJZP06/pdisk_1.dat 2025-04-07T14:11:29.993445Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5468, node 2 2025-04-07T14:11:30.037419Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.037437Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.037443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.037546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:30.037846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.037918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.039517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12082 TClient is connected to server localhost:12082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.383539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:11:30.403654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.475140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:11:30.599150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.648967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.789122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573684959853121:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND ... at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.027422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.077266Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.756430Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573718475525145:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.756492Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.788048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.811833Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.834893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.860376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.887858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.912011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.044019Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573722770492938:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.044084Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.044143Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573722770492943:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.047348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:41.055866Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573722770492945:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:42.041500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.054515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9778, MsgBus: 21263 2025-04-07T14:11:42.766495Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573726458594130:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:42.766546Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014bb/r3tmp/tmpzEUOr1/pdisk_1.dat 2025-04-07T14:11:42.850788Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:42.875568Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:42.875670Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:42.876954Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9778, node 5 2025-04-07T14:11:42.913338Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:42.913373Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:42.913380Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:42.913482Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21263 TClient is connected to server localhost:21263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:43.342288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:43.352482Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.421805Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.559556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.629871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.782953Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573739343497736:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.783034Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.820042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.846969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.874345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.903942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.930603Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.958593Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.008156Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573743638465524:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.008227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.008416Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573743638465529:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.011215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.018665Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573743638465532:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:46.953081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.961985Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ChangefeedOnIndexTable [GOOD] Test command err: Trying to start YDB, gRPC: 17460, MsgBus: 6476 2025-04-07T14:11:26.154188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573661421139555:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.154459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a8/r3tmp/tmpkbyN5G/pdisk_1.dat 2025-04-07T14:11:26.452358Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17460, node 1 2025-04-07T14:11:26.489240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.489354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.491122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.590015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6476 TClient is connected to server localhost:6476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.084181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:27.109995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.221723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.341175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.394025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.317920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573670011075820:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.317986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.644930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.664871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.684595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.704390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.724248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.754787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573670011076324:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.754821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.754877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573670011076329:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.756891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.763855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573670011076331:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.427080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22185, MsgBus: 23909 2025-04-07T14:11:30.132443Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573677722198604:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.132538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a8/r3tmp/tmpdwqPix/pdisk_1.dat 2025-04-07T14:11:30.206307Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22185, node 2 2025-04-07T14:11:30.233628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.233710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.235400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.259716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.259742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.259752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.259852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23909 TClient is connected to server localhost:23909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.593739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.610612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.683673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.865045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.916207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.583673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573686312134888:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.583749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.613167Z node 2 :FLAT_TX_SCHEMESHARD ... sed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.016433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.074943Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.906689Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573720065020629:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.906772Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.945474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.968951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.995638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.020347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.048980Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.073672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.188328Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573724359988422:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.188394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.189073Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573724359988427:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.192171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:41.201889Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573724359988429:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:42.180987Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30801, MsgBus: 27767 2025-04-07T14:11:42.927998Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573727197164801:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:42.928082Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a8/r3tmp/tmp0wIHdQ/pdisk_1.dat 2025-04-07T14:11:43.003325Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:43.033183Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.033295Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30801, node 5 2025-04-07T14:11:43.034890Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:43.068210Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:43.068235Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:43.068244Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:43.068368Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27767 TClient is connected to server localhost:27767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:43.499169Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.505860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:43.573257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.711003Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.771107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.529792Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573740082068397:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.529885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.565436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.592566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.623327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.653469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.680999Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.708247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.767145Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573740082068888:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.767215Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573740082068893:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.767230Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.770054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:45.777526Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573740082068895:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:46.722825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.901157Z node 5 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037920:1][5:7490573744377036883:2489] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:21:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithDecimalColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14307, MsgBus: 27766 2025-04-07T14:11:26.143867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573658559120218:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.146284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001494/r3tmp/tmp4F96ha/pdisk_1.dat 2025-04-07T14:11:26.451192Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14307, node 1 2025-04-07T14:11:26.513341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.513429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.517223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.590003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.590010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.590095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27766 TClient is connected to server localhost:27766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:27.124318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.144301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.248314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.362520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.413189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.503952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667149056520:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.504069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.692921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.715867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.737909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.758219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.780171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.843893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.871205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667149057016:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.871271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.871375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667149057021:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.873617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.879533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573667149057023:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.731050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29203, MsgBus: 10207 2025-04-07T14:11:30.386827Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573678531525160:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.386897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001494/r3tmp/tmp6NEz43/pdisk_1.dat 2025-04-07T14:11:30.454985Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29203, node 2 2025-04-07T14:11:30.488280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.488348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.489270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.498192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.498209Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.498213Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.498282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10207 TClient is connected to server localhost:10207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.860478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.868786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.908982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.037052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.106821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.568680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573687121461448:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.568754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.602175Z node 2 :FLAT_TX_SCHEMESH ... 490573723047629944:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.060512Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.077801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.102929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.127868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.150686Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.175262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.205850Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.347584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573723047630438:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.347654Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.347785Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573723047630443:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.350705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:41.357676Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573723047630445:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:42.222968Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.277168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25984, MsgBus: 6834 2025-04-07T14:11:42.961838Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573730225813849:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:42.961891Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001494/r3tmp/tmpQWD7IB/pdisk_1.dat 2025-04-07T14:11:43.033766Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25984, node 5 2025-04-07T14:11:43.064763Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.064845Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:43.066198Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:43.102270Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:43.102288Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:43.102293Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:43.102384Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6834 TClient is connected to server localhost:6834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:43.481741Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.497613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:43.567621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.744581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.816641Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.823605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573743110717453:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.823691Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.862388Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.886748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.911655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.934547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.961224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.026889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.100492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573747405685252:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.100585Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.100589Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573747405685257:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.105227Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.114127Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573747405685259:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:47.177034Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.232140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.285339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.342296Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.395154Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat [GOOD] Test command err: Trying to start YDB, gRPC: 7550, MsgBus: 27376 2025-04-07T14:11:26.155177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660114363064:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.155431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00146f/r3tmp/tmp8wHPr0/pdisk_1.dat 2025-04-07T14:11:26.428841Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7550, node 1 2025-04-07T14:11:26.510805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.511593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.513889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.593123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.593147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.593154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.593222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27376 TClient is connected to server localhost:27376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.111505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.131360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.240200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:11:27.363950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:11:27.435953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.470079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668704299371:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.470193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.630055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.652923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.673981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.696944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.719974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.743224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.778074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668704299861:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.778151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.778288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668704299866:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.780623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.787285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668704299868:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:29.681619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63249, MsgBus: 4700 2025-04-07T14:11:30.408042Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573676224389147:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.408157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00146f/r3tmp/tmpALSG2S/pdisk_1.dat 2025-04-07T14:11:30.478847Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63249, node 2 2025-04-07T14:11:30.510056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.510353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.512506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.547337Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.547359Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.547366Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.547449Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4700 TClient is connected to server localhost:4700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.913100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.929573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.970586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.085686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.133323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.690283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573684814325426:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.690368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.722612Z node 2 :FLAT_TX_SCHEMESHARD W ... hTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035102593 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithMinMaxPartitions" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) Trying to start YDB, gRPC: 19251, MsgBus: 61586 2025-04-07T14:11:43.318027Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573731965267903:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:43.318129Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00146f/r3tmp/tmpd8skLM/pdisk_1.dat 2025-04-07T14:11:43.388954Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19251, node 5 2025-04-07T14:11:43.418688Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.418762Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:43.420452Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:43.447417Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:43.447448Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:43.447461Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:43.447544Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61586 TClient is connected to server localhost:61586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:43.847668Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.863263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.932750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.065055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.125082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.470354Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744850171490:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.470435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.509148Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.539592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.566483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.593122Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.618229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.646557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.694301Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744850171981:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.694378Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.694387Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744850171986:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.697504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.705892Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573744850171988:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:47.574866Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035107640 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2025-04-07T14:11:47.620876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035107640 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2025-04-07T14:11:47.652993Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035107640 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... 
(TRUNCATED) >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> KqpScheme::AlterTableAddImplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncIndex >> KqpOlapScheme::CrateWithWrongCodec [GOOD] >> KqpOlapScheme::AlterCompressionType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropNonExistingResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 27562, MsgBus: 3661 2025-04-07T14:11:26.170736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659348895153:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.170793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014af/r3tmp/tmpLKDMJy/pdisk_1.dat 2025-04-07T14:11:26.484492Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27562, node 1 2025-04-07T14:11:26.549504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.549592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.551822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.593101Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.593120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.593127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.593212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3661 TClient is connected to server localhost:3661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.126822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.145245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:27.253821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.374776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.445952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.369923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667938831440:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.370029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.603638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.618967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.635862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.656117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.694241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.765655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667938831945:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.765710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.765765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667938831950:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.768219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.774142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573667938831952:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:47: Error: At function: KiAlterTable!
:3:47: Error: AlterTable : db.[/Root/KeyValue] Column: "Key" is a key column. Key column drop is not supported Trying to start YDB, gRPC: 64153, MsgBus: 15191 2025-04-07T14:11:30.239834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573677843584464:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.239902Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014af/r3tmp/tmpmzCC3g/pdisk_1.dat 2025-04-07T14:11:30.312004Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:30.346758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.346832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.348771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64153, node 2 2025-04-07T14:11:30.383980Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.384003Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.384010Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.384085Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15191 TClient is connected to server localhost:15191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.691936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.700503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.744977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.855068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.934242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.661803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573686433520762:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.661876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.689191Z node 2 :FLAT_TX_SCHEMESHARD WARN: ... 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:39.886081Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.893458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.963838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.080386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.127043Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:42.015856Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573726322199477:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.015940Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.051873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.075760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.097749Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.120929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.144316Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.173401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.290910Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573726322199971:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.290993Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.291034Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573726322199976:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.293780Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:42.301118Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573726322199978:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 16030, MsgBus: 13767 2025-04-07T14:11:43.815993Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573732527920637:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:43.816083Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014af/r3tmp/tmpQu7501/pdisk_1.dat 2025-04-07T14:11:43.923325Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:43.925510Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.925576Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:43.927380Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16030, node 5 2025-04-07T14:11:43.961782Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:43.961807Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:43.961818Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:43.961927Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13767 TClient is connected to server localhost:13767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:44.372244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.389834Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.459914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:44.575038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.625267Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.738945Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573745412824225:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.739051Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.779547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.803546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.827330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.855125Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.895351Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.923818Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.960854Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573745412824721:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.960926Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.960986Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573745412824726:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.964594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.973470Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573745412824728:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithPgColumn [GOOD] Test command err: Trying to start YDB, gRPC: 15119, MsgBus: 16011 2025-04-07T14:11:26.167453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660395189225:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.167498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b3/r3tmp/tmpnpuZAy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15119, node 1 2025-04-07T14:11:26.511719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:26.511764Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:26.524971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:26.543820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.543932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.548938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.590018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16011 TClient is connected to server localhost:16011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.111970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.127299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:27.217283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.347938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.419260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.353754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668985125506:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.353851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.601012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.618801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.640429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.658850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.693488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.723062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668985126007:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.723145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.723311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668985126012:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.727899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.734862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668985126014:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.511009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.561777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.564823Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found Trying to start YDB, gRPC: 21048, MsgBus: 8387 2025-04-07T14:11:30.097476Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573675992437004:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.097573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b3/r3tmp/tmpWVgI1s/pdisk_1.dat 2025-04-07T14:11:30.159786Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21048, node 2 2025-04-07T14:11:30.201275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.201296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.201302Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.201409Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:30.213573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.213641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.215265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8387 TClient is connected to server localhost:8387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.465492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.481214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.551454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.663263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.734604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.243671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [Wor ... 976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.324604Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.495958Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573724474998830:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.496029Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.496210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573724474998835:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.499560Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:41.506692Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573724474998837:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:42.364460Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10899, MsgBus: 12112 2025-04-07T14:11:43.077962Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573733345423402:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:43.078058Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b3/r3tmp/tmpfdGg1j/pdisk_1.dat 2025-04-07T14:11:43.171343Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:43.203805Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.203901Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:43.206837Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10899, node 5 2025-04-07T14:11:43.246473Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:43.246497Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:43.246505Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:43.246630Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12112 TClient is connected to server localhost:12112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:43.624323Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.631871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:43.698813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.823877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.874565Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.235277Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573746230326981:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.235350Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.274393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.302032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.328128Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.354582Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.379057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.406436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.441941Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573746230327472:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.442038Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.442123Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573746230327477:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.444682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.451825Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573746230327479:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:47.540447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.596294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.654964Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.709302Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.762427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.818233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.872779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.920280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.969340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.018946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.070055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.089721Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573733345423402:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:48.089820Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpAcl::FailResolve [GOOD] >> KqpAcl::FailedReadAccessDenied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 29091, MsgBus: 13621 2025-04-07T14:11:26.166054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660224813140:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.166127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b5/r3tmp/tmpPJEWY1/pdisk_1.dat 2025-04-07T14:11:26.470373Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29091, node 1 2025-04-07T14:11:26.525127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.525364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.531540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.589589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13621 TClient is connected to server localhost:13621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.144879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.177610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.298108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.421989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.494291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:28.388020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668814749442:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.388112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.604559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.623396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.645282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.664688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.698315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.723906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.751058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668814749943:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.751123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668814749948:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.751123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.753423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.758783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668814749950:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.566366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64387, MsgBus: 2927 2025-04-07T14:11:30.343811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573676642921986:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.343882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b5/r3tmp/tmpe1S5ET/pdisk_1.dat 2025-04-07T14:11:30.410712Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64387, node 2 2025-04-07T14:11:30.464848Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.464868Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.464894Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.464975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:30.465678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.465778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.467400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2927 TClient is connected to server localhost:2927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.754576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.772610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:11:30.818749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:11:31.009373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.067321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.659220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573685232858268:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.659279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.686862Z node 2 :FLAT_TX_SCHEMESHARD ... waiting... 2025-04-07T14:11:39.548899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.159500Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573722850627439:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.159566Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.195442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.218956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.240089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.259287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.282788Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.305655Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.438474Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573722850627935:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.438532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.438556Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573722850627940:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.440635Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:41.446821Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573722850627942:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:42.483586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:11:42.514213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.845101Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 27919, MsgBus: 12358 2025-04-07T14:11:43.877402Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573731334773422:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:43.877476Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b5/r3tmp/tmpLmF6nr/pdisk_1.dat 2025-04-07T14:11:43.949850Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27919, node 5 2025-04-07T14:11:43.978340Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:43.978425Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:43.980307Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:44.010606Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:44.010627Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:44.010633Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:44.010727Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12358 TClient is connected to server localhost:12358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:44.401153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.418347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.489754Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.614147Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.682954Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.472137Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744219677019:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.472201Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.536335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.559837Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.622791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.648388Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.675467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.706576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.740592Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744219677514:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.740651Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573744219677519:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.740676Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.743062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.750173Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573744219677521:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:47.799276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:11:47.832531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 waiting... >> KqpScheme::DropChangefeedNegative >> KqpScheme::CreatedAt [GOOD] >> KqpOlapScheme::DropThenAddColumn >> KqpScheme::DisableResourcePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] Test command err: Trying to start YDB, gRPC: 25922, MsgBus: 2952 2025-04-07T14:11:26.146719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659222246018:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.146758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014c0/r3tmp/tmpyA7b8g/pdisk_1.dat 2025-04-07T14:11:26.435584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25922, node 1 2025-04-07T14:11:26.485303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.485417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.487936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.589770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2952 TClient is connected to server localhost:2952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:27.136065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:28.357028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667812181248:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.357099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.584924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.635410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.635626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.635889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.635995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.636104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.636236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.636374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.636494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.636614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.636735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.636859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.636964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573667812181324:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:28.639676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:28.639740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:28.639858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:28.639932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:28.640115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:28.640153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:28.640244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:28.640276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:28.640371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:28.640406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:28.640443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:28.640479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:28.643227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:28.643346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:28.643635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:28.643693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:28.643879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:28.643916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:28.644128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:28.644159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:28.644298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:28.644358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstr ... ode 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:45.655258Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:45.655366Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26453 TClient is connected to server localhost:26453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:46.115299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:48.872300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573752091985476:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.872393Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.884503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.944130Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:48.944237Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:48.944552Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:48.944731Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:48.944873Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:48.945289Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:48.945445Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:48.945585Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:48.945736Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:48.945908Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:48.946086Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:48.946218Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573752091985523:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:48.950007Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:48.950084Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:48.950191Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:48.950234Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:48.950435Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:48.950474Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:48.950570Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:48.950606Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:48.950684Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:48.950718Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:48.950763Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:48.950803Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:48.951183Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:48.951234Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:48.951456Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:48.951500Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:48.951649Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:48.951683Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:48.951866Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:48.951902Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:48.952023Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:48.952060Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:48.985024Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573752091985594:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.985110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.000053Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573752091985599:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.000138Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
>> KqpOlapScheme::TtlRunInterval [GOOD]
>> KqpOlapScheme::TenThousandColumns
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD]
Test command err:
2025-04-07T14:09:29.740706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:399:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:29.741119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:29.741260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ce/r3tmp/tmp7C4ToA/pdisk_1.dat 2025-04-07T14:09:30.132156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14387, node 1 2025-04-07T14:09:30.487220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:30.487276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:30.487305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:30.487459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:30.490414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:30.593695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:30.593821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:30.619270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6270 2025-04-07T14:09:31.295119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.744403Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:34.787751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:34.787878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:34.840562Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:34.844550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:35.083251Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:35.141847Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:35.141954Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:35.186044Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:35.187277Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:35.187517Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:35.187576Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:35.187627Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:35.187732Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:35.187789Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:35.187834Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:35.188491Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:35.400503Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:35.400588Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1753:2548], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:35.411309Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1764:2556] 2025-04-07T14:09:35.420818Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2571] 2025-04-07T14:09:35.421257Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2571], schemeshard id = 72075186224037889 2025-04-07T14:09:35.424474Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:35.444298Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:35.444371Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:35.444444Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:35.447945Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:35.448052Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:35.462596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:35.473932Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:35.474088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:35.486680Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:35.502512Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:35.540312Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:35.830301Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:35.996857Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:36.849755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2136:3018], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.849891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.866194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:36.984051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:36.984294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:36.984626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:36.984780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:36.984896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:36.985027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:36.985142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:36.985284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:36.985419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:36.985625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:36.985769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:36.985909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2219:2804];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:37.013066Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:37.013176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:37.013300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:37.013339Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:37.013609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:37.013656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:37.013769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... 065Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 219.000000s, at schemeshard: 72075186224037889 2025-04-07T14:11:40.458180Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2025-04-07T14:11:40.472369Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:11:41.357647Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:11:41.357740Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:11:41.357807Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:11:41.357850Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:11:41.357892Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:11:41.358344Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:11:41.363135Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:11:41.366906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7071:5305], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.366988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7081:5310], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.367062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:41.376617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:11:41.418996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7085:5313], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:11:41.729332Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7202:5375]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:41.729578Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:11:41.729666Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7204:5377] 2025-04-07T14:11:41.729730Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7204:5377] 2025-04-07T14:11:41.730008Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7205:5378] 2025-04-07T14:11:41.730112Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7205:5378], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:11:41.730175Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:11:41.730287Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7204:5377], server id = [2:7205:5378], tablet id = 72075186224037897, status = OK 2025-04-07T14:11:41.730370Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:11:41.730456Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7202:5375], StatRequests.size() = 1 2025-04-07T14:11:41.872575Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmI5ZDY1ZmItY2UxNjYwOTktYjM1Y2VlNGItOWJiYWVhMzI=, TxId: 2025-04-07T14:11:41.872695Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmI5ZDY1ZmItY2UxNjYwOTktYjM1Y2VlNGItOWJiYWVhMzI=, TxId: 2025-04-07T14:11:41.873185Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:41.886932Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:11:41.887005Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:11:41.963217Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:11:41.963316Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:11:42.007016Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7204:5377], schemeshard count = 1 2025-04-07T14:11:42.693132Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:11:42.693239Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:11:42.695869Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:11:42.710391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:11:42.710790Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:42.710845Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:11:42.723470Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:11:43.633728Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:11:43.633812Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:11:43.633840Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:11:43.633870Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:11:43.633939Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:43.634551Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:11:43.647542Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:11:43.647659Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:11:43.648025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:43.648079Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-07T14:11:43.649130Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:11:44.003366Z node 2 :STATISTICS ERROR: [72075186224037897] Delete long analyze operation, OperationId=operationId 2025-04-07T14:11:44.174709Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:44.174878Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:11:45.211681Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvSendTopPartitions, time mismath: , partition interval end# 1970-01-03T00:02:04.000000Z, event time# 1970-01-03T00:02:04.010024Z 2025-04-07T14:11:46.097844Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:11:46.097937Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:11:46.097979Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:11:46.098020Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:11:48.298512Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:11:48.298586Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224037889 2025-04-07T14:11:48.298717Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2025-04-07T14:11:48.412527Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:11:48.412642Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2651:3165] 2025-04-07T14:11:48.412716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:11:48.412854Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:11:48.413258Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7457:5481], server id = [2:7458:5482], tablet id = 72075186224037899, status = OK 2025-04-07T14:11:48.413369Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7457:5481], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:48.414732Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:11:48.414817Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:11:48.414957Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:11:48.415110Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:11:48.415340Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:11:48.417359Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7457:5481], server id = [2:7458:5482], tablet id = 72075186224037899 2025-04-07T14:11:48.417393Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:48.417911Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:11:48.439493Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7489:5501]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:48.439637Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:11:48.439673Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7489:5501], StatRequests.size() = 1 >> KqpScheme::AlterColumnTableTiering [GOOD] >> KqpScheme::AlterAsyncReplication >> KqpScheme::InvalidationAfterDropCreate >> KqpScheme::CreateTableWithVectorIndexCovered >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects >> KqpConstraints::DropCreateSerial >> KqpScheme::UseUnauthorizedTable >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreatedAt [GOOD] Test command err: Trying to start YDB, gRPC: 25104, MsgBus: 63272 2025-04-07T14:11:29.809761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573670485771267:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:29.809818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001459/r3tmp/tmpNiJfi3/pdisk_1.dat 2025-04-07T14:11:30.010714Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25104, node 1 2025-04-07T14:11:30.078366Z node 
1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.078399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.078410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.078513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:30.170703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.170809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.172921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63272 TClient is connected to server localhost:63272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.482998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.505547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.612259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.733523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.784353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.796232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573679075707549:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:31.796327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:31.972521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:31.990124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.006730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.022570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.038292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.057523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.090074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573683370675339:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.090122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.090123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573683370675344:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.092189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:32.097827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573683370675346:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11581, MsgBus: 19376 2025-04-07T14:11:33.274408Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573688594879686:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:33.274456Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001459/r3tmp/tmpDLNTPo/pdisk_1.dat 2025-04-07T14:11:33.325390Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11581, node 2 2025-04-07T14:11:33.370416Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:33.370435Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:33.370442Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:33.370504Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:33.374501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:33.374551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:33.375812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19376 TClient is connected to server localhost:19376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:33.627638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:33.635685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:33.699835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:33.795692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:33.830400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:35.113131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573697184815990:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:35.113188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:35.124690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:35.143173Z node 2 :FLAT_TX_SCHEMESH ... bDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:41.192778Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.266510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.402798Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.476623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.766064Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573732945404661:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:43.766126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:43.809242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:43.840381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:43.862817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:43.889188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:43.914644Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:43.946856Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.039188Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573737240372452:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.039272Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.039278Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573737240372457:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.042309Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:44.050583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573737240372459:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:44.867434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2795, MsgBus: 2999 2025-04-07T14:11:45.543424Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573740871355468:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:45.543505Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001459/r3tmp/tmp1TAkbA/pdisk_1.dat 2025-04-07T14:11:45.609886Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2795, node 5 2025-04-07T14:11:45.645231Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:45.645341Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:45.646897Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:45.661819Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:45.661847Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:45.661859Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:45.661986Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2999 TClient is connected to server localhost:2999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:46.082688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.099921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:46.143753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.274499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.328483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:48.570011Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573753756259081:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.570075Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.608540Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.637635Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.703915Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.729113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.754922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.790483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.829032Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573753756259579:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.829085Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.829143Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573753756259584:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.831672Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:48.839309Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573753756259586:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:49.700838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 >> KqpScheme::CreateTableWithPartitionAtKeysSigned [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex >> KqpOlapTypes::AttributeNegative [GOOD] >> KqpOlapTypes::JsonImport >> KqpScheme::FamilyColumnTest >> KqpScheme::CreateDropTableMultipleTime >> KqpScheme::AlterTableReplaceIndex [GOOD] >> KqpScheme::AlterTableWithDecimalColumn >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] Test command err: Trying to start YDB, gRPC: 10300, MsgBus: 4031 2025-04-07T14:11:26.159477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659587701904:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.159547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014b7/r3tmp/tmpz6HmSH/pdisk_1.dat 2025-04-07T14:11:26.439967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10300, node 1 2025-04-07T14:11:26.502281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.502399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.503418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589544Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.589669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4031 TClient is connected to server localhost:4031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:27.115805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.127189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16); 2025-04-07T14:11:28.309334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668177636905:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.309402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.585646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.667801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.667863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.667989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.667989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.668215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.668227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.668336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.668338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.668459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.668460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.668575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.668576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.668741Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.668741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.668842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.668853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.668958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.668961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.669089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.669090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.669183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.669193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.669313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490573668177637114:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:11:28.669337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490573668177637097:2307];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:11:28.695126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490573668177637099:2309];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.695207Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7490573668177637099:2309];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.695402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490573668177637099:2309];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.695520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490573668177637099:2309];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.695596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490573668177637099:2309];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.695655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751 ... PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:48.439351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, timestamp Timestamp NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:50.653255Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573760875079912:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.653358Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.671460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.718732Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:50.718796Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:50.719022Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:50.719116Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:50.719197Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:50.719283Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:50.719372Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:50.719457Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:50.719576Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:50.719677Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:50.719755Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:50.719834Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573760875079958:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:50.722363Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:50.722414Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:50.722511Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:50.722546Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:50.722701Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:50.722736Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:50.722825Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:50.722874Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:50.722959Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:50.722996Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:50.723038Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:50.723068Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:50.723452Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:50.723511Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:50.723700Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:50.723742Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:50.723865Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:50.723897Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:50.724062Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:50.724099Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:50.724210Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:50.724240Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:50.766998Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573760875080028:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.767096Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.768525Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.791469Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573760875080055:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.791549Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.805827Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.825315Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573760875080089:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.825410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.839538Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> KqpConstraints::SerialTypeSmallSerial >> KqpScheme::TouchIndexAfterMoveIndexReadReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace >> KqpOlapScheme::AlterCompressionType [GOOD] >> KqpOlapScheme::CreateTableStoreWithFamily ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 21071, MsgBus: 21453 2025-04-07T14:11:26.140005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573661359289623:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.142151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a9/r3tmp/tmpRSiUaJ/pdisk_1.dat 2025-04-07T14:11:26.498972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21071, node 1 2025-04-07T14:11:26.517988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.518836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.520171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.589682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21453 TClient is connected to server localhost:21453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.141098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:27.157482Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:28.565911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669949224712:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.566001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.758543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.805897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.806086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.806315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.806421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.806544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.806642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.806749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.806853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.806976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.807082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.807187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.807297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573669949224788:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:28.809867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:28.809938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:28.810077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:28.810109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:28.810291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:28.810325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:28.810434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:28.810493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:28.810566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:28.810593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:28.810662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:28.810693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:28.811159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:28.811238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:28.811454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:28.811502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:28.811627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:28.811660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:28.811852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:28.811898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:28.812025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished ... ode 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:48.965139Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:48.965242Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14610 TClient is connected to server localhost:14610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:49.390368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:51.676189Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573766580891841:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.676313Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.696929Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.783531Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:51.783646Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:51.783965Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:51.784120Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:51.784268Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:51.784400Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:51.784501Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:51.784599Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:51.784803Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:51.784987Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:51.785196Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:51.785365Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573766580891887:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:51.791868Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:51.791945Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:51.792080Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:51.792124Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:51.792377Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:51.792424Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:51.792558Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:51.792625Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:51.792734Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:51.792772Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:51.792838Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:51.792875Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:51.793441Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:51.793502Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:51.793769Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:51.793819Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:51.793998Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:51.794040Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:51.794235Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:51.794273Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:51.794410Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:51.794454Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:51.831145Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573766580891959:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.831253Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.845636Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573766580891964:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.845760Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScheme::CreateDroppedTable >> KqpOlapScheme::DropThenAddColumn [GOOD] >> KqpOlapScheme::DropThenAddColumnIndexation >> KqpAcl::FailedReadAccessDenied [GOOD] >> KqpAcl::FailedWriteAccessDenied >> KqpScheme::DisableResourcePools [GOOD] >> KqpScheme::DisableResourcePoolsOnServerless >> KqpScheme::DropChangefeedNegative [GOOD] >> KqpScheme::DropExternalDataSource >> KqpScheme::InvalidationAfterDropCreate [GOOD] >> KqpScheme::InvalidationAfterDropCreateCompatSchema >> KqpScheme::CreateTableWithVectorIndexCovered [GOOD] >> KqpScheme::CreateTableWithVectorIndexCaseIncentive >> KqpScheme::DisableExternalDataSourcesOnServerless >> KqpScheme::AlterTableAddExplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex >> KqpScheme::UseUnauthorizedTable [GOOD] >> KqpScheme::UseNonexistentTable >> KqpConstraints::DropCreateSerial [GOOD] >> KqpConstraints::DefaultsAndDeleteAndUpdate >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects >> KqpScheme::FamilyColumnTest [GOOD] >> KqpScheme::Int8Int16 >> KqpOlapTypes::JsonImport [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex [GOOD] >> KqpScheme::CreateTableWithFamiliesRegular >> KqpOlapScheme::CreateTableStoreWithFamily [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapTypes::JsonImport [GOOD] Test command err: Trying to start YDB, gRPC: 25578, MsgBus: 2118 2025-04-07T14:11:26.223312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573657801229252:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.223382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014aa/r3tmp/tmpKVnfqN/pdisk_1.dat 2025-04-07T14:11:26.562262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:26.577428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.577558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.579156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25578, node 1 2025-04-07T14:11:26.610467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.610499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.610509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.610606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2118 TClient is connected to server localhost:2118 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.058756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY family1 (COMPRESSION="off")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:28.276968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573666391164260:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.277073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.585018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.639050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:28.639282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:28.639583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:28.639715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:28.639813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:28.639924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:28.640043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:28.640152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:28.640262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:28.640365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:28.640467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:28.640570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573666391164336:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:28.644954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:28.645019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:28.645120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:28.645154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:28.645359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:28.645399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:28.645520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:28.645560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:28.645625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:28.645654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:28.645706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:28.645741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:28.646265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:28.646311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:28.646516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:28.646556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:28.646676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:28.646706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:28.646977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:28.647013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:28.647155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:28.647219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fl ... ASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:52.036072Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:52.036078Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:52.036179Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25206 TClient is connected to server localhost:25206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:52.563890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, json Json, json_doc JsonDocument, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:55.323876Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573784291703204:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.323962Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.344612Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.397529Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:55.397610Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:55.397892Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:55.398038Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:55.398164Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:55.398282Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:55.398398Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:55.398514Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:55.398639Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:55.398754Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:55.398874Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:55.398983Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573784291703251:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:55.402168Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:55.402238Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:55.402353Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:55.402399Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:55.402574Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:55.402604Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:55.402685Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:55.402723Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:55.402788Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:55.402808Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:55.402852Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:55.402873Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:55.403264Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:55.403325Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:55.403538Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:55.403593Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:55.403821Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:55.403871Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:55.404193Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:55.404238Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:55.404424Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:55.404470Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400;columns=3; Bulk upsert to table '/Root/ColumnTableTest'bad batch in data: Invalid UTF8 sequence at string index 0; order:id, json, json_doc >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] >> KqpConstraints::SerialTypeSmallSerial [GOOD] >> KqpConstraints::SerialTypeSerial4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateTableStoreWithFamily [GOOD] Test command err: Trying to start YDB, gRPC: 14052, MsgBus: 19797 2025-04-07T14:11:38.362132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573709156116029:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:38.362336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001454/r3tmp/tmpywtXX5/pdisk_1.dat 2025-04-07T14:11:38.620486Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14052, node 1 2025-04-07T14:11:38.629580Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-07T14:11:38.667275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:38.667294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:38.667313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:38.667416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:38.725001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:38.725170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:38.726683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19797 TClient is connected to server localhost:19797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:39.060340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:39.070326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, TTL = Interval("PT1H") ON created_at); 2025-04-07T14:11:40.377496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573717746051112:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.377612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:40.542646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:40.582090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:40.582232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:40.582387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:40.582464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:40.582590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:40.582691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:40.582778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:40.582891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:40.582994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:40.583088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:40.583184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:40.583308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573717746051188:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:40.585894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:40.585971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:40.586095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:40.586127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:40.586315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:40.586343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:40.586456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:40.586495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:40.586572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:40.586611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:40.586664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:40.586717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:40.587122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:40.587223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:40.587427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:40.587464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:40.587619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:40.587656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:40.587828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:40.587861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:40.5879 ... er_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:52.417615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:52.417739Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:52.417874Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:52.417987Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:52.418102Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:52.418219Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:52.418329Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:52.418440Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:52.418551Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573772018537091:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:11:52.421816Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:52.421878Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:52.421976Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:52.422012Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:52.422183Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:52.422219Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:52.422305Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:52.422337Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:52.422400Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:52.422429Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:52.422475Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:52.422502Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:52.422846Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:52.422878Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:52.423063Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:52.423089Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:11:52.423229Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:52.423254Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:52.423438Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:52.423466Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:52.423569Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:52.423596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:11:52.515778Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573772018537161:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.515852Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.531137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5196, MsgBus: 64722 2025-04-07T14:11:53.117023Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573775270008727:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:53.117063Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001454/r3tmp/tmpwUOPkE/pdisk_1.dat 2025-04-07T14:11:53.209974Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:53.218008Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:53.218102Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:53.219700Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5196, node 6 2025-04-07T14:11:53.258803Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:53.258828Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:53.258834Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:53.258928Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64722 TClient is connected to server localhost:64722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:53.736901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLESTORE `/Root/TableStoreWithColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family2, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:56.325241Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573788154911234:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.325340Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScheme::AlterTableWithDecimalColumn [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveTableRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] Test command err: Trying to start YDB, gRPC: 28684, MsgBus: 16526 2025-04-07T14:11:26.154906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573661049117999:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.154981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00147e/r3tmp/tmptQnZ20/pdisk_1.dat 2025-04-07T14:11:26.501911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:26.518332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.518484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28684, node 1 2025-04-07T14:11:26.522852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.590011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.590035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.590040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.590109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16526 TClient is connected to server localhost:16526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.094390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.118624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
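The repeated KQP_WORKLOAD_SERVICE warnings in this output appear while the workload service tries to fetch the default resource pool before the ESchemeOpCreateResourcePool / TPoolCreatorActor step later in the same run has created it, so the lookup returns NOT_FOUND and is retried. As a minimal, assumption-laden sketch, a test setup could pre-create such a pool in YQL before issuing queries; the pool name matches the log, but the option names and values below are illustrative and should be checked against the current YDB workload-management syntax rather than treated as the test's actual setup.

-- Sketch only (not taken from the test source): pre-create the resource pool
-- that [WorkloadService] fetches, so the NOT_FOUND warnings are not emitted
-- before TPoolCreatorActor has finished. Limit values are placeholder assumptions.
CREATE RESOURCE POOL default WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);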
2025-04-07T14:11:27.225692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.329063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.376886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.340044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669639054112:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.340141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.600005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.618182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.635610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.655130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.718168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.749653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669639054609:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.749727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.749895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669639054614:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.752778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.760376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573669639054616:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 4721, MsgBus: 23046 2025-04-07T14:11:30.065713Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573675801912290:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.065798Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00147e/r3tmp/tmp5qh4sJ/pdisk_1.dat 2025-04-07T14:11:30.151696Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:30.169624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.169698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4721, node 2 2025-04-07T14:11:30.170935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.197739Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.197767Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.197774Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.197882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23046 TClient is connected to server localhost:23046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.520251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.526170Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:11:30.534622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.607428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.735718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.787633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.173142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573684391848583:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.173215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.211697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... ut propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.507311Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573739485979174:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.507397Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.558436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.583560Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.607934Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.633492Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.658525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.686476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:45.720244Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573739485979664:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.720321Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.720423Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573739485979669:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:45.723512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:45.731519Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573739485979671:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:46.893488Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10115, MsgBus: 65175 2025-04-07T14:11:47.575362Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573749874271671:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:47.575466Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00147e/r3tmp/tmpI68ybR/pdisk_1.dat 2025-04-07T14:11:47.708188Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:47.712299Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:47.712391Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:47.714474Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10115, node 6 2025-04-07T14:11:47.755091Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:47.755119Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:47.755129Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:47.755248Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65175 TClient is connected to server localhost:65175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:48.170883Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:48.187546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:48.246761Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:48.419417Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:48.486916Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:50.655506Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573762759175276:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
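Judging from the test name in the header above (KqpConstraints::AlterTableAddNotNullWithDefault) and from the ESchemeOpAlterTable and ESchemeOpFinalizeBuildIndexMainTable operations logged a few entries further down, this test appears to add a NOT NULL column with a default value to an existing table, which the schemeshard then carries out as an alter followed by a backfill-style build step. The statement below is only a hypothetical sketch of that shape: the table name, column name, type and default value are invented, and the exact YQL syntax should be taken from the test source rather than from this example.

-- Hypothetical illustration, not copied from the test: add a NOT NULL column
-- with a default to an existing row table. All names and the default are assumptions.
ALTER TABLE `/Root/ExampleTable` ADD COLUMN NewValue Int32 NOT NULL DEFAULT 0;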
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.655603Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.685358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.714168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.744166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.771434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.802022Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.871103Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.909891Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573762759175776:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.909969Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.909970Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573762759175781:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:50.912937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:50.921606Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573762759175783:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:52.060302Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.319816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715757:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.352185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.575577Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573749874271671:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:52.575677Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScheme::CreateDroppedTable [GOOD] >> KqpScheme::CreateExternalDataSource >> KqpScheme::DropExternalDataSource [GOOD] >> KqpScheme::DropDependentExternalDataSource >> KqpScheme::InvalidationAfterDropCreateCompatSchema [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2 >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> KqpAcl::FailedWriteAccessDenied [GOOD] >> KqpAcl::AclForOltpAndOlap+isOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] Test command err: Trying to start YDB, gRPC: 65436, MsgBus: 20202 2025-04-07T14:11:26.146186Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659174747315:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.147460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001488/r3tmp/tmpXBD2gf/pdisk_1.dat 2025-04-07T14:11:26.480372Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65436, node 1 2025-04-07T14:11:26.517337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.517424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.525155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.592368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.592420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.592434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.592509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20202 TClient is connected to server localhost:20202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.112474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.134308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.232558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.343184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.414029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.407211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667764683599:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.407310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.601319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.619937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.642440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.662578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.724520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.793500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667764684102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.793567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573667764684107:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.793574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.796277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.802855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573667764684109:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11162, MsgBus: 19659 2025-04-07T14:11:29.918334Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573671986526859:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:29.918418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001488/r3tmp/tmp1gggQe/pdisk_1.dat 2025-04-07T14:11:29.986975Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11162, node 2 2025-04-07T14:11:30.019914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.019974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.021098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.036528Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.036548Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.036554Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.036650Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19659 TClient is connected to server localhost:19659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.339005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.346190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.420263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:11:30.554603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:11:30.621456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.194966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573684871430447:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.195031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.225018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.248804Z node 2 :FLAT_TX_SCHEMESH ... 14:11:48.693547Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573755837243401:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:48.693676Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001488/r3tmp/tmpgEslh1/pdisk_1.dat 2025-04-07T14:11:48.787781Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:48.804303Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:48.804412Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23546, node 6 2025-04-07T14:11:48.805995Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:48.845521Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:48.845557Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:48.845566Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:48.845672Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31087 TClient is connected to server localhost:31087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:49.257234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.275326Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:49.346590Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.479094Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.546372Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.730227Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573768722147004:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.730322Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.775462Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.811205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.841855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.909981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.935667Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.968921Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.018760Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573773017114799:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.018855Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.018870Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573773017114804:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.022262Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:52.031115Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573773017114806:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:53.019555Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.693992Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573755837243401:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:53.694082Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:11:54.520504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.552114Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.731282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.761086Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710765:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.954577Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710767:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.983544Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710770:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.208759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710772:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.237798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710775:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.429001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710777:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.462961Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710780:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.679881Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710782:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.712862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710785:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.925477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710787:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.964168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710790:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.195205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710792:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.224028Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710795:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.453358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710797:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.483650Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710800:0, at schemeshard: 72057594046644480 >> KqpScheme::AlterTableWithPgColumn >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableWithDecimalColumn [GOOD] Test command err: Trying to start YDB, gRPC: 2244, MsgBus: 16283 2025-04-07T14:11:30.554315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573678624108397:2250];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.554358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001455/r3tmp/tmpy2RlOq/pdisk_1.dat 2025-04-07T14:11:30.827409Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2244, node 1 2025-04-07T14:11:30.899379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.899408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.899415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.899563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:30.929406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.929538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.931144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16283 TClient is connected to server localhost:16283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:31.291953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.306138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.414029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.518795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:31.588620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.896084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573687214044509:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.896708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:33.128491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.148773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.167846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.189681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.221725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.243097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:33.272727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573691509012299:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:33.272795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:33.272865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573691509012304:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:33.275225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:33.281604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573691509012306:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:33.907685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27384, MsgBus: 4973 2025-04-07T14:11:34.645343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573694308850623:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:34.645413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001455/r3tmp/tmpPDtKrK/pdisk_1.dat 2025-04-07T14:11:34.707389Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27384, node 2 2025-04-07T14:11:34.746143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:34.746205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:34.747326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:34.753361Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:34.753378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:34.753382Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:34.753454Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4973 TClient is connected to server localhost:4973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:35.050130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:35.058425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:35.124861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:35.279413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:35.332236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:36.953445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573702898786927:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:36.953559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:36.965549Z node 2 :FLAT_TX_SCHEMESHARD W ... : 72057594046644480 2025-04-07T14:11:48.741401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.769656Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:48.809074Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573754432353169:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.809139Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.809274Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573754432353174:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.812612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:48.820877Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573754432353176:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:50.059018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.128959Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.159474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.601459Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573741547449074:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.601529Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:11:51.741169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:11:51.774066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19544, MsgBus: 22562 2025-04-07T14:11:52.580206Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573772191613787:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:52.580271Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001455/r3tmp/tmpshhWgS/pdisk_1.dat 2025-04-07T14:11:52.684505Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:52.698811Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:52.698903Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:52.700531Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19544, node 5 2025-04-07T14:11:52.746496Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:52.746528Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:52.746537Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:52.746655Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22562 TClient is connected to server localhost:22562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:53.173502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.181824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.235928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.406851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.480996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.558766Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573785076517395:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.558857Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.596973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.626267Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.654012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.681500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.711879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.741709Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.795681Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573785076517892:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.795758Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.795763Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573785076517897:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.798933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:55.807408Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573785076517899:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:56.958681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.026096Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.099354Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.130350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.158948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.193503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> AnalyzeColumnshard::Analyze [GOOD] >> KqpScheme::UseNonexistentTable [GOOD] >> KqpScheme::UseDroppedTable >> KqpScheme::DisableS3ExternalDataSource >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-04-07T14:09:26.602184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.602582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.602750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001400/r3tmp/tmpcO6QHH/pdisk_1.dat 2025-04-07T14:09:26.985739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22986, node 1 2025-04-07T14:09:27.324346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.324416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.324450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.325211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.327365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.444946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.445121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.464142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3391 2025-04-07T14:09:28.174636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.460868Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.500739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.500859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.548204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.552430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.801650Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.881970Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.882058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:31.925511Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:31.925682Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:31.925873Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:31.925928Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:31.925991Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:31.926043Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:31.926097Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:31.926152Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:31.926529Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.152373Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.152477Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.159686Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.167877Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.168824Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.169962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.196765Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.196828Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.196901Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.199022Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.199114Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.213404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.230187Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.230353Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.243978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.260578Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.313599Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.630727Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.798656Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.730902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.731059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.809917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.024725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.024948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.025239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.025359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.025500Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.025606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:34.025702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:34.025809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:34.025920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:34.026030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:34.026146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:34.026271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:34.125779Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.125862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.126110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.126220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.126335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.126457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... 369410Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:57.369523Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:11:57.370326Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:11:57.383618Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:11:57.383797Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:11:57.384557Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8169:6083], server id = [2:8174:6088], tablet id = 72075186224037899, status = OK 2025-04-07T14:11:57.384933Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8169:6083], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.386147Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8170:6084], server id = [2:8175:6089], tablet id = 72075186224037900, status = OK 2025-04-07T14:11:57.386239Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8170:6084], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.386458Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8171:6085], server id = [2:8176:6090], tablet id = 72075186224037901, status = OK 2025-04-07T14:11:57.386521Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8171:6085], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.386638Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:11:57.387433Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8172:6086], server id = [2:8177:6091], tablet id = 72075186224037902, status = OK 2025-04-07T14:11:57.387499Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8172:6086], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.387921Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8173:6087], server id = [2:8178:6092], tablet id = 72075186224037903, status = OK 2025-04-07T14:11:57.387991Z node 2 
:STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8173:6087], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.388860Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:11:57.389121Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8169:6083], server id = [2:8174:6088], tablet id = 72075186224037899 2025-04-07T14:11:57.389156Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.389333Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:11:57.389506Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:11:57.389774Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:11:57.390073Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8170:6084], server id = [2:8175:6089], tablet id = 72075186224037900 2025-04-07T14:11:57.390098Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.390283Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8179:6093], server id = [2:8180:6094], tablet id = 72075186224037904, status = OK 2025-04-07T14:11:57.390347Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8179:6093], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.390584Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8171:6085], server id = [2:8176:6090], tablet id = 72075186224037901 2025-04-07T14:11:57.390610Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.391041Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8172:6086], server id = [2:8177:6091], tablet id = 72075186224037902 2025-04-07T14:11:57.391068Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.391163Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8173:6087], server id = [2:8178:6092], tablet id = 72075186224037903 2025-04-07T14:11:57.391185Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.391245Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8181:6095], server id = [2:8184:6098], tablet id = 72075186224037905, status = OK 2025-04-07T14:11:57.391322Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8181:6095], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.391820Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8182:6096], server id = [2:8186:6100], tablet id = 72075186224037906, status = OK 2025-04-07T14:11:57.391888Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8182:6096], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.391991Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8183:6097], server id = [2:8187:6101], tablet id = 72075186224037907, status = OK 2025-04-07T14:11:57.392047Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8183:6097], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.392174Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:11:57.393059Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8185:6099], server id = [2:8188:6102], tablet id = 72075186224037908, status = OK 2025-04-07T14:11:57.393137Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client 
id = [2:8185:6099], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.393283Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:11:57.393355Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:11:57.393896Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8179:6093], server id = [2:8180:6094], tablet id = 72075186224037904 2025-04-07T14:11:57.393924Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.394100Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:11:57.394178Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:11:57.394216Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:11:57.394391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:11:57.394583Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:11:57.394976Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8181:6095], server id = [2:8184:6098], tablet id = 72075186224037905 2025-04-07T14:11:57.395001Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.395103Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8183:6097], server id = [2:8187:6101], tablet id = 72075186224037907 2025-04-07T14:11:57.395125Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.395219Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8182:6096], server id = [2:8186:6100], tablet id = 72075186224037906 2025-04-07T14:11:57.395243Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.395342Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:11:57.395592Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8185:6099], server id = [2:8188:6102], tablet id = 72075186224037908 2025-04-07T14:11:57.395619Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.408833Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:11:57.409000Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:11:57.409525Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8191:6105], server id = [2:8192:6106], tablet id = 72075186224037900, status = OK 2025-04-07T14:11:57.409618Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8191:6105], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:57.410235Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:11:57.410311Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:11:57.410429Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:11:57.410567Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:11:57.410880Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:11:57.412600Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8191:6105], server id = [2:8192:6106], tablet id = 72075186224037900 2025-04-07T14:11:57.412632Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:57.413181Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:11:57.433144Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8209:6123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:57.433364Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:11:57.433404Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8209:6123], StatRequests.size() = 1 2025-04-07T14:11:57.550832Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDVlNWIzYjUtOTk3NzAyZjEtNDlmZGFmNjgtYWJlMzk5MTM=, TxId: 2025-04-07T14:11:57.550889Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDVlNWIzYjUtOTk3NzAyZjEtNDlmZGFmNjgtYWJlMzk5MTM=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:11:57.551520Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8218:6129]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:57.551709Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:57.552122Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:11:57.552170Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:11:57.555033Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:11:57.555092Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:11:57.555139Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:11:57.560057Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> KqpScheme::MoveTableWithSerialTypes >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects [GOOD] >> KqpScheme::ModifyPermissions >> KqpScheme::CreateTableWithTtlSettingsUncompat >> KqpScheme::Int8Int16 [GOOD] >> KqpScheme::DropResourcePool >> KqpScheme::AlterResourcePoolClassifier [GOOD] >> KqpScheme::AlterNonExistingResourcePoolClassifier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-04-07T14:09:29.957384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:29.957762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:29.957936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c6/r3tmp/tmpDtDvLN/pdisk_1.dat 2025-04-07T14:09:30.510417Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19779, node 1 2025-04-07T14:09:30.758290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:30.758341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:30.758373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:30.758935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:30.761343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:30.867081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:30.867229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:30.885193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20310 2025-04-07T14:09:31.631069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.859793Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:34.904955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:34.905058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:34.949274Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:34.953636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:35.177454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:35.230162Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:35.230264Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:35.258713Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:35.258933Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:35.259174Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:35.259238Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:35.259292Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:35.259358Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:35.259416Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:35.259490Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:35.259928Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:35.465312Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:35.465414Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:35.472559Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:35.488525Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:35.489404Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:35.490249Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:35.514792Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:35.514847Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:35.514912Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:35.516880Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:35.516973Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:35.531416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:35.546535Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:35.546662Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:35.558923Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:35.575215Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:35.623895Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:35.923813Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:36.109280Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:36.959294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.959454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.976595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:37.078523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:37.078764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:37.079020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:37.079140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:37.079269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:37.079424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:37.079561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:37.079669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:37.079777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:37.079878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:37.079992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:37.080147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:37.106812Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:37.106904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:37.107027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:37.107087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:37.107278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:37.107325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:37.107414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema ... nt = 0, need schemeshards count = 1 2025-04-07T14:11:54.183023Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:11:54.183113Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7211:5384], server id = [2:7212:5385], tablet id = 72075186224037897, status = OK 2025-04-07T14:11:54.183170Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:11:54.183224Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7209:5382], StatRequests.size() = 1 2025-04-07T14:11:54.286772Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGI4MzY3NjEtOWJiYmNmMzYtNjdjZWVlNDMtODczNTM2ZGU=, TxId: 2025-04-07T14:11:54.286821Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGI4MzY3NjEtOWJiYmNmMzYtNjdjZWVlNDMtODczNTM2ZGU=, TxId: 2025-04-07T14:11:54.287157Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:54.299810Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:11:54.299866Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:11:54.375270Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:11:54.375362Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:11:54.429031Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7211:5384], schemeshard count = 1 2025-04-07T14:11:55.192298Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:11:55.192402Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:11:55.195470Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:11:55.211614Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:11:55.212089Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:55.212153Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:11:55.236936Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:11:56.223803Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:11:56.223853Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:11:56.223877Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:11:56.223904Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:11:56.223954Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:56.224474Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:11:56.237821Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:11:56.237951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:11:56.238361Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:56.238423Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:11:56.239448Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:11:56.239555Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:11:56.239770Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:11:56.253093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 
2025-04-07T14:11:56.253185Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:11:56.253358Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:11:56.253828Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7346:5461], server id = [2:7347:5462], tablet id = 72075186224037899, status = OK 2025-04-07T14:11:56.253948Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7346:5461], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:56.254917Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:11:56.255027Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:11:56.255318Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:11:56.255509Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:11:56.255731Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7346:5461], server id = [2:7347:5462], tablet id = 72075186224037899 2025-04-07T14:11:56.255767Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:56.255951Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:11:56.258358Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:11:56.277585Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7364:5479]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:56.277786Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:11:56.277823Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7364:5479], StatRequests.size() = 1 2025-04-07T14:11:56.397097Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGZkYjMwYTUtOTFmODVmZjYtZTQ5NWFiOWEtZmRhYzc4Nw==, TxId: 2025-04-07T14:11:56.397169Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGZkYjMwYTUtOTFmODVmZjYtZTQ5NWFiOWEtZmRhYzc4Nw==, TxId: 2025-04-07T14:11:56.397585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:56.410806Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:56.410859Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:11:56.926806Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:11:56.926904Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:11:57.409525Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:11:57.409610Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-07T14:11:57.409659Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:11:58.432540Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:58.432706Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:11:58.454208Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:11:58.454329Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:11:58.454361Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:58.454967Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:11:58.467871Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:11:58.468191Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:11:58.468243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:11:58.468502Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:11:58.481673Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:11:58.481843Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:11:58.482353Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7457:5531], server id = [2:7458:5532], tablet id = 72075186224037899, status = OK 2025-04-07T14:11:58.482450Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7457:5531], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:11:58.482961Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:11:58.483027Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:11:58.483203Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:11:58.483341Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:11:58.483502Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:11:58.485212Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7457:5531], server id = [2:7458:5532], tablet id = 72075186224037899 2025-04-07T14:11:58.485244Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:11:58.485611Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:11:58.505082Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjI3MjgwZGQtNTY0MDdkOTAtZTVhMTIwN2MtZjQ0NDBmZTQ=, TxId: 2025-04-07T14:11:58.505137Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjI3MjgwZGQtNTY0MDdkOTAtZTVhMTIwN2MtZjQ0NDBmZTQ=, TxId: 2025-04-07T14:11:58.505511Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:58.518674Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:58.518735Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2655:3163] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUncompat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] Test command err: Trying to start YDB, gRPC: 21099, MsgBus: 8263 2025-04-07T14:11:26.146469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659443608824:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.149255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149d/r3tmp/tmpL7i0pE/pdisk_1.dat 2025-04-07T14:11:26.519747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:26.526571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.526652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.527999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21099, node 1 2025-04-07T14:11:26.593121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.593145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.593151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.593227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8263 TClient is connected to server localhost:8263 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.110286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.119979Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:11:27.135371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.256737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.386902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.461892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.341595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668033545022:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.341678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.583890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.602830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.618894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.637663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.669990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.694144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.723583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668033545522:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.723633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.723696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668033545527:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.727906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.734898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668033545529:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:30.544697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710772:0, at schemeshard: 72057594046644480 2025-04-07T14:11:30.576219Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481854:2767], TxId: 281474976710770, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDgzOTVlY2YtMjBmNmU4NzQtNGM2Nzg2MC0xMmU2ODJmMA==. CustomerSuppliedId : . TraceId : 01jr89f41nccbbpe7typ09kjaj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } } 2025-04-07T14:11:30.578684Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481854:2767], TxId: 281474976710770, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDgzOTVlY2YtMjBmNmU4NzQtNGM2Nzg2MC0xMmU2ODJmMA==. CustomerSuppliedId : . TraceId : 01jr89f41nccbbpe7typ09kjaj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } }. 2025-04-07T14:11:30.579161Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481856:2769], TxId: 281474976710771, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Nzg0ZmQyZGUtYjVhZmQ1YTctNjNlNDZhOWQtNDBiMDUxZDg=. TraceId : 01jr89f419bwgv3zmfhy0jq6td. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } } 2025-04-07T14:11:30.579216Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481856:2769], TxId: 281474976710771, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Nzg0ZmQyZGUtYjVhZmQ1YTctNjNlNDZhOWQtNDBiMDUxZDg=. TraceId : 01jr89f419bwgv3zmfhy0jq6td. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } }. 2025-04-07T14:11:30.579437Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481858:2771], TxId: 281474976710773, task: 1. Ctx: { TraceId : 01jr89f4160e2vmm8dmc9nbqra. SessionId : ydb://session/3?node_id=1&id=YzViODkxNjEtZTM1MWNiNTktOGFhMmU5ZTItN2U1OTYwNTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } } 2025-04-07T14:11:30.579458Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481858:2771], TxId: 281474976710773, task: 1. Ctx: { TraceId : 01jr89f4160e2vmm8dmc9nbqra. SessionId : ydb://session/3?node_id=1&id=YzViODkxNjEtZTM1MWNiNTktOGFhMmU5ZTItN2U1OTYwNTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037891 node# 1 state# Ready) } }. 2025-04-07T14:11:30.588140Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481860:2773], TxId: 281474976710774, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjZlMTUxYmUtM2MyYWQxYzEtYzFkODRiYzAtYWIwZWY5NTk=. TraceId : 01jr89f4152wxnar8qyyctawfd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037892 node# 1 state# Ready) } } 2025-04-07T14:11:30.588178Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573676623481860:2773], TxId: 281474976710774, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjZlMTUxYmUtM2MyYWQxYzEtYzFkODRiYzAtYWIwZWY5NTk=. TraceId : 01jr89f4152wxnar8qyyctawfd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. } ... _FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.103478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.103495Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573737451585055:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.106564Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:44.114578Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573737451585057:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1375, MsgBus: 17205 2025-04-07T14:11:46.086204Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573744413499365:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:46.086294Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149d/r3tmp/tmpwydvPY/pdisk_1.dat 2025-04-07T14:11:46.175510Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:46.207213Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:46.207340Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:46.208956Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1375, node 5 2025-04-07T14:11:46.243080Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:46.243102Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:46.243111Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:46.243219Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17205 TClient is connected to server localhost:17205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:46.660514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.676852Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.745612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:46.898290Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.955902Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.087387Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573757298402965:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.087535Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.129468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.155660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.185113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.208967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.234143Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.301352Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.377067Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573757298403469:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.377142Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.377287Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573757298403474:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.381250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:49.389126Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573757298403476:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:50.386688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:11:50.982469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:11:51.086622Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573744413499365:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:51.086692Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:11:51.507265Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-04-07T14:11:52.000106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.580601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.043526Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.007935Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573787363176082:2769], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:56.008747Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:56.018525Z node 5 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=5&id=ZGY4MjFiNzItMmMzMGU1M2ItMmY2ZmQ4ZWEtYzBiNDVlOGE=, TxId: 01jr89fwxeay3z7wr3y8e0rstj 2025-04-07T14:11:58.281912Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573795953111114:2911], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:58.282018Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:58.506400Z node 5 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=5&id=MjVhN2VkYjEtMzBmNGY0YzktMWQyYTIxNjgtOGEzZWQ2YmQ=, TxId: 01jr89fzcjecxv5dydcmezfsn0 >> KqpConstraints::DefaultsAndDeleteAndUpdate [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 >> KqpOlapScheme::InvalidColumnInTieringRule >> KqpScheme::AlterTableAddExplicitAsyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex >> KqpScheme::CreateTableWithFamiliesRegular [GOOD] >> KqpScheme::DisableResourcePoolsOnServerless [GOOD] >> KqpScheme::DisableResourcePoolClassifiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AddNonColumnDoesnotReturnInternalError [GOOD] Test command err: Trying to start YDB, gRPC: 16131, MsgBus: 12780 2025-04-07T14:11:26.154421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660155893286:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.154505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149e/r3tmp/tmpOZRUdp/pdisk_1.dat 2025-04-07T14:11:26.480992Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16131, node 1 2025-04-07T14:11:26.535686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.535780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.537441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.590115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.590148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.590154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.590231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12780 TClient is connected to server localhost:12780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:27.137575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.159609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.279345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.396995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.454017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.455046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668745829573:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.455125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.624335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.646545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.665213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.686898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.708704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.731309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.761361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668745830063:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.761428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.761617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668745830068:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.764613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.771464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668745830070:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.458611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.573092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr89f32863x1x1b2b0w4h6df, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y0YjA0ZDItZjdkZjA4MGItZTA1ZThiMWMtZTQyYTdhNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trying to start YDB, gRPC: 2560, MsgBus: 18340 2025-04-07T14:11:30.137484Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573678459739094:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.137527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149e/r3tmp/tmpY8uATR/pdisk_1.dat 2025-04-07T14:11:30.203021Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2560, node 2 2025-04-07T14:11:30.240308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.240382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.242041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.252958Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.252976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.252982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.253058Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18340 TClient is connected to server localhost:18340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:11:30.581816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:30.587240Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.598559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:30.651685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.773782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.845797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.388058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:74905736870496 ... ssor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:11:50.615202Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00149e/r3tmp/tmpcN8pBK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5205, node 6 2025-04-07T14:11:51.022756Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:51.023693Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.023754Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:51.023797Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.024080Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.069389Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.069532Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.080888Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14063 TClient is connected to server localhost:14063 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.409537Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.568225Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.934632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.336388Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.665903Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.190257Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1725:3346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.190536Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.210404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.456032Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.722908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.002807Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.294546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.642955Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.936495Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:2295:3789], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.936635Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.937013Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:2300:3794], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.942371Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:55.143757Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:2302:3796], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:56.104689Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.332862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.012658Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:3057:4412], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:2:37: Error: At function: Member
:2:37: Error: Member not found: Value3. Did you mean Value? 2025-04-07T14:11:59.014637Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NWM0ZTFiOTktODkyZjkwZjctYzczMzcxNGUtYzY1YmY2MmM=, ActorId: [6:2587:4034], ActorState: ExecuteState, TraceId: 01jr89fzwg85gktt1y9f6kww7x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: GENERIC_ERROR:
: Error: Type annotation, code: 1030
:2:17: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:2:37: Error: At function: Member
:2:37: Error: Member not found: Value3. Did you mean Value? 2025-04-07T14:11:59.046473Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:3066:4421], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2025-04-07T14:11:59.048505Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NWM0ZTFiOTktODkyZjkwZjctYzczMzcxNGUtYzY1YmY2MmM=, ActorId: [6:2587:4034], ActorState: ExecuteState, TraceId: 01jr89fzxn9vd2npc7gdj2wn6g, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2025-04-07T14:11:59.078885Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:3075:4430], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2025-04-07T14:11:59.081057Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NWM0ZTFiOTktODkyZjkwZjctYzczMzcxNGUtYzY1YmY2MmM=, ActorId: [6:2587:4034], ActorState: ExecuteState, TraceId: 01jr89fzyqa421pnj9cc8y6whx, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2025-04-07T14:11:59.126684Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:3084:4439], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2025-04-07T14:11:59.128865Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NWM0ZTFiOTktODkyZjkwZjctYzczMzcxNGUtYzY1YmY2MmM=, ActorId: [6:2587:4034], ActorState: ExecuteState, TraceId: 01jr89fzzqbdd4e9a6xqz03w0w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: GENERIC_ERROR:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2025-04-07T14:11:59.317766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 >> KqpConstraints::SerialTypeSerial4 [GOOD] >> KqpConstraints::SerialTypeSerial8 >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat >> KqpScheme::DisableExternalDataSourcesOnServerless [GOOD] >> KqpScheme::DisableCreateExternalDataSource >> KqpScheme::CreateAndDropUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithFamiliesRegular [GOOD] Test command err: Trying to start YDB, gRPC: 12021, MsgBus: 32586 2025-04-07T14:11:40.179651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573718922619604:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:40.179724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00144f/r3tmp/tmp45n3m0/pdisk_1.dat 2025-04-07T14:11:40.396222Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12021, node 1 2025-04-07T14:11:40.467512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:40.467549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:40.467562Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:40.467724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:40.514246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:40.514409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:40.516043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32586 TClient is connected to server localhost:32586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:40.861562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.883011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:40.984399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.102581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:41.161903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:42.326218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573727512555928:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.326310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.525139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.545496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.563482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.581592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.602534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.665710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:42.692371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573727512556423:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.692418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.692441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573727512556428:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:42.694670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:42.700226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573727512556430:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:43.416598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24166, MsgBus: 16749 2025-04-07T14:11:43.986766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573733369160293:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:43.986846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00144f/r3tmp/tmpNr6U9r/pdisk_1.dat 2025-04-07T14:11:44.064079Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24166, node 2 2025-04-07T14:11:44.088977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:44.089055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:44.090576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:44.102774Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:44.102798Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:44.102804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:44.102883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16749 TClient is connected to server localhost:16749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:44.378352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.386730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:44.454557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.545339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.611374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.086087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573746254063880:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.086154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.118025Z node 2 :FLAT_TX_SCHEMESH ... Id: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.180386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.219673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.350278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.422474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.645782Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573780806769214:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.645898Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.683784Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.718065Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.748955Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.774217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.797431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.835427Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.907363Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573780806769710:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.907436Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.907480Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573780806769715:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.909960Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.916075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573780806769717:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:55.948283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7839, MsgBus: 1594 2025-04-07T14:11:56.707897Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573789418440204:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:56.707997Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00144f/r3tmp/tmppM3QHX/pdisk_1.dat 2025-04-07T14:11:56.795034Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:56.819763Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:56.819846Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:56.821286Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7839, node 5 2025-04-07T14:11:56.865512Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:56.865535Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:56.865545Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:56.865634Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1594 TClient is connected to server localhost:1594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:57.304215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:57.311537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:57.381751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:57.533587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:57.585332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.645445Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573802303343802:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:59.645527Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:59.685274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.714914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.739527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.764888Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.795394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.863814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:59.894998Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573802303344301:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:59.895062Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:59.895088Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573802303344306:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:59.898141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:59.905765Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573802303344308:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:00.897924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpScheme::CreateExternalDataSource [GOOD] >> KqpScheme::CreateExternalDataSourceWithSa >> KqpScheme::InvalidationAfterDropCreateTable2 [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] Test command err: Trying to start YDB, gRPC: 10825, MsgBus: 5687 2025-04-07T14:11:26.148714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660187695476:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.148939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014ae/r3tmp/tmpdpx2hx/pdisk_1.dat 2025-04-07T14:11:26.452494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:26.481430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.481540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.485752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10825, node 1 2025-04-07T14:11:26.593130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.593151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.593159Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.593246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5687 TClient is connected to server localhost:5687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.104206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:27.121419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.241144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.359889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.430976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.333597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668777631550:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.333782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.600956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.622662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.643033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.662462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.684551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.706236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.733841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668777632052:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.733909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.733951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668777632057:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.736374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.742274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668777632059:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 3622, MsgBus: 17518 2025-04-07T14:11:29.979236Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573672854351811:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:29.979333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014ae/r3tmp/tmpYPee8x/pdisk_1.dat 2025-04-07T14:11:30.043174Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3622, node 2 2025-04-07T14:11:30.080586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.080698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.082102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.100451Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.100475Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.100482Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.100566Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17518 TClient is connected to server localhost:17518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:11:30.444892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:11:32.224367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573685739254319:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.224435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.236837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573685739254331:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.236939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.249859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573685739254336:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.249909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 11154, MsgBus: 15098 2025-04-07T14:11:32.811524Z node 3 :METADATA_PROVIDER ... DUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:12:01.348070Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ReadyState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, received request, proxyRequestId: 57 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [6:7490573812025651336:2724] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-04-07T14:12:01.348098Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ReadyState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, request placed into pool from cache: default 2025-04-07T14:12:01.348189Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ExecuteState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, Sending CompileQuery request 2025-04-07T14:12:01.357945Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7490573781960878298:2600][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 20, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:12:01.358007Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7490573781960878298:2600][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 21, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:12:01.358789Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490573812025651338:2725], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-07T14:12:01.359021Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ExecuteState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-07T14:12:01.359067Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ExecuteState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:12:01.359087Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ExecuteState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, EndCleanup, isFinal: 0 2025-04-07T14:12:01.359246Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ExecuteState, TraceId: 01jr89g264fdpbnqtn4g7fph5r, Sent query response back to proxy, proxyRequestId: 57, proxyId: [6:7490573764781008305:2255] 2025-04-07T14:12:01.359885Z node 6 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-07T14:12:01.360191Z node 6 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-07T14:12:01.360241Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:12:01.360266Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:12:01.360289Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:12:01.360304Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:12:01.360364Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmM2YWI2OTYtOTdjOTVmMTAtZGE1ODRhMDUtYTNlYjcyNjM=, ActorId: [6:7490573812025651335:2723], ActorState: unknown state, Session actor destroyed 2025-04-07T14:12:01.566626Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M= 2025-04-07T14:12:01.566728Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:12:01.567103Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ReadyState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, received request, proxyRequestId: 59 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`; rpcActor: [6:7490573812025651346:2730] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-04-07T14:12:01.567124Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ReadyState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, request placed into pool from cache: default 2025-04-07T14:12:01.567178Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ExecuteState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, Sending CompileQuery request 2025-04-07T14:12:01.574637Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7490573769075976346:2547][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 44, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:12:01.574750Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7490573769075976346:2547][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 45, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:12:01.575406Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490573812025651348:2731], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2025-04-07T14:12:01.575586Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ExecuteState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-07T14:12:01.575624Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ExecuteState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:12:01.575643Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ExecuteState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, EndCleanup, isFinal: 0 2025-04-07T14:12:01.575757Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ExecuteState, TraceId: 01jr89g2czcyvkwwjxxxd1dgxd, Sent query response back to proxy, proxyRequestId: 59, proxyId: [6:7490573764781008305:2255] 2025-04-07T14:12:01.576315Z node 6 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-07T14:12:01.576639Z node 6 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2025-04-07T14:12:01.576704Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:12:01.576731Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:12:01.576768Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:12:01.576792Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:12:01.576854Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzA0NzlmYTUtNzA5MmE2YTYtNjBlYjYwODItZDMxZWI4Y2M=, ActorId: [6:7490573812025651345:2729], ActorState: unknown state, Session actor destroyed >> KqpScheme::DropDependentExternalDataSource [GOOD] >> KqpScheme::DropAsyncReplication >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag [GOOD] >> KqpScheme::CreateTableWithVectorIndexPublicApi >> KqpScheme::AlterTableWithPgColumn [GOOD] >> KqpScheme::AlterUser >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat >> KqpScheme::TouchIndexAfterMoveTableRead [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite >> KqpScheme::UseDroppedTable [GOOD] >> KqpScheme::UnknownFamilyTest >> KqpScheme::CreateTableWithTtlSettingsUncompat [GOOD] >> KqpScheme::CreateTableWithTtlSettingsCompat >> KqpScheme::DropResourcePool [GOOD] >> KqpScheme::DropResourcePoolClassifier >> KqpScheme::CreateTableWithUniformPartitionsUncompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUuid >> KqpOlapTypes::Timestamp >> KqpScheme::MoveTableWithSerialTypes [GOOD] >> KqpScheme::PathWithNoRoot |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> KqpScheme::ModifyPermissions [GOOD] >> KqpScheme::ModifyPermissionsByRelativePath >> KqpConstraints::DefaultValuesForTableNegative3 [GOOD] >> KqpConstraints::DefaultValuesForTableNegative4 |94.7%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat >> KqpScheme::CreateAndDropUser [GOOD] >> KqpScheme::CreateAndDropGroup >> KqpAcl::AclForOltpAndOlap+isOlap [GOOD] >> KqpAcl::AclForOltpAndOlap-isOlap >> KqpConstraints::SerialTypeSerial8 [GOOD] >> KqpConstraints::Utf8AndDefault >> KqpScheme::DisableCreateExternalDataSource [GOOD] >> KqpScheme::DisableDropExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-04-07T14:09:26.341714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.342061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.342203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f8/r3tmp/tmp2T5dQu/pdisk_1.dat 2025-04-07T14:09:26.801290Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31876, node 1 2025-04-07T14:09:27.317036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.317086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.317116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.317708Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.326951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.442586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.442721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.467593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29499 2025-04-07T14:09:28.188289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.314839Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.373546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.373660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.418999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.427481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.635413Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.700546Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.700632Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:31.730125Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:31.730260Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:31.730462Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:31.730518Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:31.730561Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:31.730608Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:31.730652Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:31.730769Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:31.731133Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:31.945760Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:31.945857Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:31.952062Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:31.961071Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:31.961910Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:31.962941Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:31.995727Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:31.995808Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:31.995884Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:31.998173Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.998269Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.021556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.035089Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.035252Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.051824Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.072104Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.114387Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.488870Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.629194Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.730445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.730576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.809780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.136686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.136934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.137250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.137416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.137567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.137698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:34.137813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:34.137930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:34.138053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:34.138174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:34.138304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:34.138417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2284:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:34.194363Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.194463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.194741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.194917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.195039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.195148Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2849];tablet_id=72075186224037900;process=TTx ... = [2:8048:6017], schemeshard count = 1 2025-04-07T14:12:02.983072Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:02.983134Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:12:02.983172Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:02.983212Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:02.989102Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:03.004804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:03.005318Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:03.005418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:03.006291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:03.006351Z node 2 :STATISTICS WARN: [72075186224037897] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-04-07T14:12:03.006401Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:04.388188Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:04.388266Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:04.388677Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:04.402029Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:04.402240Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:04.403275Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8213:6106], server id = [2:8218:6111], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:04.403805Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8213:6106], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.404117Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8214:6107], server id = [2:8219:6112], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:04.404187Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8214:6107], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.404327Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8215:6108], server id = [2:8220:6113], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:04.404362Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8215:6108], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.405717Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:04.405861Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:04.406058Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8216:6109], server id = [2:8221:6114], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:04.406104Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8216:6109], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.406246Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:04.406426Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8217:6110], server id = [2:8222:6115], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:04.406488Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8217:6110], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.406984Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8213:6106], server id = [2:8218:6111], tablet id = 72075186224037899 2025-04-07T14:12:04.407025Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.407640Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8214:6107], server id = [2:8219:6112], tablet id = 72075186224037900 2025-04-07T14:12:04.407669Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.407850Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:04.408057Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8215:6108], server id = [2:8220:6113], tablet id = 72075186224037901 2025-04-07T14:12:04.408080Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.408238Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8223:6116], server id = [2:8226:6119], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:04.408304Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8223:6116], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.408468Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:04.408773Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8224:6117], server id = [2:8227:6120], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:04.408838Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8224:6117], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.409426Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8216:6109], server id = [2:8221:6114], tablet id = 72075186224037902 2025-04-07T14:12:04.409461Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.409964Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8225:6118], server id = [2:8228:6121], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:04.410029Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8225:6118], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.410193Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8217:6110], server id = [2:8222:6115], tablet id = 72075186224037903 2025-04-07T14:12:04.410214Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.410263Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8229:6122], server id = [2:8230:6123], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:04.410306Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8229:6122], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.410850Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:04.410893Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:04.411112Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8231:6124], server id = [2:8232:6125], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:04.411163Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8231:6124], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:04.411229Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:04.411302Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:04.411480Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8223:6116], server id = [2:8226:6119], tablet id = 72075186224037904 2025-04-07T14:12:04.411495Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.411746Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8224:6117], server id = [2:8227:6120], tablet id = 72075186224037905 2025-04-07T14:12:04.411762Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.411811Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8225:6118], server id = [2:8228:6121], tablet id = 72075186224037906 2025-04-07T14:12:04.411823Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.411842Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:04.411874Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:04.411975Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:04.412132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 
2025-04-07T14:12:04.412352Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:04.413780Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8229:6122], server id = [2:8230:6123], tablet id = 72075186224037907 2025-04-07T14:12:04.413813Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.414123Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8231:6124], server id = [2:8232:6125], tablet id = 72075186224037908 2025-04-07T14:12:04.414145Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:04.414480Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:04.432590Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8249:6142]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:04.432843Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:04.432888Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8249:6142], StatRequests.size() = 1 2025-04-07T14:12:04.548834Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmU0OTYzNDItNWZhMjY0LTYzMTQwMzA5LWIzNDNkM2U3, TxId: 2025-04-07T14:12:04.548894Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmU0OTYzNDItNWZhMjY0LTYzMTQwMzA5LWIzNDNkM2U3, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:12:04.549490Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8257:6148]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:04.549723Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:04.550120Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:04.550167Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:04.553274Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:04.553340Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:04.553385Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:04.558484Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2025-04-07T14:09:26.390113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.390582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.390770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013f6/r3tmp/tmpweFk5C/pdisk_1.dat 2025-04-07T14:09:26.831072Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13344, node 1 2025-04-07T14:09:27.315850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.315892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.315912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.316305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.327035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.450773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.450917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.473540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21605 2025-04-07T14:09:28.199510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.594666Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.638364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.638481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.678319Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.682462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.934576Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.990109Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.990208Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.020567Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.020785Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.021011Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.021076Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.021139Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.021188Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.021257Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.021324Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.021767Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.248416Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.248529Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.259081Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.267965Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.268893Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.269995Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.304595Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.304902Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.304994Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.306880Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.306979Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.320341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.344162Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.344304Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.362619Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.379863Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.424701Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.731434Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.903236Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.730573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.730690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.805702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.266761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.266901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.268396Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:34.268682Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:34.268769Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.272812Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.273443Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T14:09:34.273737Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:34.274007Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:34.274084Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:34.274256Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:34.274339Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T14:09:34.315433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.315524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.315782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.321585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:09:34.529571Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:09:34.529647Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:09:34.606493Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T14:09:34.886795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:09:35.021924Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2589:3173]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.022141Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:35.022200Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2589:3173], StatRequests.size() = 1 2025-04-07T14:09:35.283276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89bjgxa3pqvb4hgtw6tk5m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE2MWI3ZDQtMWYwMTA1ZjYtZGY0YmVjNTctZWU5MjI0ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:35.381744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation par ... RN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:11:57.707967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6932:4877], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:11:57.932576Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7045:4936]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:11:57.932892Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:11:57.932996Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7047:4938] 2025-04-07T14:11:57.933064Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7047:4938] 2025-04-07T14:11:57.933350Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7048:4939] 2025-04-07T14:11:57.933430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7047:4938], server id = [2:7048:4939], tablet id = 72075186224037897, status = OK 2025-04-07T14:11:57.933469Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7048:4939], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:11:57.933501Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:11:57.933573Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:11:57.933623Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7045:4936], StatRequests.size() = 1 2025-04-07T14:11:58.037418Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2U2MGQyNTMtYjY1Mzk1MzUtNTJhYzU0MmMtNzk0YTRiYzA=, TxId: 2025-04-07T14:11:58.037493Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2U2MGQyNTMtYjY1Mzk1MzUtNTJhYzU0MmMtNzk0YTRiYzA=, TxId: 2025-04-07T14:11:58.037910Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:11:58.061605Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:11:58.061672Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:11:58.135648Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:11:58.135710Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:11:58.190022Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7047:4938], schemeshard count = 1 2025-04-07T14:11:59.167800Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:11:59.167924Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:11:59.167971Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:00.296008Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:00.327971Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:00.328114Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:12:00.328151Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:00.328485Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:00.330449Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:12:00.340237Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmI1YmE2Ny1jMjdiY2ZmZi03M2M2ZjIyZi1hNDAwZTdkMQ==, TxId: 2025-04-07T14:12:00.340295Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmI1YmE2Ny1jMjdiY2ZmZi03M2M2ZjIyZi1hNDAwZTdkMQ==, TxId: 2025-04-07T14:12:00.340898Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:00.353950Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:00.354012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:2978:3264] 2025-04-07T14:12:01.622323Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:01.622397Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2025-04-07T14:12:01.622430Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:02.770735Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:12:02.776092Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:02.776413Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:12:02.819111Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:02.819180Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2025-04-07T14:12:02.819231Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:02.819535Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:02.821380Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:12:02.830466Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGUwOTgxNWYtZTkyYWYxMWUtZGZlOWVlOTMtYzVjZGY0ZQ==, TxId: 2025-04-07T14:12:02.830511Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGUwOTgxNWYtZTkyYWYxMWUtZGZlOWVlOTMtYzVjZGY0ZQ==, TxId: 2025-04-07T14:12:02.831008Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:02.844117Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:02.844178Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:04.027484Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:04.027563Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:04.027606Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:05.189625Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:05.189745Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2025-04-07T14:12:05.189779Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:05.190065Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:05.192330Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:12:05.201806Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmQ0ZDkyOTQtNjM4ZWE0N2YtOWU1OWRjZjktZjVlZmNiZmY=, TxId: 2025-04-07T14:12:05.201864Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmQ0ZDkyOTQtNjM4ZWE0N2YtOWU1OWRjZjktZjVlZmNiZmY=, TxId: 2025-04-07T14:12:05.202481Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:05.215327Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:05.215404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2978:3264] 2025-04-07T14:12:05.215883Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7378:5122]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:05.218535Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:05.218589Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:05.222173Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:05.222230Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:05.222275Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:05.224277Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T14:12:05.224541Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2025-04-07T14:12:05.224854Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7408:5134]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:05.227285Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:05.227343Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:05.227531Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:05.227572Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:05.227619Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:05.229872Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-07T14:12:05.230052Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >> KqpScheme::AlterUser [GOOD] >> KqpScheme::AsyncReplicationConnectionString >> KqpOlapScheme::InvalidColumnInTieringRule [GOOD] >> KqpOlapScheme::NullColumnError >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex [GOOD] >> KqpScheme::AlterTableAlterIndex >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat [GOOD] >> KqpScheme::CreateAndAlterTableComplex >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx [GOOD] >> KqpScheme::Int8Int16Olap >> KqpScheme::CreateTableWithVectorIndexPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi >> KqpScheme::DisableS3ExternalDataSource [GOOD] >> KqpScheme::DoubleCreateExternalDataSource >> KqpOlapTypes::Timestamp [GOOD] >> KqpOlapTypes::TimestampCmpErr >> KqpScheme::DropAsyncReplication [GOOD] >> KqpScheme::DropAsyncReplicationCascade >> KqpScheme::CreateTableWithTtlSettingsCompat [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn >> KqpScheme::UnknownFamilyTest [GOOD] >> KqpScheme::TwoSimilarFamiliesTest >> SystemView::VSlotsFields >> SystemView::ConcurrentScans >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] >> SystemView::TopPartitionsFields >> KqpScheme::PathWithNoRoot [GOOD] >> 
KqpScheme::ModifyUnknownPermissions >> KqpScheme::CreateTableWithUniformPartitionsUuid [GOOD] >> KqpScheme::CreateTableWithUniqConstraint >> KqpScheme::DisableResourcePoolClassifiers [GOOD] >> KqpScheme::DisableResourcePoolClassifiersOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] Test command err: Trying to start YDB, gRPC: 30235, MsgBus: 3573 2025-04-07T14:11:44.165531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573736370533068:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:44.165619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001446/r3tmp/tmpR84kdX/pdisk_1.dat 2025-04-07T14:11:44.408686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30235, node 1 2025-04-07T14:11:44.469424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:44.469447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:44.469452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:44.469544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:44.500063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:44.500168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:44.501829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3573 TClient is connected to server localhost:3573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:44.841882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.861104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:44.978889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.098630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.173518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.558618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573744960469371:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.558776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.796438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.816698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.837291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.872328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.892679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.915851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.946113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573744960469861:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.946185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.946235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573744960469866:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.949291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:46.957673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573744960469868:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:47.620798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T14:11:47.651454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 28162, MsgBus: 28481 2025-04-07T14:11:48.825474Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573752740513519:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:48.825544Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001446/r3tmp/tmpzs7lMZ/pdisk_1.dat 2025-04-07T14:11:48.909610Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:48.928062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:48.928124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:48.929224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28162, node 2 2025-04-07T14:11:48.959522Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:48.959536Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:48.959540Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:48.959600Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28481 TClient is connected to server localhost:28481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:49.277506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:49.293306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.344434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.471090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:49.542612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.093744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573765625417106:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:51.093839Z node 2 :KQP_W ... type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.387347Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573808389761914:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.387423Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.421448Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.449747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.473303Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.497153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.524943Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.550049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.659406Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573808389762407:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.659475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.659541Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573808389762412:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.662883Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:01.672562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573808389762414:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:02.757724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:02.790138Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3157, MsgBus: 64247 2025-04-07T14:12:03.805545Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573819882786914:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:03.805610Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001446/r3tmp/tmpNojtPE/pdisk_1.dat 2025-04-07T14:12:03.882755Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:03.908289Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:03.908350Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3157, node 5 2025-04-07T14:12:03.909421Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:03.932932Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:03.932954Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:03.932961Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:03.933063Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64247 TClient is connected to server localhost:64247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:04.203505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:04.212327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.291578Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.429219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.486525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.813585Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573832767690510:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.813935Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.832545Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.864278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.896295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.960955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.991719Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.061583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.102726Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573837062658307:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.102797Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.102869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573837062658312:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.106144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:07.114885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573837062658314:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:08.143960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:08.174327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.807129Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573819882786914:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.807203Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScheme::ModifyPermissionsByRelativePath [GOOD] >> KqpScheme::ModifyPermissionsByIncorrectPaths >> KqpConstraints::DefaultValuesForTableNegative4 [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumn >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat >> KqpScheme::CreateAndDropGroup [GOOD] >> KqpScheme::CreateAsyncReplication >> KqpOlapScheme::NullColumnError [GOOD] >> KqpOlapScheme::NullKeySchema >> KqpScheme::DisableDropExternalDataSource [GOOD] >> KqpScheme::DisableCreateExternalTable >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] >> KqpConstraints::Utf8AndDefault [GOOD] >> KqpOlapScheme::AddColumn >> KqpScheme::CreateExternalDataSourceWithSa [GOOD] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod >> KqpRequestContext::TraceIdInErrorMessage [FAIL] >> KqpScan::AggregateByColumn >> KqpScheme::AsyncReplicationConnectionString [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl >> SystemView::Nodes [GOOD] >> SystemView::PDisksFields >> KqpOlapTypes::TimestampCmpErr [GOOD] >> KqpScheme::AddColumnFamilyWithCompressionLevel >> KqpScheme::CreateAndAlterTableComplex [GOOD] >> KqpScheme::ChangefeedTopicPartitions >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 20219, MsgBus: 29765 2025-04-07T14:11:26.163098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573659752430803:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.163344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00145f/r3tmp/tmpOZfFhf/pdisk_1.dat 2025-04-07T14:11:26.585436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20219, node 1 2025-04-07T14:11:26.588556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.588641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.590171Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:26.590201Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root 
Strong=0 2025-04-07T14:11:26.591783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.633493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.633512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.633524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.633620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29765 TClient is connected to server localhost:29765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:27.167653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.196689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.294858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.422563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.472913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.543885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668342366945:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.543958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.741660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.759591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.777682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.793634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.810731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.834894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.864542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668342367436:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.864591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573668342367441:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.864598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.867219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.873259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573668342367443:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.761108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.784636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:29.806903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:31.166117Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573659752430803:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:31.166303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23684, MsgBus: 23833 2025-04-07T14:11:31.692512Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573682280086624:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:31.692591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00145f/r3tmp/tmpywNgmV/pdisk_1.dat 2025-04-07T14:11:31.773652Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23684, node 2 2025-04-07T14:11:31.814091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:31.814182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:31.815667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:31.834717Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:31.834752Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:31.834760Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:31.834866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23833 TClient is connected to server localhost:23833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:32.209067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.216707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.261136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.399334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Oper ... 281474976715677:1, at schemeshard: 72057594046644480 2025-04-07T14:11:52.473818Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.984613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.410939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.689125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573783424483275:2768], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:55.689207Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:56.951361Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573787719450874:2860], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:56.951473Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:58.196350Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573796309385822:2977], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:58.196434Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:59.809661Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573800604353502:3098], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:11:59.809757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } Trying to start YDB, gRPC: 18640, MsgBus: 27889 2025-04-07T14:12:00.667636Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573807689598054:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:00.667750Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00145f/r3tmp/tmpnem4Zl/pdisk_1.dat 2025-04-07T14:12:00.777211Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:00.808563Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:00.808688Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:00.810278Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18640, node 6 2025-04-07T14:12:00.857732Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:00.857764Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:00.857775Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:00.857904Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27889 TClient is connected to server localhost:27889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:01.414294Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.432462Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.505112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:01.714260Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.783386Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.519560Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573824869468961:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.519676Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.562861Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.594248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.626864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.656018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.686878Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.755495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.792227Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573824869469460:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.792325Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.792548Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573824869469465:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.797362Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:04.806405Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573824869469467:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:05.668087Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573807689598054:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:05.676439Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:06.155003Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:12:06.730328Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.286417Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-07T14:12:07.858367Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.489446Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.136880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715697:0, at schemeshard: 72057594046644480 >> KqpAcl::AclForOltpAndOlap-isOlap [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] >> KqpScheme::DoubleCreateExternalDataSource [GOOD] >> KqpScheme::DoubleCreateExternalTable >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-04-07T14:09:47.451883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:47.452156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:47.452281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001371/r3tmp/tmpp2iMh6/pdisk_1.dat 2025-04-07T14:09:47.773681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4000, node 1 2025-04-07T14:09:47.983354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:47.983419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:47.983442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:47.983822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:47.985534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:48.076652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:48.076785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:48.090509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21143 2025-04-07T14:09:48.715792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:51.621375Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:51.657538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:51.657639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:51.697514Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:51.701790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:51.913523Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:51.970148Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:51.970248Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:52.006028Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:52.006231Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:52.006444Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:52.006535Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:52.006590Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:52.006644Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:52.006693Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:52.006746Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:52.007258Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:52.220172Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:52.220290Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:52.225953Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:52.232827Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:52.233499Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:52.234413Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:52.253903Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:52.253971Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:52.254042Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:52.256186Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:52.256269Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:52.268430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:52.279150Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:52.279269Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:52.290419Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:52.306420Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:52.350995Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:52.611929Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:52.745011Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:53.549799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:53.549927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:53.565453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:53.752727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:53.752947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:53.753267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:53.753361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:53.753467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:53.753579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:53.753682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:53.753825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:53.753980Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:53.754112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:53.754202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:53.754267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2285:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:53.792885Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:53.792955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:53.793161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:53.793261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:53.793332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:53.793387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2849];tablet_id=72075186224037900;process=TTxI ... [72075186224037897] Subscribed for config changes 2025-04-07T14:12:11.518822Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:11.519277Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:11.519364Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:11.520973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:11.521032Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:11.521801Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:11.579694Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:11.579891Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:11.580921Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8221:6111], server id = [2:8226:6116], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:11.581316Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8221:6111], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.581641Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8222:6112], server id = [2:8227:6117], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:11.581711Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8222:6112], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.583183Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:11.583616Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8223:6113], server id = [2:8228:6118], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:11.583691Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8223:6113], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.583802Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:11.584318Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8224:6114], server id = [2:8229:6119], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:11.584391Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8224:6114], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.585460Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8221:6111], server id = [2:8226:6116], tablet id = 72075186224037899 2025-04-07T14:12:11.585506Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.585659Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8225:6115], server id = [2:8230:6120], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:11.585724Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8225:6115], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.585911Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8222:6112], server id = [2:8227:6117], tablet id = 72075186224037900 2025-04-07T14:12:11.585941Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.586373Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:11.586509Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:11.586744Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8231:6121], server id = [2:8233:6123], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:11.586806Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8231:6121], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.587339Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8232:6122], server id = [2:8234:6124], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:11.587415Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8232:6122], path = { OwnerId: 
72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.587504Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:11.587787Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8223:6113], server id = [2:8228:6118], tablet id = 72075186224037901 2025-04-07T14:12:11.587812Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.588396Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8224:6114], server id = [2:8229:6119], tablet id = 72075186224037902 2025-04-07T14:12:11.588432Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.588535Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:11.588948Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8225:6115], server id = [2:8230:6120], tablet id = 72075186224037903 2025-04-07T14:12:11.588996Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.589148Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8235:6125], server id = [2:8237:6127], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:11.589224Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8235:6125], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.589358Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:11.589935Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8236:6126], server id = [2:8239:6129], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:11.590007Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8236:6126], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.590604Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8231:6121], server id = [2:8233:6123], tablet id = 72075186224037904 2025-04-07T14:12:11.590634Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.590697Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8238:6128], server id = [2:8240:6130], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:11.590782Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8238:6128], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:11.591268Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:11.591450Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8232:6122], server id = [2:8234:6124], tablet id = 72075186224037905 2025-04-07T14:12:11.591482Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.591663Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:11.591851Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:11.591896Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:11.592187Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:11.592430Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:11.592788Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:11.594950Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8235:6125], server id = [2:8237:6127], tablet id = 72075186224037906 2025-04-07T14:12:11.594978Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.595293Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8236:6126], server id = [2:8239:6129], tablet id = 72075186224037907 2025-04-07T14:12:11.595319Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.595383Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8238:6128], server id = [2:8240:6130], tablet id = 72075186224037908 2025-04-07T14:12:11.595397Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:11.595563Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:11.616177Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8257:6147]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:11.616485Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:11.616553Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8257:6147], StatRequests.size() = 1 2025-04-07T14:12:11.739495Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2M5YThkODYtNDQ1M2Q5YWItNTg4MmI1NDYtODExNmQ0Yzc=, TxId: 2025-04-07T14:12:11.739569Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2M5YThkODYtNDQ1M2Q5YWItNTg4MmI1NDYtODExNmQ0Yzc=, TxId: 2025-04-07T14:12:11.740090Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:11.775205Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8267:6153] 2025-04-07T14:12:11.775419Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8173:6083], server id = [2:8267:6153], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:11.775584Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8267:6153], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:11.775792Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8268:6154] 2025-04-07T14:12:11.775866Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8268:6154], schemeshard id = 72075186224037889 2025-04-07T14:12:11.790774Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:11.790841Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:11.886833Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8275:6159]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:11.887200Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:11.887253Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:11.890410Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:11.890477Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:11.890530Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:11.898768Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] Test command err: Trying to start YDB, gRPC: 14575, MsgBus: 17065 2025-04-07T14:11:51.069804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573765251214483:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:51.069904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001439/r3tmp/tmpPdDthg/pdisk_1.dat 2025-04-07T14:11:51.331190Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14575, node 1 2025-04-07T14:11:51.401287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.401308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:51.401313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.401414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.418381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.418508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.420228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17065 TClient is connected to server localhost:17065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.813452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.829985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.927094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.049874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.115735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.260325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573773841150780:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.260451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.483057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.504181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.525633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.546854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.584526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.647504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.678483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573773841151278:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.678559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.678647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573773841151283:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.681391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:53.688396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573773841151285:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:54.566266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25819, MsgBus: 3505 2025-04-07T14:11:55.047936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573784742971978:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:55.047982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001439/r3tmp/tmpx6GN7H/pdisk_1.dat 2025-04-07T14:11:55.108880Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25819, node 2 2025-04-07T14:11:55.148257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:55.148332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:55.149921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:55.156457Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:55.156474Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:55.156480Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:55.156579Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3505 TClient is connected to server localhost:3505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:55.460439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.468977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:55.506335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.632152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.701551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:57.486759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573793332908284:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:57.486823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:57.500392Z node 2 :FLAT_TX_SCHEMESHARD ... 81474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.056345Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.100070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.221965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.294434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.171144Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573833227368480:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.171458Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.187094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.216207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.241775Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.268111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.290645Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.355152Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.480274Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573833227368981:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.480346Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.480358Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573833227368986:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.483241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:06.490843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573833227368988:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:07.536984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17958, MsgBus: 23526 2025-04-07T14:12:08.280272Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573838280037418:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.280314Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001439/r3tmp/tmpTawl1E/pdisk_1.dat 2025-04-07T14:12:08.365524Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:08.388244Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:08.388335Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:08.389856Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17958, node 5 2025-04-07T14:12:08.424969Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:08.424993Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:08.425002Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:08.425114Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23526 TClient is connected to server localhost:23526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:08.835146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:08.854801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:08.905417Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:09.068478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.137569Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.251447Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573851164940985:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.251516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.296218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.332182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.362332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.432511Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.470005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.539013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.584418Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573851164941488:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.584494Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.584556Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573851164941493:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.588079Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:11.597100Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573851164941495:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:12.738316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::AclForOltpAndOlap-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23706, MsgBus: 61730 2025-04-07T14:11:42.175634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573726374806904:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:42.175737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00144e/r3tmp/tmp7yZfJ1/pdisk_1.dat 2025-04-07T14:11:42.436849Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23706, node 1 2025-04-07T14:11:42.476735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:42.476757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:42.476764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:42.476880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:42.507042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:42.507166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:42.508736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61730 TClient is connected to server localhost:61730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:42.845265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:42.867499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:42.973407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.103839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:43.183354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.350013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573734964743222:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.350143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.534905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.556209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.576936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.598460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.621450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.656055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:44.685162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573734964743716:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.685262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.685275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573734964743721:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:44.688062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:44.695967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573734964743723:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:45.401865Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490573739259711335:3617], for# user0@builtin, access# DescribeSchema 2025-04-07T14:11:45.401888Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490573739259711335:3617], for# user0@builtin, access# DescribeSchema 2025-04-07T14:11:45.412356Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490573739259711332:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/TwoShard]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:11:45.412592Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTA3M2U2ZjctZjdkOGVjOWQtNzUyYzkyY2ItMTQwZWI0NWU=, ActorId: [1:7490573739259711323:2460], ActorState: ExecuteState, TraceId: 01jr89fjkj8jzshf0h1n6kk1nz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 1044, MsgBus: 10430 2025-04-07T14:11:45.981924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573740780256829:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:45.982007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00144e/r3tmp/tmpxTF95Q/pdisk_1.dat 2025-04-07T14:11:46.040096Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1044, node 2 2025-04-07T14:11:46.085010Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:46.085032Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:46.085042Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:46.085132Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:46.100087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:46.100174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:46.101835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10430 TClient is connected to server localhost:10430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:46.376085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.383914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:46.449860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.551604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... 04-07T14:12:07.673729Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.821016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.884917Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:10.168236Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573850444709430:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:10.168343Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:10.213321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.245206Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.277693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.349287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.379162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.412699Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.455133Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573850444709925:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:10.455235Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:10.455311Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573850444709930:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:10.459170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:10.469064Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573850444709932:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:11.575776Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.616299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.642547Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677641:3681], for# user0@builtin, access# DescribeSchema 2025-04-07T14:12:11.642582Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677641:3681], for# user0@builtin, access# DescribeSchema 2025-04-07T14:12:11.644360Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490573854739677638:2480], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/test_acl]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:11.646072Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGU0N2QxMmQtYzVjNDZlNS1lNmYwODZmMy0zNzNlMDkxNA==, ActorId: [6:7490573854739677636:2479], ActorState: ExecuteState, TraceId: 01jr89gc7f3pjs6hd0epme1rma, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:11.663869Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677664:3698], for# user0@builtin, access# DescribeSchema 2025-04-07T14:12:11.663901Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677664:3698], for# user0@builtin, access# DescribeSchema 2025-04-07T14:12:11.665069Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490573854739677649:2484], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:30: Error: Cannot find table 'db.[/Root/test_acl]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:11.665617Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGJjMmZmNTItNjAzYjk3MGQtYTViMzBmN2UtMjM4NThjOGQ=, ActorId: [6:7490573854739677645:2483], ActorState: ExecuteState, TraceId: 01jr89gc855njnzzj8k227qtvc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:11.674798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.821318Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677695:3715], for# user0@builtin, access# SelectRow 2025-04-07T14:12:11.821439Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715674. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:12:11.821896Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NjQ3MzllOTQtMWQ5MWNjNjEtYWNlMTdlNzUtYWY3MWM1YTU=, ActorId: [6:7490573854739677683:2491], ActorState: ExecuteState, TraceId: 01jr89gc9bb444rpgbg34ga5k1, Create QueryResponse for error on request, msg: 2025-04-07T14:12:11.822782Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr89gc9bb444rpgbg34ga5k1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NjQ3MzllOTQtMWQ5MWNjNjEtYWNlMTdlNzUtYWY3MWM1YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:11.908758Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573833264838527:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:11.908831Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:11.927541Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573854739677731:3739], for# user0@builtin, access# UpdateRow 2025-04-07T14:12:11.927740Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715676. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:12:11.928204Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=M2Y3NTgxMWMtYWE4M2YyNWUtZTcwNWQzZjctMTVmNmY0MjU=, ActorId: [6:7490573854739677717:2496], ActorState: ExecuteState, TraceId: 01jr89gcdn5gs0taqhx4rp9x2j, Create QueryResponse for error on request, msg: 2025-04-07T14:12:11.928986Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr89gcdn5gs0taqhx4rp9x2j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=M2Y3NTgxMWMtYWE4M2YyNWUtZTcwNWQzZjctMTVmNmY0MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:11.943070Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.067252Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr89gchraz2gkvjynsb433g0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmYzYTAtODUyZWNlYjktNDlkYWExYzktMjc2OGJjNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:12.173433Z node 6 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [6:7490573859034645120:3799], for# user0@builtin, access# UpdateRow 2025-04-07T14:12:12.173559Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715680. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:12:12.174005Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NjY0OGVkZjgtNzIxMWFiMTYtZGNmMjI2N2UtNDViYjI1Yzk=, ActorId: [6:7490573859034645092:2514], ActorState: ExecuteState, TraceId: 01jr89gcnh5s099660sc57p1pb, Create QueryResponse for error on request, msg: 2025-04-07T14:12:12.174645Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr89gcnh5s099660sc57p1pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NjY0OGVkZjgtNzIxMWFiMTYtZGNmMjI2N2UtNDViYjI1Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:12.184720Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.209243Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr89gcsda3ndpd9v1spd5gw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=OWQxZjQxZWMtNGE0MTIxYTctMjY1ODgxMDctM2FjNzYwZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:12.321411Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr89gct97hqcccjnpxbkykyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZDE5NTQ3N2UtMTJmYzQxOTItODM1ZmEwYjEtOWZkYjhkZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpScheme::Int8Int16Olap [GOOD] >> SystemView::PartitionStatsOneSchemeShard >> SystemView::ConcurrentScans [GOOD] >> SystemView::Describe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-04-07T14:09:27.315752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.316146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.316310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ea/r3tmp/tmpGkuSRe/pdisk_1.dat 2025-04-07T14:09:27.736152Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22766, node 1 2025-04-07T14:09:28.028511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.028554Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.028576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.029168Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.030966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.134531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.134670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.162262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13580 2025-04-07T14:09:28.846514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.116451Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.153712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.153839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.193311Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.196886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.421285Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:32.482591Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:32.482693Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.513970Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.514130Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.514326Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.514397Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.514444Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.514499Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.514547Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.514597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.514992Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.733726Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.733835Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.740199Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.757100Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.758150Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.759238Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.787914Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.787992Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.788062Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.790010Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.790095Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.813384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.830059Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.830213Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.842900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.859737Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.901935Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.198490Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:33.361037Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:34.142742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.142875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.162163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.397611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.397862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.398156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.398286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.398411Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.398549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:34.398676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:34.398781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:34.398913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:34.399068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:34.399216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:34.399348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:34.472626Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.472740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.472977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.473090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.473194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.473319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTx ... e::Execute 2025-04-07T14:12:12.461962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:12.462621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:12.462696Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:12.463972Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:12.486540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:12.486685Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-04-07T14:12:12.487203Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8180:6092], server id = [2:8185:6097], tablet id = 72075186224037903 2025-04-07T14:12:12.487246Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.487870Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8246:6136], server id = [2:8251:6141], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:12.488015Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8246:6136], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.488150Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8247:6137], server id = [2:8252:6142], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:12.488210Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8247:6137], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.488430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8248:6138], server id = [2:8253:6143], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:12.488488Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8248:6138], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.489614Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:12.490401Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:12.490594Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:12.490887Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8246:6136], server id = [2:8251:6141], tablet id = 72075186224037899 2025-04-07T14:12:12.490916Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.491189Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8249:6139], server id = [2:8254:6144], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:12.491261Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8249:6139], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.491380Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8250:6140], server id = [2:8255:6145], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:12.491430Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8250:6140], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.491943Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8247:6137], server id = [2:8252:6142], tablet id = 72075186224037900 2025-04-07T14:12:12.491972Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.492453Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8248:6138], server id = [2:8253:6143], tablet id = 72075186224037901 2025-04-07T14:12:12.492479Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.492581Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8256:6146], server id = [2:8259:6149], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:12.492649Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8256:6146], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.492763Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8257:6147], server id = [2:8260:6150], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:12.492813Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8257:6147], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.492851Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:12.493512Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:12.494068Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8258:6148], server id = [2:8261:6151], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:12.494130Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8258:6148], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.494753Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:12.494931Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8249:6139], server id = [2:8254:6144], tablet id = 72075186224037902 2025-04-07T14:12:12.494973Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.495118Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:12.495331Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8250:6140], server id = [2:8255:6145], tablet id = 72075186224037903 2025-04-07T14:12:12.495354Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.495453Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 
2025-04-07T14:12:12.495613Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8262:6152], server id = [2:8264:6154], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:12.495690Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8262:6152], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.495868Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8256:6146], server id = [2:8259:6149], tablet id = 72075186224037904 2025-04-07T14:12:12.495891Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.496250Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8257:6147], server id = [2:8260:6150], tablet id = 72075186224037905 2025-04-07T14:12:12.496273Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.496581Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8263:6153], server id = [2:8265:6155], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:12.496646Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8263:6153], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.497118Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8258:6148], server id = [2:8261:6151], tablet id = 72075186224037906 2025-04-07T14:12:12.497144Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.497234Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:12.497290Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:12.497324Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:12.497507Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:12.497689Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:12.497966Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:12.500507Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8262:6152], server id = [2:8264:6154], tablet id = 72075186224037907 2025-04-07T14:12:12.500541Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.500886Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8263:6153], server id = [2:8265:6155], tablet id = 72075186224037908 2025-04-07T14:12:12.500917Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.501176Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:12.522979Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8282:6172]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:12.523236Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:12.523302Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8282:6172], StatRequests.size() = 1 2025-04-07T14:12:12.676916Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjRhNTU5NTQtMzhkNzMxMmEtY2JiMjBjNzMtMTM0ZDU0NDk=, TxId: 2025-04-07T14:12:12.676998Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjRhNTU5NTQtMzhkNzMxMmEtY2JiMjBjNzMtMTM0ZDU0NDk=, TxId: 2025-04-07T14:12:12.677570Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:12.690434Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8292:6178] 2025-04-07T14:12:12.690683Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8292:6178], schemeshard id = 72075186224037889 2025-04-07T14:12:12.690767Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8196:6108], server id = [2:8293:6179], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:12.690856Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8293:6179] 2025-04-07T14:12:12.690924Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8293:6179], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:12.704877Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:12.704958Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:12.874171Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8300:6184]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:12.874518Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:12.874571Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:12.878670Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:12.878744Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:12.878822Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:12.884806Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpScheme::TwoSimilarFamiliesTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-04-07T14:09:29.444605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:29.444966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:29.445120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ca/r3tmp/tmpyxkv2E/pdisk_1.dat 2025-04-07T14:09:29.803345Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9175, node 1 2025-04-07T14:09:30.055810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:30.055858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:30.055886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:30.056453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:30.058479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:30.173308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:30.173436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:30.195778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2109 2025-04-07T14:09:30.877717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:34.117317Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:34.150843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:34.150951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:34.193264Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:34.196643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:34.411160Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:34.480471Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:34.480585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:34.516100Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:34.516263Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:34.516408Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:34.516452Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:34.516491Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:34.516575Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:34.516616Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:34.516650Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:34.517009Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:34.706302Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:34.706408Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:34.717369Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1767:2559] 2025-04-07T14:09:34.734724Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1808:2579] 2025-04-07T14:09:34.740424Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1808:2579], schemeshard id = 72075186224037889 2025-04-07T14:09:34.741365Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:34.770546Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:34.770603Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:34.770670Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:34.777146Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:34.777224Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:34.783066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:34.795913Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:34.796055Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:34.811160Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:34.828909Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:34.882696Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:35.173121Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:35.354290Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:36.270650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2143:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.270777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.287095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:36.493881Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:36.494125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:36.494381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:36.494505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:36.494637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:36.494759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:36.494901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:36.495020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:36.495139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:36.495239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:36.495357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:36.495505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2294:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:36.539813Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:36.539894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:36.540100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:36.540203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:36.540355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:36.540484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2847];tablet_id=72075186224037900;process=TTxIn ... [72075186224037897] Subscribed for config changes 2025-04-07T14:12:12.625683Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:12.625744Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:12.625863Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:12.626865Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:12.626959Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:12.628578Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:12.674058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:12.674204Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:12.675062Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8198:6120], server id = [2:8203:6125], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:12.675171Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8198:6120], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.675424Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8199:6121], server id = [2:8204:6126], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:12.675485Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8199:6121], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.675562Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8200:6122], server id = [2:8206:6128], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:12.675608Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8200:6122], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.676557Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8201:6123], server id = [2:8205:6127], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:12.676621Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8201:6123], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.677368Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:12.677992Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8202:6124], server id = [2:8207:6129], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:12.678051Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8202:6124], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.678143Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:12.678749Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:12.679014Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:12.679218Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8198:6120], server id = [2:8203:6125], tablet id = 72075186224037899 2025-04-07T14:12:12.679255Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.679553Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8199:6121], server id = [2:8204:6126], tablet id = 72075186224037900 2025-04-07T14:12:12.679578Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.679611Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:12.679998Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8200:6122], server id = [2:8206:6128], tablet id = 72075186224037901 2025-04-07T14:12:12.680021Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.680096Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8201:6123], server id = [2:8205:6127], tablet id = 72075186224037902 2025-04-07T14:12:12.680117Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.680219Z node 2 
:STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8208:6130], server id = [2:8211:6133], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:12.680278Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8208:6130], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.680404Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8202:6124], server id = [2:8207:6129], tablet id = 72075186224037903 2025-04-07T14:12:12.680427Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.680801Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8209:6131], server id = [2:8214:6136], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:12.680865Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8209:6131], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.680982Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8210:6132], server id = [2:8213:6135], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:12.681045Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8210:6132], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.681442Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8212:6134], server id = [2:8216:6138], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:12.681507Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8212:6134], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.681913Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8215:6137], server id = [2:8217:6139], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:12.681967Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8215:6137], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.682053Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:12.682893Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:12.683064Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:12.683162Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:12.683379Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8208:6130], server id = [2:8211:6133], tablet id = 72075186224037904 2025-04-07T14:12:12.683402Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.683516Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:12.683556Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:12.683741Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:12.683909Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:12.684139Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:12.688194Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8209:6131], server id = [2:8214:6136], tablet id = 72075186224037905 2025-04-07T14:12:12.688223Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.688604Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8210:6132], server id = [2:8213:6135], tablet id = 72075186224037906 2025-04-07T14:12:12.688627Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.688698Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8212:6134], server id = [2:8216:6138], tablet id = 72075186224037907 2025-04-07T14:12:12.688717Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.688895Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:12.689412Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8215:6137], server id = [2:8217:6139], tablet id = 72075186224037908 2025-04-07T14:12:12.689436Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.715536Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8234:6156]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:12.715805Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:12.715865Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8234:6156], StatRequests.size() = 1 2025-04-07T14:12:12.860245Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmY5NGY4MmUtYzhjY2RhOGMtYjUxYTc4YTItZTUxZWM4MjU=, TxId: 2025-04-07T14:12:12.860329Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmY5NGY4MmUtYzhjY2RhOGMtYjUxYTc4YTItZTUxZWM4MjU=, TxId: 2025-04-07T14:12:12.860915Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:12.873927Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8244:6162] 2025-04-07T14:12:12.874226Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8244:6162], schemeshard id = 72075186224037889 2025-04-07T14:12:12.874328Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8151:6093], server id = [2:8245:6163], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:12.874420Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8245:6163] 2025-04-07T14:12:12.874519Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8245:6163], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:12.889159Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:12.889241Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:13.007085Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8250:6168]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:13.007490Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:13.007570Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:13.010604Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:13.010667Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:13.010730Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:13.022734Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-04-07T14:09:27.556382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.556833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.557010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e5/r3tmp/tmpZEbeKT/pdisk_1.dat 2025-04-07T14:09:27.929101Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5075, node 1 2025-04-07T14:09:28.187968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.188015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.188044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.188445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.193595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.291071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.291212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.310055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31807 2025-04-07T14:09:29.006874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.292001Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.341230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.341355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.391844Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.398096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.632856Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:32.702745Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:32.702839Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.732980Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.733162Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.733372Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.733439Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.733504Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.733613Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.733676Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.733744Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.734170Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.956367Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.956465Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.963829Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.978221Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.979194Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.980276Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:33.007778Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:33.007838Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:33.007936Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:33.009888Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.009981Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.024839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:33.039332Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:33.039549Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:33.052573Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:33.074930Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.115777Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.468315Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:33.640245Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:34.509417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.509555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.526530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.772403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.772658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.772999Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.773153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.773334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.773472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:34.773634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:34.773803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:34.773916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:34.774029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:34.774159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:34.774293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:34.875353Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.875438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.875661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.875756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.875887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.875960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... ode 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:12:09.984575Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7996:5997], schemeshard count = 1 2025-04-07T14:12:11.806348Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:11.806407Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:12:11.806438Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:11.806481Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:11.810763Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:11.827454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:11.828044Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:11.828149Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:11.829074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 2 2025-04-07T14:12:11.829149Z node 2 :STATISTICS WARN: [72075186224037897] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-04-07T14:12:11.829203Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:12.871852Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:12.885511Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:12.885734Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:12.888885Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8158:6085], server id = [2:8163:6090], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:12.889452Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8158:6085], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.889629Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8159:6086], server id = [2:8164:6091], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:12.889712Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8159:6086], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.891244Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8160:6087], server id = [2:8166:6093], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:12.891322Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8160:6087], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.891546Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8161:6088], server id = [2:8165:6092], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:12.891604Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8161:6088], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.891655Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:12.892771Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:12.893075Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8162:6089], server id = [2:8167:6094], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:12.893151Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8162:6089], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.893252Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:12.893584Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8158:6085], server id = [2:8163:6090], tablet id = 72075186224037899 2025-04-07T14:12:12.893620Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.894196Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:12.894514Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8159:6086], server id = [2:8164:6091], tablet id = 72075186224037900 2025-04-07T14:12:12.894541Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.894729Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8160:6087], server id = [2:8166:6093], tablet id = 72075186224037901 2025-04-07T14:12:12.894751Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.894786Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:12.895081Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8168:6095], server id = [2:8171:6098], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:12.895144Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8168:6095], 
path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.895202Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8161:6088], server id = [2:8165:6092], tablet id = 72075186224037902 2025-04-07T14:12:12.895223Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.895345Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8169:6096], server id = [2:8172:6099], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:12.895412Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8169:6096], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.895955Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8170:6097], server id = [2:8174:6101], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:12.896024Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8170:6097], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.896519Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8162:6089], server id = [2:8167:6094], tablet id = 72075186224037903 2025-04-07T14:12:12.896548Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.896995Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8173:6100], server id = [2:8175:6102], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:12.897062Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8173:6100], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.897158Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:12.897670Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:12.897803Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8176:6103], server id = [2:8177:6104], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:12.897862Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8176:6103], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:12.898070Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:12.898612Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:12.898863Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8168:6095], server id = [2:8171:6098], tablet id = 72075186224037904 2025-04-07T14:12:12.898889Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.898961Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8169:6096], server id = [2:8172:6099], tablet id = 72075186224037905 2025-04-07T14:12:12.898979Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.899028Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:12.899064Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:12.899248Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8170:6097], server id = [2:8174:6101], tablet id = 72075186224037906 2025-04-07T14:12:12.899270Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.899346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:12.899544Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 
2025-04-07T14:12:12.899859Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:12.901819Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8173:6100], server id = [2:8175:6102], tablet id = 72075186224037907 2025-04-07T14:12:12.901853Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.902220Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8176:6103], server id = [2:8177:6104], tablet id = 72075186224037908 2025-04-07T14:12:12.902245Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:12.902487Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:12.922449Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8194:6121]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:12.922671Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:12.922712Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8194:6121], StatRequests.size() = 1 2025-04-07T14:12:13.056986Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjZlMDJkYWQtMmQ3NTFjMWMtOWNkMTJmMTYtNGQyMDllYmM=, TxId: 2025-04-07T14:12:13.057054Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjZlMDJkYWQtMmQ3NTFjMWMtOWNkMTJmMTYtNGQyMDllYmM=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:12:13.057753Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8202:6127]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:13.057946Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:13.058407Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:13.058457Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:13.062716Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:13.062780Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:13.062846Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:13.068955Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpScheme::ModifyUnknownPermissions [GOOD] >> KqpScheme::ModifyPermissionsByRelativePathQueryClient >> KqpOlapScheme::NullKeySchema [GOOD] >> KqpOlapScheme::InsertAddInsertDrop >> KqpScheme::CreateTableWithUniqConstraint [GOOD] >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsTables >> KqpScheme::AlterTableAlterIndex [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::Int8Int16Olap [GOOD] Test command err: Trying to start YDB, gRPC: 6873, MsgBus: 19766 2025-04-07T14:11:50.995572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573761953077673:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.995658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00143d/r3tmp/tmpJrBAFw/pdisk_1.dat 2025-04-07T14:11:51.257907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6873, node 1 2025-04-07T14:11:51.332887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.332919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:51.332925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.333010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.350564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.350770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.352383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:19766 TClient is connected to server localhost:19766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.673261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.691733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.799042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.903192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.971311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.124993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774837981293:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.125098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.320688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.346152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.369849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.393241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.418219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.457637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.488973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774837981787:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.489063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.489126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774837981792:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.492357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:53.500139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573774837981794:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:54.250097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-07T14:11:54.254038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21100, MsgBus: 62185 2025-04-07T14:11:54.772018Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573781475269561:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:54.772115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00143d/r3tmp/tmpCXGX77/pdisk_1.dat 2025-04-07T14:11:54.840287Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21100, node 2 2025-04-07T14:11:54.886645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:54.886659Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:54.886663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:54.886732Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:54.891460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:54.891537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:54.893556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62185 TClient is connected to server localhost:62185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:55.150019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.158456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:55.192524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.276062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.316036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.872440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573790065205856:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.872486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOU ... found or you don't have access permissions } 2025-04-07T14:12:11.969623Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.993707Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.026390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.059633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.094106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.132490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.166510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.213020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573855743125620:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.213091Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573855743125625:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.213101Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.216481Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:12.226021Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573855743125627:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:13.294358Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573838563254238:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:13.294417Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:13.362994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.484367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:13.484648Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:13.485077Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:13.485252Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:13.485402Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:13.485571Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:13.485743Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:13.485904Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:13.486046Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:13.486191Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:13.486330Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:13.486465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7490573860038093309:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:12:13.490580Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:13.490663Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:13.490776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:13.490817Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:13.491029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:13.491068Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:13.491170Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:13.491213Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:13.491293Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:13.491327Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:13.491396Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:13.491435Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:13.492002Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:13.492048Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:13.492299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:13.492335Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:13.492492Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:13.492525Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:13.492996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:13.493042Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:13.493161Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:13.493198Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=368;columns=4; 2025-04-07T14:12:14.012985Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035133568, txId: 18446744073709551615] shutting down >> KqpScheme::CreateAsyncReplication [GOOD] >> KqpScheme::CreateAsyncReplicationWithSecret >> KqpScheme::DropAsyncReplicationCascade [GOOD] >> KqpScheme::ModifyPermissionsByIncorrectPaths [GOOD] >> SystemView::TabletsFields >> LabeledDbCounters::TwoTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TwoSimilarFamiliesTest [GOOD] Test command err: Trying to start YDB, gRPC: 6276, MsgBus: 5054 2025-04-07T14:11:51.504133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573768560643988:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:51.504204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001434/r3tmp/tmpE6by5q/pdisk_1.dat 2025-04-07T14:11:51.761012Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6276, node 1 2025-04-07T14:11:51.810577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.810607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T14:11:51.810616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.810711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.868395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.868553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.870173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5054 TClient is connected to server localhost:5054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:52.171795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.190083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.278808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.409807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.466370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.721142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573777150580290:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.721211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.893081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.912758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.931002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.950882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.970817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.034616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.105911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573781445548089:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.105964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573781445548094:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.105968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.108312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.115455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573781445548096:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:54.913344Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490573781445548413:3621], for# test_user@builtin, access# DescribeSchema 2025-04-07T14:11:54.913370Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490573781445548413:3621], for# test_user@builtin, access# DescribeSchema 2025-04-07T14:11:54.921348Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490573781445548410:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:11:54.921586Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTgzOTI2NGEtZGQ1NDY1Ny1lMzQyMTk1MS04N2ZiY2Q5Nw==, ActorId: [1:7490573781445548401:2460], ActorState: ExecuteState, TraceId: 01jr89fvwt681spd95xawb4gda, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:11:54.936097Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490573781445548418:2468], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:11:54.936302Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTgzOTI2NGEtZGQ1NDY1Ny1lMzQyMTk1MS04N2ZiY2Q5Nw==, ActorId: [1:7490573781445548401:2460], ActorState: ExecuteState, TraceId: 01jr89fvxe274hyckpxztzm52b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Trying to start YDB, gRPC: 32609, MsgBus: 64802 2025-04-07T14:11:55.495814Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573785390015868:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:55.495911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001434/r3tmp/tmpDtHpiq/pdisk_1.dat 2025-04-07T14:11:55.565385Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32609, node 2 2025-04-07T14:11:55.597169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:55.597231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:55.598896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:55.614950Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:55.614974Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:55.614980Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:55.615043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64802 TClient is connected to server localhost:64802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" ... 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:05.276718Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.283918Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.325559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:05.476267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.535079Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.820268Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573834186899377:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.820340Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.837699Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.860758Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.886893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.913284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.954218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.981115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.111625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573838481867170:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.111707Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.111762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573838481867175:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.114961Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:08.123305Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573838481867177:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 63363, MsgBus: 11372 2025-04-07T14:12:09.728956Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573844634849806:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:09.729021Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001434/r3tmp/tmpG0o9FZ/pdisk_1.dat 2025-04-07T14:12:09.815302Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:09.865069Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:09.865169Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:09.866841Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63363, node 5 2025-04-07T14:12:09.909919Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:09.909946Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:09.909957Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:09.910062Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11372 TClient is connected to server localhost:11372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:12:10.364957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.380969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:10.427475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.566790Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:10.641616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:13.124780Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573861814720697:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.124892Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.167617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.199968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.227245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.259505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.290917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.369147Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.420638Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573861814721194:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.420768Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.420880Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573861814721199:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.425395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:13.435145Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573861814721201:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpOlapScheme::TenThousandColumns [FAIL] >> KqpOlapScheme::SetColumnFamily >> KqpScheme::DropResourcePoolClassifier [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier >> SystemView::QueryStatsRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableAlterIndex [GOOD] Test command err: Trying to start YDB, gRPC: 63744, MsgBus: 63925 2025-04-07T14:11:44.173009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573737633276070:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:44.173295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001448/r3tmp/tmpQjp8ZD/pdisk_1.dat 2025-04-07T14:11:44.419547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63744, node 1 2025-04-07T14:11:44.485834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:44.485859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:44.485864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:44.485943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:44.533355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:44.533453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:44.535263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63925 TClient is connected to server localhost:63925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:44.836354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:44.848613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:44.926760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.025528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:45.068703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:46.747192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573746223212372:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.747321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:46.960327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:46.982926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.000730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.019469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.041989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.063496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.091041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573750518180160:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:47.091119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:47.091141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573750518180165:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:47.094247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:47.101975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573750518180167:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:47.845482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.870757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:47.895923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.022952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:11:49.057212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.118612Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-07T14:11:49.144534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2025-04-07T14:11:49.173318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573737633276070:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:49.173766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:11:49.178988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4276, MsgBus: 24142 2025-04-07T14:11:49.753066Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573760013188623:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:49.753157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001448/r3tmp/tmpkkm7x6/pdisk_1.dat 2025-04-07T14:11:49.830870Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4276, node 2 2025-04-07T14:11:49.870492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:49.870568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:49.872152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:49.884283Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:49.884303Z node 2 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-07T14:11:49.884307Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:49.884384Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24142 TClient is connected to server localhost:24142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Deprica ... 5-04-07T14:12:04.745783Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573821425093013:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:05.735142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.806393Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.851042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.487059Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573808540188907:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:06.487133Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:07.337365Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:07.412441Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.443352Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037926 not found 2025-04-07T14:12:07.443391Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037927 not found 2025-04-07T14:12:07.492398Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037925 not found 2025-04-07T14:12:07.492432Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037928 not found 2025-04-07T14:12:07.546295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2025-04-07T14:12:07.601297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.633805Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037929 not found 2025-04-07T14:12:07.633835Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037932 not found Trying to start YDB, gRPC: 24073, MsgBus: 27421 2025-04-07T14:12:08.234961Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573839183831502:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.235021Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001448/r3tmp/tmpigvwJT/pdisk_1.dat 2025-04-07T14:12:08.296638Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24073, node 5 2025-04-07T14:12:08.336299Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:08.336387Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:08.337998Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:08.354682Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:08.354709Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:08.354718Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:08.354842Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27421 TClient is connected to server localhost:27421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:12:08.725074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.742954Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:08.817480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:08.943285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:09.028230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.536620Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573852068735111:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.536909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.557688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.586682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.616808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.684564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.713493Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.748162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.786622Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573852068735610:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.786694Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.786707Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573852068735615:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:11.789752Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:11.798459Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573852068735617:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:12.931046Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.006747Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.093206Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.235533Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573839183831502:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:13.235617Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:15.124118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2025-04-07T14:12:15.174697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-04-07T14:09:27.983518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.983865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.984015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013de/r3tmp/tmp4IKtR4/pdisk_1.dat 2025-04-07T14:09:28.326266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11295, node 1 2025-04-07T14:09:28.555544Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.555600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.555632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.556244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.558594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.655277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.655412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.673131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21969 2025-04-07T14:09:29.392036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.754859Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.789534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.789642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.833700Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.837495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.070192Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:33.129990Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:33.130106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:33.163164Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:33.163327Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:33.163516Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:33.163578Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:33.163634Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:33.163680Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:33.163729Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:33.163783Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:33.164263Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:33.372453Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.372587Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.379275Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:33.386550Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:33.387110Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:33.388022Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:09:33.407160Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:33.407215Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:33.407276Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:09:33.408969Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.409055Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.423922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:33.438780Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:33.438925Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:33.454044Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:33.470440Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.510402Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.828755Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:33.988957Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:09:34.679069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:09:35.298771Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:35.474726Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:09:35.474796Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:35.474888Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2493:2909], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:09:35.476923Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2494:2910] 2025-04-07T14:09:35.477045Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2494:2910], schemeshard id = 72075186224037899 2025-04-07T14:09:36.453764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2621:3190], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.453986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:36.470471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:09:36.707908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:36.708137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:36.708421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:36.708554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:36.708700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:36.708813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:36.708920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:36.709018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:36.709157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:36.709277Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:36.709410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:36.709506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3044];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:36.756028Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2778:3047];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04 ... 2025-04-07T14:12:12.299896Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T14:12:12.299967Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 236.000000s, at schemeshard: 72075186224037899 2025-04-07T14:12:12.300190Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-07T14:12:12.314004Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:12:14.678351Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:14.678412Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:12:14.678453Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-07T14:12:14.678503Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:12:14.682907Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:14.700383Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:14.701093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:14.701234Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:14.702354Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:14.716153Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:14.716383Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:14.717381Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9256:6867], server id = [2:9261:6872], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:14.717881Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9256:6867], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.719094Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9257:6868], server id = [2:9262:6873], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:14.719183Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9257:6868], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.719413Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:14.720313Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9258:6869], server id = [2:9263:6874], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:14.720388Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9258:6869], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.721019Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9256:6867], server id = [2:9261:6872], tablet id = 72075186224037905 2025-04-07T14:12:14.721053Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.721240Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9259:6870], server id = [2:9264:6875], tablet id = 72075186224037908, status = OK 
2025-04-07T14:12:14.721302Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9259:6870], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.721847Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:14.722108Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:14.722252Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9260:6871], server id = [2:9266:6877], tablet id = 72075186224037909, status = OK 2025-04-07T14:12:14.722317Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9260:6871], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.722478Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9265:6876], server id = [2:9267:6878], tablet id = 72075186224037910, status = OK 2025-04-07T14:12:14.722528Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9265:6876], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.723458Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:14.723845Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9257:6868], server id = [2:9262:6873], tablet id = 72075186224037906 2025-04-07T14:12:14.723872Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.723978Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9258:6869], server id = [2:9263:6874], tablet id = 72075186224037907 2025-04-07T14:12:14.723997Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.724153Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-07T14:12:14.724400Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-07T14:12:14.724658Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9259:6870], server id = [2:9264:6875], tablet id = 72075186224037908 2025-04-07T14:12:14.724710Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.724882Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9268:6879], server id = [2:9270:6881], tablet id = 72075186224037911, status = OK 2025-04-07T14:12:14.724948Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9268:6879], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.725112Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9269:6880], server id = [2:9272:6883], tablet id = 72075186224037912, status = OK 2025-04-07T14:12:14.725166Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9269:6880], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.725919Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9265:6876], server id = [2:9267:6878], tablet id = 72075186224037910 2025-04-07T14:12:14.725948Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.726014Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9260:6871], server id = [2:9266:6877], tablet id = 72075186224037909 2025-04-07T14:12:14.726035Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.726095Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9271:6882], server id = [2:9273:6884], tablet id = 72075186224037913, status = OK 2025-04-07T14:12:14.726153Z node 2 :STATISTICS DEBUG: 
TEvStatisticsRequest send, client id = [2:9271:6882], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.726243Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9274:6885], server id = [2:9275:6886], tablet id = 72075186224037914, status = OK 2025-04-07T14:12:14.726295Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9274:6885], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:12:14.726563Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-07T14:12:14.727072Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-07T14:12:14.727181Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9268:6879], server id = [2:9270:6881], tablet id = 72075186224037911 2025-04-07T14:12:14.727203Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.727343Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-07T14:12:14.727535Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-07T14:12:14.727580Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:14.727811Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9269:6880], server id = [2:9272:6883], tablet id = 72075186224037912 2025-04-07T14:12:14.727837Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.727954Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:14.728120Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:14.728269Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9271:6882], server id = [2:9273:6884], tablet id = 72075186224037913 2025-04-07T14:12:14.728285Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.728414Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-07T14:12:14.730120Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9274:6885], server id = [2:9275:6886], tablet id = 72075186224037914 2025-04-07T14:12:14.730150Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:14.730568Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:14.750843Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9292:6903]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:14.751073Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:14.751109Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9292:6903], StatRequests.size() = 1 2025-04-07T14:12:14.863768Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-04-07T14:12:14.863965Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2M2OTRhMDAtMzM0Zjk3OWUtN2JmYmNjZWItYmUwZDNmMDE=, TxId: 2025-04-07T14:12:14.864004Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2M2OTRhMDAtMzM0Zjk3OWUtN2JmYmNjZWItYmUwZDNmMDE=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:12:14.864565Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9300:6909]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:14.864910Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:14.865444Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:14.865496Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:14.868487Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:14.868549Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:12:14.868595Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:14.874305Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpScheme::DisableCreateExternalTable [GOOD] >> KqpScheme::DisableDropExternalTable >> AnalyzeDatashard::AnalyzeOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ModifyPermissionsByIncorrectPaths [GOOD] Test command err: Trying to start YDB, gRPC: 29693, MsgBus: 25625 2025-04-07T14:11:51.494704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573765555934777:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:51.494760Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001432/r3tmp/tmpLXv1cI/pdisk_1.dat 2025-04-07T14:11:51.768047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29693, node 1 2025-04-07T14:11:51.771586Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:51.772858Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:51.801575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.801603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:51.801611Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.801681Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.850710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.850861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.852399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25625 TClient is connected to server localhost:25625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:52.217824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.244326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.367982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.473921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:52.548989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.787695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774145871098:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.787819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.018770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.038489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.056608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.077988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.100287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.167816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.201093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573778440838891:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.201168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.201197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573778440838896:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.203378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.208776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573778440838898:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:55.163093Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-07T14:11:55.167011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24772, MsgBus: 61355 2025-04-07T14:11:55.972939Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573782559178724:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:55.973036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001432/r3tmp/tmpJtEp5t/pdisk_1.dat 2025-04-07T14:11:56.090796Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:56.114825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:56.114906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:56.116331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24772, node 2 2025-04-07T14:11:56.146275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:56.146298Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:56.146303Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:56.146405Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61355 TClient is connected to server localhost:61355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:56.442334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.449867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:56.493209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.617056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.680035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:58.494787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573795444082315:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... SchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.569519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.632524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:08.858267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573839954966518:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.858368Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.898725Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.926508Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.952862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.985514Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.020530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.053425Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.164364Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573844249934311:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.164445Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.164472Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573844249934316:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.167487Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:09.176186Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573844249934318:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:10.294926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:12:10.337893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.358295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11831, MsgBus: 21709 2025-04-07T14:12:10.993914Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573849299922760:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:10.993982Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001432/r3tmp/tmp1ZnYQF/pdisk_1.dat 2025-04-07T14:12:11.093226Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:11.117865Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:11.117952Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:11.119861Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11831, node 5 2025-04-07T14:12:11.170035Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:11.170055Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:11.170063Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:11.170179Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21709 TClient is connected to server localhost:21709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:12:11.679609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.699436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.775497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.927438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.991721Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:14.243677Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573866479793653:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.243761Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.308754Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.338756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.408435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.446815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.480597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.512313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.559382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573866479794146:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.559501Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.559672Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573866479794151:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.563394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:14.572709Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573866479794153:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:15.661836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropAsyncReplicationCascade [GOOD] Test command err: Trying to start YDB, gRPC: 28750, MsgBus: 30020 2025-04-07T14:11:50.414617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573761363908528:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.414672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001442/r3tmp/tmpn0zpJu/pdisk_1.dat 2025-04-07T14:11:50.660116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28750, node 1 2025-04-07T14:11:50.700869Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:50.700895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:50.700903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:50.701012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:50.741379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:50.741515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:50.743144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30020 TClient is connected to server localhost:30020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.084830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.101368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:51.200840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.309910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.380153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.773647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573769953844839:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.773744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.974545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.995796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.015155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.038281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.059985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.081500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.111872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774248812627:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.111933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.111967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573774248812632:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.114914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:53.122043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573774248812634:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:53.914905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22294, MsgBus: 11039 2025-04-07T14:11:54.505661Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573778572226572:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:54.505710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001442/r3tmp/tmpul21eu/pdisk_1.dat 2025-04-07T14:11:54.567754Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22294, node 2 2025-04-07T14:11:54.606386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:54.606459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:54.608058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:54.621971Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:54.621988Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:54.621993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:54.622079Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11039 TClient is connected to server localhost:11039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:54.939972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.968229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:55.005437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.118869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.189857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.957660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573787162162863:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.957749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.985126Z node 2 :FLAT_TX_SCHEMESH ... e, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.852889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.334295Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573817128134486:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.334381Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:08.698434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropReplication, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:12:08.706619Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715679:2, at schemeshard: 72057594046644480 2025-04-07T14:12:08.712942Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,35) wasn't found 2025-04-07T14:12:08.713002Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,34) wasn't found 2025-04-07T14:12:08.713045Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037922 not found 2025-04-07T14:12:08.713062Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037921 not found 2025-04-07T14:12:08.720433Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037920 not found Trying to start YDB, gRPC: 61065, MsgBus: 63421 2025-04-07T14:12:09.592058Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573843409979226:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:09.592137Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001442/r3tmp/tmpgGJx5i/pdisk_1.dat 2025-04-07T14:12:09.657008Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61065, node 5 2025-04-07T14:12:09.692528Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:09.692617Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:09.694191Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:09.705337Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:09.705359Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:09.705365Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:09.705453Z node 5 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:63421 TClient is connected to server localhost:63421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:12:10.133758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.139328Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:10.159000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:10.217348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:10.371527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:10.437425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:12.840696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573856294882811:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.840796Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.880119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.910309Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.939437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:12.994728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.032654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.076735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.158202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573860589850607:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.158288Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.158751Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573860589850612:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.162958Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:13.173841Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573860589850614:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:14.185813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.276478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.424011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.451505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.595574Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573843409979226:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:14.595678Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:15.341065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropReplicationCascade, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.351282Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715678:2, at schemeshard: 72057594046644480 2025-04-07T14:12:15.358614Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037921 not found 2025-04-07T14:12:15.358655Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037922 not found 2025-04-07T14:12:15.359857Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,35) wasn't found 2025-04-07T14:12:15.359976Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,34) wasn't found 2025-04-07T14:12:15.382458Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037920 not found 2025-04-07T14:12:15.384882Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037923 not found >> KqpScheme::AddColumnFamilyWithCompressionLevel [GOOD] >> KqpScheme::AddChangefeed >> KqpScan::AggregateByColumn [GOOD] >> KqpScan::AggregateCountStar >> KqpOlapScheme::DropThenAddColumnIndexation [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] >> KqpScheme::AsyncReplicationEndpointAndDatabase >> KqpScheme::CreateExternalDataSourceValidationAuthMethod [GOOD] >> KqpScheme::CreateExternalDataSourceValidationLocation >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-04-07T14:09:26.417454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: 
[1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.417744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.417884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fc/r3tmp/tmp8hWKWs/pdisk_1.dat 2025-04-07T14:09:26.801325Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14098, node 1 2025-04-07T14:09:27.316523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.316576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.316606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.317409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.325802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.440377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.440529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.462233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9479 2025-04-07T14:09:28.165263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.548742Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.592960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.593075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.640046Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.648236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.883124Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.937202Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.937325Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:31.966683Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:31.966831Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:31.967027Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:31.967097Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:31.967151Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:31.967201Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:31.967249Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:31.967319Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:31.967736Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.186011Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.186116Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.193131Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.200524Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.201349Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.202452Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.224438Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.224504Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.224578Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.226411Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.226504Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.241703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.254910Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.255076Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.267985Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.284415Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.326118Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.598239Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.765656Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.731101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.731219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.808042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.272985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.273138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.274436Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:34.274606Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:09:34.274680Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.274742Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T14:09:34.275258Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T14:09:34.275667Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T14:09:34.275874Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:09:34.275943Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:09:34.276168Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:09:34.276239Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T14:09:34.318640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.318749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.319144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.325450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:09:34.490934Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:09:34.491011Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:09:34.591161Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T14:09:34.896869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:09:35.078252Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2586:3168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:09:35.078388Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:09:35.078423Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2586:3168], StatRequests.size() = 1 2025-04-07T14:09:35.283511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89bjg1f9r844p4wah9972w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFlYzQ4YmYtNzhjYjk2OS1hMDJmZjE4ZC1kMWQwOTM5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:09:35.358596Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServ ... 621Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:24.515696Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:24.515898Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:11:28.552987Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:31.436859Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:31.437230Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:11:35.420067Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:38.160521Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:38.160832Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:11:42.099879Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:44.909906Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:44.910165Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:11:48.838543Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:51.542145Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:51.542416Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:11:55.700776Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:11:58.459580Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:11:58.459921Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:12:02.580637Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:05.325779Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:05.326050Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:12:09.336958Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:11.023047Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:12:11.023139Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:12:11.023183Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no 
StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:12:11.023227Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:12:12.398190Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:12.398546Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:12:12.469399Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:12:12.469458Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037889 2025-04-07T14:12:12.469669Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2025-04-07T14:12:12.498223Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:12:13.912121Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:13.912223Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:13.912265Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:13.912341Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:12:13.912395Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:13.912806Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:13.945629Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:12:13.949457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6466:4583], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.949576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6475:4588], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.949666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:13.961278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:12:14.019612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6480:4591], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:12:14.324700Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6599:4655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:14.324924Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:12:14.325008Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6601:4657] 2025-04-07T14:12:14.325072Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6601:4657] 2025-04-07T14:12:14.325354Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:6602:4658] 2025-04-07T14:12:14.325470Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:6602:4658], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:12:14.325523Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:12:14.325663Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6601:4657], server id = [2:6602:4658], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:14.325714Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:14.325776Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6599:4655], StatRequests.size() = 1 2025-04-07T14:12:14.447453Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTc4NThhYzgtZTI1ZjUzNjktZTM2ODFiZDItNDQwMDhlMmY=, TxId: 2025-04-07T14:12:14.447533Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTc4NThhYzgtZTI1ZjUzNjktZTM2ODFiZDItNDQwMDhlMmY=, TxId: 2025-04-07T14:12:14.447955Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:14.462031Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:14.462106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:14.549646Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:12:14.549728Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:12:14.614313Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6601:4657], schemeshard count = 1 2025-04-07T14:12:15.416114Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:15.416213Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:12:15.416260Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:16.444087Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:16.476263Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:16.476397Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:12:16.476434Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:16.476928Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:16.479548Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:12:16.502035Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzY4ZTA3NjctYzE1MDM5OWItZmY5NDBiYTAtMTQ1OTE5NGQ=, TxId: 2025-04-07T14:12:16.502093Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzY4ZTA3NjctYzE1MDM5OWItZmY5NDBiYTAtMTQ1OTE5NGQ=, TxId: 2025-04-07T14:12:16.502414Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:16.521167Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:16.521231Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2631:3182] 2025-04-07T14:12:16.521804Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6725:4729]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:16.526510Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:16.526583Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:16.531643Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:16.531708Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:16.531761Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:16.534073Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-07T14:12:16.534315Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpScheme::ChangefeedTopicPartitions [GOOD] >> KqpScheme::CreateAlterDropTableStore >> KqpScheme::DoubleCreateExternalTable [GOOD] >> KqpScheme::DoubleCreateResourcePool >> SystemView::Describe [GOOD] >> SystemView::DescribeAccessDenied >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsFollowers >> KqpOlapScheme::AddColumn [GOOD] >> KqpOlapScheme::AddColumnErrors >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat >> KqpConstraints::IndexedTableAndNotNullColumn [GOOD] >> KqpConstraints::IndexAutoChooseAndNonReadyIndex >> KqpOlapScheme::SetColumnFamily [GOOD] >> KqpOlapScheme::UnknownColumnFamily >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::QueryStats >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndex >> KqpScheme::ModifyPermissionsByRelativePathQueryClient [GOOD] >> KqpScheme::OlapSharding_KeyOnly >> SystemView::QueryStatsRetries [GOOD] >> SystemView::StoragePoolsFields >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] >> 
AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> KqpScheme::DisableDropExternalTable [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-04-07T14:09:26.596994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:26.597405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:26.597570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fb/r3tmp/tmpSbxxSw/pdisk_1.dat 2025-04-07T14:09:27.054463Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10022, node 1 2025-04-07T14:09:27.325024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.325075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.325108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.325669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.327793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:27.440843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:27.440991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:27.456955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25550 2025-04-07T14:09:28.202675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:31.611262Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:31.653996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:31.654116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:31.698574Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:31.707085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:31.938855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:31.994043Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:31.994143Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.026173Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.026370Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.026596Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.026665Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.026722Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.026794Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.026862Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.026923Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.027354Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.251211Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.251326Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.260580Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:32.268219Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:32.268960Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:32.270016Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.291455Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.291527Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.291599Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.293704Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.293799Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.310659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.322842Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.323005Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.346295Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.365972Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.418819Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:32.699450Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:32.895916Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:33.730748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.730903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:33.809555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:33.924226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:33.924516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:33.924871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:33.925042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:33.925166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:33.925297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:33.925424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:33.925551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:33.925699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:33.925831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:33.925980Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:33.926087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:33.956624Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:33.956787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:33.956938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:33.956997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:33.957215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:33.957257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:33.957353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema ... e id = 2, client id = [2:7253:5390], server id = [2:7298:5414], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:14.682443Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7298:5414], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:14.682585Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7299:5415] 2025-04-07T14:12:14.682668Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7299:5415], schemeshard id = 72075186224037889 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to 2025-04-07T14:12:14.796589Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7303:5418] 2025-04-07T14:12:14.797244Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2655:3163] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:14.797291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2655:3163] 2025-04-07T14:12:14.797361Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:16.064907Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:16.064997Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:16.065040Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:16.065090Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:12:16.065140Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:16.065846Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:16.079043Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:16.079411Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:16.079505Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:16.080309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:16.095673Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:16.095938Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:16.096466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7349:5445], server id = [2:7350:5446], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:16.096600Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7349:5445], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:16.097570Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:16.097675Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:16.097916Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:16.098118Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:16.098390Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:16.104311Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7349:5445], server id = [2:7350:5446], tablet id = 72075186224037899 2025-04-07T14:12:16.104368Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:16.106058Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:16.129820Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7367:5463]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:16.130056Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:16.130102Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7367:5463], StatRequests.size() = 1 2025-04-07T14:12:16.249352Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODNiZmQ5ZmUtY2EzMDE5NzgtNWUzYmVhM2EtZTBiZDg1MTc=, TxId: 2025-04-07T14:12:16.249420Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODNiZmQ5ZmUtY2EzMDE5NzgtNWUzYmVhM2EtZTBiZDg1MTc=, TxId: 2025-04-07T14:12:16.249815Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:16.265526Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:16.265614Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:16.988931Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:16.989015Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:17.683686Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:17.683772Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:17.684305Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:17.707319Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:17.707737Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:17.707792Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:12:17.733069Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:19.108042Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:19.108120Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:19.108151Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:19.108389Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:19.110089Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:19.110224Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:19.126527Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:20.561372Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:20.561427Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:20.561471Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:21.899757Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:21.899939Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:21.911725Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:21.911846Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:12:21.911880Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:21.912388Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:21.927379Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:21.927732Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:21.927786Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:21.928084Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:21.943737Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:21.943888Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:21.944324Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7540:5561], server id = [2:7541:5562], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:21.944440Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7540:5561], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.945073Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:21.945171Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:21.945325Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:21.945494Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:21.945817Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:21.947298Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7540:5561], server id = [2:7541:5562], tablet id = 72075186224037899 2025-04-07T14:12:21.947323Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.947780Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:21.966214Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTE1MDdiZGMtOTg5ZjRmNS03ZmQyYTBlOS0yMWIyN2FlYQ==, TxId: 2025-04-07T14:12:21.966278Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTE1MDdiZGMtOTg5ZjRmNS03ZmQyYTBlOS0yMWIyN2FlYQ==, TxId: 2025-04-07T14:12:21.966698Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:21.985898Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:21.985970Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2655:3163] >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-04-07T14:09:31.961313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:31.961743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:31.961921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013bf/r3tmp/tmpnTwrVx/pdisk_1.dat 2025-04-07T14:09:32.376361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13546, node 1 2025-04-07T14:09:32.606492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:32.606553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:32.606587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:32.607239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:32.616223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.715170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.715292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.735144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9413 2025-04-07T14:09:33.439957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:36.797867Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:36.836954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:36.837065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:36.897824Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:36.907354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:37.154347Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:37.209142Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:37.209233Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:37.234483Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:37.234649Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:37.234860Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:37.234931Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:37.234983Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:37.235030Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:37.235082Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:37.235132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:37.235543Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:37.467986Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:37.468090Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:37.474698Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:37.489805Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:37.490977Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:37.492273Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:37.518361Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:37.518413Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:37.518462Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:37.519683Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:37.519757Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:37.530516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:37.540632Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:37.540802Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:37.555900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:37.572685Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:37.616532Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:37.890156Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:38.064952Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:38.789319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:38.789433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:38.805572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:39.002959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:39.003178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:39.003456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:39.003604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:39.003710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:39.003827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:39.003926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:39.004040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:39.004159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:39.004265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:39.004359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:39.004531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:39.063527Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:39.063590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:39.063741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:39.063812Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:39.063882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:39.063946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... [72075186224037897] Subscribed for config changes 2025-04-07T14:12:21.834802Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:21.834858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:21.834952Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:21.835610Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:21.835704Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:21.838122Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:21.909025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:21.909213Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:21.910192Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8213:6112], server id = [2:8218:6117], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:21.910605Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8213:6112], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.910865Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8214:6113], server id = [2:8219:6118], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:21.910931Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8214:6113], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.910998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8215:6114], server id = [2:8220:6119], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:21.911046Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8215:6114], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.913107Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:21.913470Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8216:6115], server id = [2:8221:6120], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:21.913548Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8216:6115], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.913655Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:21.914267Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:21.914412Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8217:6116], server id = [2:8222:6121], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:21.914478Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8217:6116], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.915148Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8213:6112], server id = [2:8218:6117], tablet id = 72075186224037899 2025-04-07T14:12:21.915193Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.915242Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:21.915534Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8214:6113], server id = [2:8219:6118], tablet id = 72075186224037900 2025-04-07T14:12:21.915561Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.915737Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8215:6114], server id = [2:8220:6119], tablet id = 72075186224037901 2025-04-07T14:12:21.915759Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.915893Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8223:6122], server id = [2:8225:6124], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:21.915959Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8223:6122], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.916439Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:21.916654Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8216:6115], server id = [2:8221:6120], tablet id = 72075186224037902 2025-04-07T14:12:21.916699Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.916806Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8224:6123], server id = [2:8227:6126], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:21.916881Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8224:6123], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.917004Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8226:6125], server id = [2:8229:6128], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:21.917057Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8226:6125], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.917868Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:21.918007Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8228:6127], server id = [2:8230:6129], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:21.918066Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8228:6127], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.918203Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8217:6116], server id = [2:8222:6121], tablet id = 72075186224037903 2025-04-07T14:12:21.918225Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.918266Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:21.918759Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:21.918881Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8231:6130], server id = [2:8232:6131], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:21.918938Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8231:6130], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:21.918984Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8223:6122], server id = [2:8225:6124], tablet id = 72075186224037904 2025-04-07T14:12:21.919004Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.919143Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:21.919579Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8224:6123], server id = [2:8227:6126], tablet id = 72075186224037905 2025-04-07T14:12:21.919604Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.919726Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8226:6125], server id = [2:8229:6128], tablet id = 72075186224037906 2025-04-07T14:12:21.919748Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.919803Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:21.919840Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:21.919993Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8228:6127], server id = [2:8230:6129], tablet id = 72075186224037907 2025-04-07T14:12:21.920016Z node 2 :STATISTICS DEBUG: Skip 
EvClientDestroyed 2025-04-07T14:12:21.920102Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:21.920295Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:21.920552Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:21.923778Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8231:6130], server id = [2:8232:6131], tablet id = 72075186224037908 2025-04-07T14:12:21.923817Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:21.924104Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:21.946000Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8249:6148]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:21.946227Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:21.946275Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8249:6148], StatRequests.size() = 1 2025-04-07T14:12:22.089049Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjU5ODRjMC1lNDU1NGE1ZC1iNDIyZjZlZS1kNDVhNzQyNg==, TxId: 2025-04-07T14:12:22.089134Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjU5ODRjMC1lNDU1NGE1ZC1iNDIyZjZlZS1kNDVhNzQyNg==, TxId: 2025-04-07T14:12:22.089756Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:22.102954Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8261:6156] 2025-04-07T14:12:22.103093Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8165:6084], server id = [2:8261:6156], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:22.103188Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8262:6157] 2025-04-07T14:12:22.103320Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8261:6156], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:22.103466Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8262:6157], schemeshard id = 72075186224037889 2025-04-07T14:12:22.118925Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:22.119002Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:22.282185Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8267:6160]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:22.282554Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:22.282614Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:22.285588Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:22.285657Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:22.285715Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:22.291598Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-04-07T14:09:27.995075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.995396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.995551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013dd/r3tmp/tmpjA9cET/pdisk_1.dat 2025-04-07T14:09:28.341980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2058, node 1 2025-04-07T14:09:28.561647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.561706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.561737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.562398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.564814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.681257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.681411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.701841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4638 2025-04-07T14:09:29.407202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.005741Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:33.047991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.048109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.096805Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:33.102144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.323734Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:33.373846Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:33.373930Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:33.411826Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:33.412008Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:33.412229Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:33.412314Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:33.412373Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:33.412432Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:33.412479Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:33.412537Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:33.413017Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:33.649425Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.649544Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.662898Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:33.671166Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:33.672243Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:33.673624Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:33.711857Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:33.711929Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:33.712003Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:33.714120Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.714224Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.730999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:33.749152Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:33.749319Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:33.765536Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:33.785401Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.826912Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:34.196449Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:34.373443Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:35.239144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.239277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.256935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:35.376997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:35.377236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:35.377595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:35.377766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:35.377886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:35.378065Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:35.378213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:35.378338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:35.378458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:35.378586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:35.378715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:35.378884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:35.409070Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:35.409180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:35.409327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:35.409385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:35.409545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:35.409583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:35.409687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema:: ... olumn count# 0 2025-04-07T14:12:15.674714Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2025-04-07T14:12:15.674811Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2025-04-07T14:12:15.674903Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2025-04-07T14:12:15.674965Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:12:15.675106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:15.675910Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:12:15.676995Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:15.677068Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:15.677196Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:15.677780Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:15.677837Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:15.679274Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:15.737798Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:15.737975Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:15.738476Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7347:5448], server id = [2:7348:5449], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:15.738590Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7347:5448], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:15.739469Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:15.739562Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:15.739728Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:15.739896Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:15.740137Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:15.742554Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7347:5448], server id = [2:7348:5449], tablet id = 72075186224037899 2025-04-07T14:12:15.742615Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:15.743049Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:15.764220Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7365:5466]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:15.764426Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:15.764476Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7365:5466], StatRequests.size() = 1 2025-04-07T14:12:15.877662Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzkwNTk1ZWQtNWVmOTlmNzgtOWViNzg0OWQtZmVjOTA2ZDU=, TxId: 2025-04-07T14:12:15.877737Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzkwNTk1ZWQtNWVmOTlmNzgtOWViNzg0OWQtZmVjOTA2ZDU=, TxId: 2025-04-07T14:12:15.878350Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:15.890458Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7375:5472] 2025-04-07T14:12:15.890650Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7375:5472], schemeshard id = 72075186224037889 2025-04-07T14:12:15.890723Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7300:5421], server id = [2:7376:5473], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:15.890754Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7376:5473] 2025-04-07T14:12:15.890812Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7376:5473], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-07T14:12:15.917677Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:15.917742Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:16.079570Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7384:5478] 2025-04-07T14:12:16.080366Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2655:3163] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:16.080429Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2655:3163] 2025-04-07T14:12:16.080497Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:16.669156Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:16.669236Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:17.487872Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:17.487974Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:17.488693Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:17.503862Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:17.504332Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:17.504391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:12:17.533091Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:19.047780Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:19.047861Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:19.047912Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:19.048117Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:19.048542Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:19.048633Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:19.069058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:20.573331Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:20.573408Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:20.573456Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:22.052951Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:22.053086Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:12:22.053130Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:22.053715Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:22.066626Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:22.066982Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:22.067041Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:22.067436Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:22.082990Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:22.083207Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:22.083750Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7543:5565], server id = [2:7544:5566], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:22.083871Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7543:5565], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:22.084581Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:22.084678Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:22.084841Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:22.085003Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:22.085268Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:22.089155Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7543:5565], server id = [2:7544:5566], tablet id = 72075186224037899 2025-04-07T14:12:22.089196Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:22.089488Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:22.111344Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGZjYjBhMTMtYWRlZWVjNDMtNzI4YjQ0Mi1kMDBkYjM4YQ==, TxId: 2025-04-07T14:12:22.111423Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGZjYjBhMTMtYWRlZWVjNDMtNzI4YjQ0Mi1kMDBkYjM4YQ==, TxId: 2025-04-07T14:12:22.111874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:22.125297Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:22.125372Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2655:3163] >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] >> KqpScan::AggregateCountStar [GOOD] >> SystemView::TopPartitionsFields [GOOD] >> SystemView::TopPartitionsFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-04-07T14:09:27.372470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:399:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.372775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.372902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013e8/r3tmp/tmpSAHWg7/pdisk_1.dat 2025-04-07T14:09:27.732202Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27786, node 1 2025-04-07T14:09:27.983974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:27.984021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:27.984049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:27.984203Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:27.990971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.092323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.092440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.109467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62169 2025-04-07T14:09:28.728333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.105741Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.143807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.143940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.199149Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.203064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.424755Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:32.507109Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:32.507209Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:32.543648Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:32.545880Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:32.546112Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:32.546177Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:32.546243Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:32.546313Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:32.546364Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:32.546418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:32.547113Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:32.767881Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.767980Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:32.773873Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1771:2559] 2025-04-07T14:09:32.780294Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1788:2569] 2025-04-07T14:09:32.780608Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1788:2569], schemeshard id = 72075186224037889 2025-04-07T14:09:32.797290Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:32.825855Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:32.825900Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:32.825948Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:32.830057Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.830123Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.841072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:32.854409Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:32.854541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:32.867504Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:32.884093Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:32.913776Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.223552Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:33.368938Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:34.257027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2144:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.257171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.277000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:34.395107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:34.395379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:34.395705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:34.395836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:34.395984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:34.396108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:34.396218Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:34.396391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:34.396524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:34.396638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:34.396779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:34.396895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2227:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:34.429710Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:34.429840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:34.430020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:34.430074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:34.430328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:34.430376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:34.430490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema ... olumn count# 0 2025-04-07T14:12:17.149665Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2025-04-07T14:12:17.149758Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2025-04-07T14:12:17.149835Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2025-04-07T14:12:17.149894Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:12:17.150030Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:17.151196Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:17.151255Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:17.151700Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:12:17.151774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:17.152597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:17.152651Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:17.154485Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:17.211315Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:17.211522Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:17.212123Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7352:5447], server id = [2:7353:5448], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:17.212254Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7352:5447], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:17.213155Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:17.213247Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:17.213463Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:17.213662Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:17.213920Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:17.216253Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7352:5447], server id = [2:7353:5448], tablet id = 72075186224037899 2025-04-07T14:12:17.216301Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:17.219013Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:17.249481Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7370:5465]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:17.249714Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:17.249764Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7370:5465], StatRequests.size() = 1 2025-04-07T14:12:17.373807Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjA3OTA4YTYtMzAzZmUzOC05ODk3YTk4Yy1hYTA5MThlNw==, TxId: 2025-04-07T14:12:17.373882Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA3OTA4YTYtMzAzZmUzOC05ODk3YTk4Yy1hYTA5MThlNw==, TxId: 2025-04-07T14:12:17.374453Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:17.390587Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7380:5471] 2025-04-07T14:12:17.390731Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7304:5419], server id = [2:7380:5471], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:17.390902Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7380:5471], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:17.391050Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7381:5472] 2025-04-07T14:12:17.391106Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7381:5472], schemeshard id = 72075186224037889 
2025-04-07T14:12:17.410363Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:17.410437Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:17.606271Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7389:5477] 2025-04-07T14:12:17.607111Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2661:3170] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:17.607182Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2661:3170] 2025-04-07T14:12:17.607261Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:18.121082Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:18.121201Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:18.789410Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:18.789517Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:18.790152Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:18.805300Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:18.805718Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:18.805775Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:12:18.818882Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:20.133873Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:20.133972Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:20.134020Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:20.134252Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:20.134771Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:20.134887Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:20.148375Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:21.451566Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:21.451660Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:21.451725Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:22.674857Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:22.675001Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:12:22.675092Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:22.675748Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:22.689186Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:22.689549Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:22.689617Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:22.690028Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:22.703685Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:22.704043Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:22.704747Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7546:5564], server id = [2:7547:5565], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:22.704923Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7546:5564], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:22.705694Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:22.705796Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:22.705981Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:22.706170Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:22.706529Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:22.709406Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7546:5564], server id = [2:7547:5565], tablet id = 72075186224037899 2025-04-07T14:12:22.709448Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:22.710232Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:22.733671Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2ZiNDQyNTMtOTA1MjlkYzctNDdiZTIwZjktNDFkMWE5OTE=, TxId: 2025-04-07T14:12:22.733770Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2ZiNDQyNTMtOTA1MjlkYzctNDdiZTIwZjktNDFkMWE5OTE=, TxId: 2025-04-07T14:12:22.734292Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:22.760209Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:22.760301Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2661:3170] >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableDropExternalTable [GOOD] Test command err: 2025-04-07T14:11:55.272500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573784194747248:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:55.272762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001427/r3tmp/tmphYZYSD/pdisk_1.dat 2025-04-07T14:11:55.539173Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24111, node 1 2025-04-07T14:11:55.616105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:55.616140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:55.616146Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:55.616260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:55.632199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:55.632336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:55.634997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:55.846642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:55.885750Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T14:11:57.641112Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:11:57.644070Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmJjZTJjZDUtZTY2ZDJmYS1mZDg4ZmI2YS0yMjVkNjA5MA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmJjZTJjZDUtZTY2ZDJmYS1mZDg4ZmI2YS0yMjVkNjA5MA== 2025-04-07T14:11:57.644836Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490573792784682638:2301], Start check tables existence, number paths: 2 2025-04-07T14:11:57.644963Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T14:11:57.645018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:11:57.645048Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:11:57.645099Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmJjZTJjZDUtZTY2ZDJmYS1mZDg4ZmI2YS0yMjVkNjA5MA==, ActorId: [1:7490573792784682639:2302], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:11:57.649482Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490573792784682638:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T14:11:57.649552Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490573792784682638:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T14:11:57.649587Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490573792784682638:2301], Successfully finished 2025-04-07T14:11:57.649652Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T14:11:57.667764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.679480Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490573793179376654:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:57.679562Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:11:57.686594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:57.686684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:57.693375Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:11:57.693482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:57.795105Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. 
EnableColumnStatistics=false 2025-04-07T14:11:57.795266Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:57.813322Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:57.813413Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:57.816594Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:57.898900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:11:57.910341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573794874280173:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:57.910395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:11:57.921405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:57.921469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:57.925623Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:11:57.926699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:58.014875Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. EnableColumnStatistics=false 2025-04-07T14:11:58.027518Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:58.044410Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:58.044476Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:58.047410Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:58.120152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:58.138689Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.139004Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.139104Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.139255Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.139353Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.139468Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:11:58.200575Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:00.272616Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573784194747248:2052];send_to=[0:7307199536658146131:7762515]; 
2025-04-07T14:12:00.272799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:00.278041Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:12:00.278183Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:12:00.278199Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:12:00.278398Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490573806064279270:2316], Start check tables existence, number paths: 2 2025-04-07T14:12:00.278494Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T14:12:00.279869Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490573806064279270:2316], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T14:12:00.279910Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490573806064279270:2316], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T14:12:00.279927Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7490573806064279270:2316], Successfully finished 2025-04-07T14:12:00.279964Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T14:12:00.380041Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:12:00.380544Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:12:00.380571Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:12:00.380604Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T14:12:00.380627Z node 2 :KQP_WORKLOAD_SER ... : 72057594046644480 waiting... 2025-04-07T14:12:12.752119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:12.823881Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:15.321484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573869150157843:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.321607Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.371016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.397881Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.421847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.452137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.479489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.515084Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.602192Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573869150158340:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.602258Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.602342Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573869150158345:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.606001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:15.617318Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573869150158347:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:16.887878Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573851970286958:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.887947Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14865, MsgBus: 24084 2025-04-07T14:12:17.665730Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490573880610253580:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.665791Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001427/r3tmp/tmppSXUC8/pdisk_1.dat 2025-04-07T14:12:17.780198Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:17.797683Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.797773Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.803314Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14865, node 7 2025-04-07T14:12:17.861238Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:17.861259Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:17.861266Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:17.861389Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24084 TClient is connected to server localhost:24084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:18.345121Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:18.357301Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:18.368025Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:18.433815Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:18.597137Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:18.678794Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.376954Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573897790124465:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.377089Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.423367Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.462950Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.503965Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.541792Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.576300Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.632273Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.724216Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573897790124966:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.724321Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.724598Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573897790124971:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.729827Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:21.740800Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490573897790124973:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:22.664935Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490573880610253580:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:22.665014Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScheme::DoubleCreateResourcePool [GOOD] >> KqpScheme::DoubleCreateResourcePoolClassifier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] Test command err: Trying to start YDB, gRPC: 26674, MsgBus: 5834 2025-04-07T14:11:58.930174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573798678271137:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:58.930228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001422/r3tmp/tmpMvOR2j/pdisk_1.dat 2025-04-07T14:11:59.189489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26674, node 1 2025-04-07T14:11:59.266733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:59.266760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:59.266774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:59.266976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:59.283745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:59.283876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:59.285774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5834 TClient is connected to server localhost:5834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:59.658305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:59.678333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.784171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.916439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.991067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.449329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573811563174740:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.449458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.673836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.694821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.715435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.737625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.760345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.785650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:01.815862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573811563175231:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.815919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.816061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573811563175236:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:01.819048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:01.825910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573811563175238:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:02.731108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.769555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.794952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.821624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.848981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.873484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.896360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.921804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.947177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.975456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.999223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.022076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26581, MsgBus: 63172 2025-04-07T14:12:03.524575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573819347091744:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:03.524628Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001422/r3tmp/tmpLGF6r6/pdisk_1.dat 2025-04-07T14:12:03.627725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:03.646450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:12:03.646546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:03.647871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26581, node 2 2025-04-07T14:12:03.686513Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:03.686548Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:03.686556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:03.686655Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63172 TClient is connected to server localhost:63172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: ... rmissions } 2025-04-07T14:12:15.948903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.019583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.054054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.083129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.117056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.149605Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.248325Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573875295071137:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.248406Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.248584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573875295071142:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.252080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:16.262730Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573875295071144:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:17.425520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:17.438524Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573858115199758:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.438601Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:17.523452Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:17.654560Z node 4 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe table failed: path# /Root/table, status# CLIENT_CANCELLED, issues# [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:6118 } ] Trying to start YDB, gRPC: 24835, MsgBus: 61226 2025-04-07T14:12:18.484584Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573882142654600:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:18.484643Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001422/r3tmp/tmpSgTjKN/pdisk_1.dat 2025-04-07T14:12:18.610369Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:18.624944Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:18.625038Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:18.627244Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24835, node 5 2025-04-07T14:12:18.657208Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:18.657229Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:18.657238Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:18.657344Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61226 TClient is connected to server localhost:61226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:19.193462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.211019Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.268221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:19.410833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.466176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:21.948042Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573895027558187:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.948126Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.991430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.026677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.102438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.177983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.251936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.298889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.383136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573899322525992:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.383221Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.383420Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573899322525997:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.386502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:22.394606Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573899322525999:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:23.487218Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573882142654600:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:23.489103Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:23.561116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:23.628696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpOlapScheme::AddColumnErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] Test command err: Trying to start YDB, gRPC: 8417, MsgBus: 25466 2025-04-07T14:11:54.062094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573778983764581:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:54.062161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001428/r3tmp/tmpOHevRk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8417, node 1 2025-04-07T14:11:54.326505Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:54.326541Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:54.343824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:54.353741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:54.353773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:54.353781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:54.353851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:54.416437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:54.416552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:54.418137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25466 TClient is connected to server localhost:25466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:54.765559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:11:54.785684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.872917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.993683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.072655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.579626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573787573700876:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.579726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.812880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.832340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.854030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.875975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.896885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.922052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.959189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573787573701366:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.959312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.959480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573787573701372:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.962492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:56.969921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573787573701374:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:57.876553Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-07T14:11:57.894743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32222, MsgBus: 19967 2025-04-07T14:11:58.518662Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573798994085626:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:58.518746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001428/r3tmp/tmppKY8jX/pdisk_1.dat 2025-04-07T14:11:58.588931Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32222, node 2 2025-04-07T14:11:58.621402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:58.621481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:58.623161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:58.638456Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:58.638477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:58.638481Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:58.638538Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19967 TClient is connected to server localhost:19967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:59.034317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.050322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:59.097437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.247620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:59.319577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.163026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573811878989231:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ac ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:13.248649Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:13.331225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:15.786165Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573869880282336:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.786252Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.836145Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.863559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.896628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.927383Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.996924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.068393Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.227478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573874175250140:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.227589Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.227939Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573874175250145:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:16.232250Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:16.244737Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573874175250147:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:17.340503Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573856995378745:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.340577Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6923, MsgBus: 15673 2025-04-07T14:12:18.588889Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573881640618005:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:18.588949Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001428/r3tmp/tmphHUhvY/pdisk_1.dat 2025-04-07T14:12:18.731092Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:18.734874Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:18.734965Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:18.736723Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6923, node 5 2025-04-07T14:12:18.793268Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:18.793296Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:18.793304Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:18.793412Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15673 TClient is connected to server localhost:15673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:19.370544Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.399860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:19.483276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.687704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.776336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.359731Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573898820488875:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.359810Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.392228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.428421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.460376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.494974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.535077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.572299Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.626038Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573898820489368:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.626105Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.626154Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573898820489373:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.630157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:22.640999Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573898820489375:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:23.591379Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573881640618005:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:23.591463Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpOlapScheme::UnknownColumnFamily [GOOD] >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey >> KqpScheme::OlapSharding_KeyOnly [GOOD] >> SystemView::GroupsFields [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-04-07T14:09:38.994742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:38.995060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:38.995212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b4/r3tmp/tmporjXXh/pdisk_1.dat 2025-04-07T14:09:39.351719Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28064, node 1 2025-04-07T14:09:39.574378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:39.574438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:39.574468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:39.575091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:39.577212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:39.680653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:39.680828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:39.695219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6163 2025-04-07T14:09:40.309834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:43.432032Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:43.467298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:43.467413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:43.514136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:43.518343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:43.742481Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:43.809829Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:43.809918Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:43.842564Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:43.842745Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:43.842973Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:43.843041Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:43.843097Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:43.843151Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:43.843198Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:43.843255Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:43.843687Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:44.063503Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:44.063600Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:44.070653Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:44.080823Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:44.081823Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:44.082967Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:44.105976Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:44.106030Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:44.106085Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:44.107729Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:44.107797Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:44.126936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:44.140006Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:44.140159Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:44.152084Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:44.169528Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:44.209683Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:44.497707Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:44.656072Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:45.424105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:45.424249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:45.441983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:45.635103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:45.635363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:45.635711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:45.635890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:45.635976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:45.636050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:45.636137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:45.636210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:45.636298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:45.636413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:45.636527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:45.636698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:45.693345Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:45.693434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:45.693632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:45.693728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:45.693827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:45.693956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... 037897] EvServerConnected, pipe server id = [2:8280:6171] 2025-04-07T14:12:21.226923Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8280:6171], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:21.240210Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:21.240290Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:21.386963Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8288:6176] 2025-04-07T14:12:21.388046Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3385:3249] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:21.388111Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:3385:3249] 2025-04-07T14:12:21.388197Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:21.847704Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:21.847790Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:21.858438Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T14:12:21.858493Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:22.479413Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:22.479495Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:22.479535Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:23.748917Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:23.749039Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. 
Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:23.749088Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:23.749750Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:23.762985Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:23.763374Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:23.763449Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:23.763838Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:23.777242Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:23.777425Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-07T14:12:23.778240Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8370:6220], server id = [2:8372:6222], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:23.778364Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8370:6220], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.778500Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8371:6221], server id = [2:8373:6223], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:23.778557Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8371:6221], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.779803Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8367:6217], server id = [2:8374:6224], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:23.779875Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8367:6217], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.780082Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:23.780688Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:23.780964Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8368:6218], server id = [2:8375:6225], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:23.781046Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8368:6218], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.781111Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8369:6219], server id = [2:8376:6226], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:23.781157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8369:6219], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.782009Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:23.782279Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8370:6220], server id = [2:8372:6222], tablet id = 72075186224037902 2025-04-07T14:12:23.782324Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.782480Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8371:6221], server id = [2:8373:6223], tablet id = 72075186224037903 2025-04-07T14:12:23.782503Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.782565Z node 2 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:23.782788Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:23.783132Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8377:6227], server id = [2:8379:6229], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:23.783201Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8377:6227], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.783429Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8367:6217], server id = [2:8374:6224], tablet id = 72075186224037899 2025-04-07T14:12:23.783458Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.783536Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8378:6228], server id = [2:8380:6230], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:23.783596Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8378:6228], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.784056Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8368:6218], server id = [2:8375:6225], tablet id = 72075186224037900 2025-04-07T14:12:23.784084Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.784575Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8369:6219], server id = [2:8376:6226], tablet id = 72075186224037901 2025-04-07T14:12:23.784603Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.784694Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8381:6231], server id = [2:8383:6233], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:23.784768Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8381:6231], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.784882Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8382:6232], server id = [2:8385:6235], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:23.784936Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8382:6232], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.785374Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:23.785553Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8384:6234], server id = [2:8386:6236], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:23.785613Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8384:6234], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:23.786104Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:23.786190Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:23.786699Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8377:6227], server id = [2:8379:6229], tablet id = 72075186224037904 2025-04-07T14:12:23.786726Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.786850Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:23.786950Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:23.786987Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-04-07T14:12:23.787278Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:23.787472Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:23.787715Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:23.790007Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8378:6228], server id = [2:8380:6230], tablet id = 72075186224037905 2025-04-07T14:12:23.790034Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.790342Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8381:6231], server id = [2:8383:6233], tablet id = 72075186224037906 2025-04-07T14:12:23.790371Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.790530Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:23.791008Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8382:6232], server id = [2:8385:6235], tablet id = 72075186224037907 2025-04-07T14:12:23.791039Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.791545Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8384:6234], server id = [2:8386:6236], tablet id = 72075186224037908 2025-04-07T14:12:23.791569Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:23.814578Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmIxYWVhY2YtN2E2NGRiZTAtZWM4NjU1ZTAtNDE2YjJiZTM=, TxId: 2025-04-07T14:12:23.814637Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmIxYWVhY2YtN2E2NGRiZTAtZWM4NjU1ZTAtNDE2YjJiZTM=, TxId: 2025-04-07T14:12:23.815094Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:23.831133Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:23.831195Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3385:3249] >> SystemView::DescribeSystemFolder >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestDrain >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] >> THiveTest::TestLocalReplacement >> THiveTest::TestCreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnErrors [GOOD] Test command err: Trying to start YDB, gRPC: 28703, MsgBus: 63291 2025-04-07T14:11:53.140679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573776705592541:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:53.141395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142d/r3tmp/tmpm7RGHS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28703, node 1 2025-04-07T14:11:53.408835Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:53.408870Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:11:53.419792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:53.441615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:53.441639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:53.441645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:53.441821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:53.491963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:53.492074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:53.493818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63291 TClient is connected to server localhost:63291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:53.823533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.840118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.947839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.044459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.115494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:55.655397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573785295528833:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.655525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:55.862553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.884796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.905045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.925643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.960913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.989897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.026357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573789590496621:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.026422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.026617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573789590496626:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:56.029304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:56.037835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573789590496628:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:56.878930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11069, MsgBus: 19687 2025-04-07T14:11:57.722741Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573793946136771:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:57.722836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142d/r3tmp/tmpSm6JHQ/pdisk_1.dat 2025-04-07T14:11:57.825627Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:57.841748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:57.841807Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:57.842884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11069, node 2 2025-04-07T14:11:57.885550Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:57.885575Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:57.885582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:57.885701Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19687 TClient is connected to server localhost:19687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:58.266273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:58.282450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:58.326876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:58.483633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:58.544326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:00.328744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573806831040353:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:00.328819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: ... , (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:20.810028Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:20.811686Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26413, node 6 2025-04-07T14:12:20.869161Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:20.869182Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:20.869191Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:20.869299Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64934 TClient is connected to server localhost:64934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:21.459033Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:21.472387Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:12:24.686749Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573908097774190:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.686921Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.695217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.751754Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:24.751824Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:24.752127Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:24.752260Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:24.752400Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:24.752535Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:24.752657Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:24.752956Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:24.753093Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:24.753216Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:24.753345Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:24.753468Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490573908097774237:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:12:24.758426Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:24.758491Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:24.758598Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:24.758652Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:24.758828Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:24.758865Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:24.758962Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:24.758999Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:24.759066Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:24.759099Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:24.759151Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:24.759194Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:24.759565Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:24.759609Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:24.759787Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:24.759817Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:12:24.759966Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:24.760012Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:24.760180Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:24.760218Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:24.760344Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:24.760378Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:24.815980Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573908097774307:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.816076Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> THiveTest::TestUpdateChannelValues >> THiveTest::TestReCreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::OlapSharding_KeyOnly [GOOD] Test command err: Trying to start YDB, gRPC: 18784, MsgBus: 22047 2025-04-07T14:12:00.125337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573805976954815:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:00.125465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141c/r3tmp/tmprvKokt/pdisk_1.dat 2025-04-07T14:12:00.387055Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18784, node 1 2025-04-07T14:12:00.447622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:00.447656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:00.447685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:00.447786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:00.464075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:00.464213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:00.465883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22047 TClient is connected to server localhost:22047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:00.873566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:00.895116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:00.989366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.103969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.159490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:02.623156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814566891132:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.623247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.829378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.851599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.870279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.889517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.910901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.936906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.967981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814566891623:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.968038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814566891628:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.968053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.970387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:02.977942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573814566891630:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:03.860384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17543, MsgBus: 24963 2025-04-07T14:12:05.348151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573825219143171:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:05.348229Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141c/r3tmp/tmp0bQ9r4/pdisk_1.dat 2025-04-07T14:12:05.425739Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:05.449823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:05.449901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:05.451656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17543, node 2 2025-04-07T14:12:05.481970Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:05.481994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:05.482001Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:05.482093Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24963 TClient is connected to server localhost:24963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:05.862892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.872156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:05.917991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.050157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.115523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:08.103950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573838104046753:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.104023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.142170Z node 2 :FLAT_TX_SCHEMESH ... node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:20.914811Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:20.915169Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:20.915322Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:20.915471Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:20.915622Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:20.915756Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:20.915897Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:20.916036Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:20.916183Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:20.916370Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:20.916524Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[4:7490573892936352017:2467];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:12:20.923249Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2025-04-07T14:12:20.923325Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:20.923466Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:20.923500Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:20.923711Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:20.923740Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:20.923844Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:20.923876Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:20.923969Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:20.924002Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:20.924051Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:20.924082Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:20.924629Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:20.924688Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:20.924928Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:20.924958Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:20.925112Z node 4 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:20.925146Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:20.925376Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:20.925415Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:20.925550Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:20.925586Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:20.998153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.025936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10235, MsgBus: 18124 2025-04-07T14:12:22.002177Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573901976916794:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:22.002236Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141c/r3tmp/tmpduS8fA/pdisk_1.dat 2025-04-07T14:12:22.107582Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:22.128186Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:22.128282Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:22.129999Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10235, node 5 2025-04-07T14:12:22.177893Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:22.177915Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:22.177921Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:22.178026Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18124 TClient is connected to server localhost:18124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:22.669198Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:25.425723Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573914861819301:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.425917Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> KqpScheme::CreateTableWithVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] Test command err: Trying to start YDB, gRPC: 13440, MsgBus: 1616 2025-04-07T14:12:02.418294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573814758833567:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:02.418352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140e/r3tmp/tmptPzqDQ/pdisk_1.dat 2025-04-07T14:12:02.666589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13440, node 1 2025-04-07T14:12:02.730210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:02.730241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:02.730247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:02.730345Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:02.760044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:02.760175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:02.761857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1616 TClient is connected to server localhost:1616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:03.126563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.147599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.253024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.369742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.425632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.865020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573823348769876:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:04.865150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.047586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.070129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.094148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.115253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.137575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.201513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.272712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573827643737672:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.272783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.272839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573827643737677:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.275811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:05.283908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573827643737679:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:06.086395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.129860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13348, MsgBus: 28570 2025-04-07T14:12:06.605777Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573831632340551:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:06.605859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140e/r3tmp/tmp9JsaiW/pdisk_1.dat 2025-04-07T14:12:06.664402Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13348, node 2 2025-04-07T14:12:06.707073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:06.707148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:06.708843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:06.718034Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:06.718054Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:06.718060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:06.718146Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28570 TClient is connected to server localhost:28570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:06.976461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.993926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:07.065548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.186897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.237443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:09.276018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573844517244139:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.276091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Ser ... chemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) Trying to start YDB, gRPC: 2334, MsgBus: 10116 2025-04-07T14:12:21.317576Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573896604700584:2132];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00140e/r3tmp/tmpvyEt2D/pdisk_1.dat 2025-04-07T14:12:21.403308Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:21.446552Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:21.462253Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:21.462358Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:21.463870Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2334, node 5 2025-04-07T14:12:21.533460Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:21.533481Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:21.533487Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:21.533579Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10116 TClient is connected to server localhost:10116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:21.910181Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:21.921551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:22.002683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.141502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.216386Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:24.607304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573909489604090:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.607372Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.651883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.692897Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.724531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.759143Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.791717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.898667Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.942596Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573909489604589:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.942663Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.942857Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573909489604594:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:24.947295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:24.962391Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573909489604596:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:26.174683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035146245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) 2025-04-07T14:12:26.241708Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035146245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... 
(TRUNCATED) 2025-04-07T14:12:26.298950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:26.315772Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573896604700584:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.315832Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1744035146245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestLockTabletExecution >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScriptingQueries >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateCountStar [GOOD] Test command err: Trying to start YDB, gRPC: 4833, MsgBus: 1737 2025-04-07T14:05:44.153759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1223:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:44.154654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:44.155242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:05:44.156305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1221:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:44.156593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1227:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:44.157152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:44.157196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:05:44.157236Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:44.157453Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:05:44.158692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1225:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:05:44.159058Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:05:44.159364Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00160e/r3tmp/tmpQG3kPm/pdisk_1.dat 2025-04-07T14:05:44.611656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4833, node 1 2025-04-07T14:05:44.873619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:05:44.873665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:05:44.873690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:05:44.874119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:05:45.005306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.005428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.021205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.021302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.026213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.026344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:05:45.026642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:05:45.026700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1737 2025-04-07T14:05:45.303413Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:05:45.303759Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:05:45.304098Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:05:45.304358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:45.304962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:45.305216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:05:45.305335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:05:45.618383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:45.871300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:46.725414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:48.296497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:49.329514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:05:51.348496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3785:3577], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.348743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:51.369739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.117659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:05:52.857892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:05:53.401334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:05:54.043038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:05:54.618648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:05:55.409107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4844:4187], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.409317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.410037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4849:4192], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:05:55.419839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:05:55.827123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4851:4194], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:06:03.263396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:06:03.263512Z node 1 :IMPORT WARN: Table profiles were not loaded (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:694: Trying to start YDB, gRPC: 6119, MsgBus: 25095 2025-04-07T14:12:12.434600Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573857239946558:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:12.435049Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: ... teTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:13.217478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:13.276423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.050121Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573870124850133:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.050210Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.074004Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.099920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.124911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.190623Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.220257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.250939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:15.285245Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573870124850626:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.285346Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.285503Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573870124850631:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.290149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:15.298529Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573870124850633:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:17.233328Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035136606, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 17409, MsgBus: 1544 2025-04-07T14:12:18.117114Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573884139954690:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:18.118795Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00160e/r3tmp/tmpo4j30c/pdisk_1.dat 2025-04-07T14:12:18.252645Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17409, node 6 2025-04-07T14:12:18.300383Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:18.300467Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:18.302713Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:18.314428Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:18.314462Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:18.314470Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:18.314582Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1544 TClient is connected to server localhost:1544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:18.779803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:18.786546Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:18.796034Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:18.852655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:18.998031Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.065434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:21.416370Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573897024858283:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.416453Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.450104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.526238Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.567963Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.603873Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.665594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.700984Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.758132Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573897024858781:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.758219Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.758427Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573897024858786:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:21.761479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:21.769332Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573897024858788:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:23.144725Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490573884139954690:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:23.145296Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:23.677021Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035143207, txId: 281474976715671] shutting down |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> THiveTest::TestLocalDisconnect >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> KqpScheme::CreateAsyncReplicationWithSecret [GOOD] >> KqpScheme::CreateBackupCollection >> TProxyActorTest::TestCreateSemaphore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-04-07T14:09:46.235377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:46.235745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:46.235920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00137b/r3tmp/tmpDhXRM8/pdisk_1.dat 2025-04-07T14:09:46.579016Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18510, node 1 2025-04-07T14:09:46.810538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:46.810596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:46.810627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:46.811217Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:46.813252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:46.906350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:46.906504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:46.921080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5451 2025-04-07T14:09:47.528580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:50.500596Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:50.541345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:50.541452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:50.580982Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:50.585228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:50.791014Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:50.859378Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:50.859490Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:50.889893Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:50.890079Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:50.890298Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:50.890360Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:50.890412Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:50.890469Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:50.890531Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:50.890584Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:50.891028Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:51.106798Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:51.106910Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:51.113438Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:51.122171Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:51.123050Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:51.124077Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:51.144296Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:51.144365Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:51.144435Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:51.147184Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:51.147252Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:51.160646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:51.172394Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:51.172509Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:51.184813Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:51.203756Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:51.244408Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:51.498476Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:51.654697Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:52.410232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:52.410372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:52.428544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:52.525670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:52.525841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:52.526085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:52.526193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:52.526291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:52.526366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:52.526429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:52.526503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:52.526600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:52.526692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:52.526784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:52.526878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:52.547927Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:52.548021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:52.548131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:52.548173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:52.548338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:52.548369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:52.548484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... 6330Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7170:5345], server id = [2:7171:5346], tablet id = 72075186224037897 2025-04-07T14:12:23.996407Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7306:5424] 2025-04-07T14:12:23.996457Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7306:5424] 2025-04-07T14:12:24.044880Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:12:24.045005Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:12:24.045736Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:12:24.046833Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:12:24.047154Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded database: /Root/Database 2025-04-07T14:12:24.047198Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start key 2025-04-07T14:12:24.047239Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table owner id: 72075186224037889 2025-04-07T14:12:24.047278Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table local path id: 4 2025-04-07T14:12:24.047315Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start time: 1744035143959553 2025-04-07T14:12:24.047352Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal IsColumnTable: 1 2025-04-07T14:12:24.047386Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded global traversal round: 2 2025-04-07T14:12:24.047517Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 1 2025-04-07T14:12:24.047575Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:12:24.047668Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2025-04-07T14:12:24.047753Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2025-04-07T14:12:24.047852Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2025-04-07T14:12:24.047917Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:12:24.048065Z node 2 :STATISTICS 
DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:24.048935Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:12:24.049446Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:24.049510Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:24.049802Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:12:24.050674Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:24.050737Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:24.054807Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:24.119736Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:24.119959Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:24.120613Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7353:5451], server id = [2:7354:5452], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:24.120771Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7353:5451], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:24.121863Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:24.121968Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:24.122179Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:24.122377Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:24.122684Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:24.126318Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7353:5451], server id = [2:7354:5452], tablet id = 72075186224037899 2025-04-07T14:12:24.126365Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:24.126913Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:24.151304Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7371:5469]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:24.151541Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:24.151579Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7371:5469], StatRequests.size() = 1 2025-04-07T14:12:24.295697Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWRiN2I5NDctMWZhYTZmY2MtYTY0MjM1MTQtNzZhMzM1NmQ=, TxId: 2025-04-07T14:12:24.295779Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWRiN2I5NDctMWZhYTZmY2MtYTY0MjM1MTQtNzZhMzM1NmQ=, TxId: 2025-04-07T14:12:24.296307Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:24.324590Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7383:5475] 2025-04-07T14:12:24.324780Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7306:5424], server id = [2:7384:5476], tablet id = 72075186224037897, status = OK 2025-04-07T14:12:24.324858Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7384:5476] 2025-04-07T14:12:24.324928Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7383:5475], schemeshard id = 72075186224037889 2025-04-07T14:12:24.325000Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7384:5476], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-07T14:12:24.343866Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:24.343945Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:24.454638Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7390:5481] 2025-04-07T14:12:24.455342Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2655:3163] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:24.455399Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:2655:3163] 2025-04-07T14:12:24.455477Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:24.932982Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T14:12:24.933071Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:25.481302Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:25.481399Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:25.481448Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:26.603227Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:26.603352Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:26.603405Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:26.603888Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:26.617716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:26.618140Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:26.618207Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:26.618596Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:26.631858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:26.632056Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-07T14:12:26.632578Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7469:5524], server id = [2:7470:5525], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:26.632745Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7469:5524], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:26.633396Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:26.633492Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:26.633674Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:26.633840Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:26.634114Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:26.636522Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7469:5524], server id = [2:7470:5525], tablet id = 72075186224037899 2025-04-07T14:12:26.636565Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:26.637210Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:26.674777Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2IxMjQ0YzAtNjczMGVjMTEtNzgzNDA5YWUtZmM4ODY5YzM=, TxId: 2025-04-07T14:12:26.674851Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2IxMjQ0YzAtNjczMGVjMTEtNzgzNDA5YWUtZmM4ODY5YzM=, TxId: 2025-04-07T14:12:26.675377Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:26.691106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:26.691173Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2655:3163] >> TProxyActorTest::TestCreateSemaphoreInterrupted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 11926, MsgBus: 20832 2025-04-07T14:11:52.277568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573773301767050:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:52.277673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001431/r3tmp/tmpllH0SB/pdisk_1.dat 2025-04-07T14:11:52.542098Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11926, node 1 2025-04-07T14:11:52.583467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:52.583492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:52.583530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:52.583643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:52.637761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:52.637936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:52.639419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20832 TClient is connected to server localhost:20832 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:53.024282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.047212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:11:53.175189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.293618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.349732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.388347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573781891703348:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.388437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.580112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.601849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.623513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.643388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.666140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.694555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.766774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573781891703845:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.766831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.767038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573781891703850:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.769651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.779168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573781891703852:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:55.708698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11443, MsgBus: 25573 2025-04-07T14:11:56.254339Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573788249237681:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:56.254425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001431/r3tmp/tmpc6kqYU/pdisk_1.dat 2025-04-07T14:11:56.324313Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11443, node 2 2025-04-07T14:11:56.355515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:56.355628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:56.357185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:56.376891Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:56.376915Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:56.376924Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:56.377028Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25573 TClient is connected to server localhost:25573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:56.700071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.707326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:56.775142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.909882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.968690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:58.688506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573796839173976:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:58.688578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:58.718784Z node 2 :FLAT_TX_SCHEMESH ... 3837973517792:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:08.222173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:08.230001Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573837973517794:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:09.117439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:12:09.601393Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.047786Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2025-04-07T14:12:10.063524Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573825088613685:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:10.063580Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:10.477950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:12:10.915415Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:12:11.372698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-07T14:12:13.969135Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573859448355725:2747], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:12:13.969252Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } Trying to start YDB, gRPC: 7653, MsgBus: 30855 2025-04-07T14:12:16.887767Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573873513785595:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.918346Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001431/r3tmp/tmpI5bjMp/pdisk_1.dat 2025-04-07T14:12:17.001712Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:17.030376Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.030480Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.032146Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7653, node 5 2025-04-07T14:12:17.086360Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:17.086385Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:17.086396Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:17.086543Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30855 TClient is connected to server localhost:30855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:17.676360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:17.697449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:17.786181Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:17.972689Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:18.058885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:20.580558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573890693656403:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.580636Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.640320Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.716740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.756640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.791566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.824367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.871664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:20.933034Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573890693656902:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.933140Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.933387Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573890693656907:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.937042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:20.958624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573890693656909:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:21.886335Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573873513785595:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.886407Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:22.173759Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:12:22.748393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:12:23.349150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-07T14:12:23.912976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.534536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.164038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715696:0, at schemeshard: 72057594046644480 >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndex [GOOD] Test command err: Trying to start YDB, gRPC: 23471, MsgBus: 2294 2025-04-07T14:12:00.933685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573805304160252:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:00.933767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001416/r3tmp/tmpQl5Vf6/pdisk_1.dat 2025-04-07T14:12:01.171957Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23471, node 1 2025-04-07T14:12:01.231251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:01.231274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:01.231298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:01.231449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:01.261336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:01.261470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:01.263208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TClient is connected to server localhost:2294 TClient is connected to server localhost:2294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:01.665973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.688171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.789545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.892099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.966927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.318683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818189063861:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.318784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.521916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.546585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.567367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.588644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.612360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.633003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.661883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818189064352:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.661969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.662069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818189064357:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.665202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:03.673889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573818189064359:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:04.427888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6639, MsgBus: 19989 2025-04-07T14:12:05.147448Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573827129602197:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:05.147519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001416/r3tmp/tmpzXVHYX/pdisk_1.dat 2025-04-07T14:12:05.214988Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6639, node 2 2025-04-07T14:12:05.248614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:05.248709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:05.250204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:05.261823Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:05.261839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:05.261846Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:05.261937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19989 TClient is connected to server localhost:19989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:05.577254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:05.594245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.677814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.781206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.845822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.625389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573835719538491:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.625579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.638583Z node 2 :FLAT_TX_SCHEMESHARD W ... 6.772374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.128283Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573887296412288:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.128366Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.173524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.213216Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.282609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.322464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.356503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.441583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.524367Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573887296412796:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.524452Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.524688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573887296412801:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.527942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:19.538906Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573887296412803:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:20.769233Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573870116541403:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:20.772137Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:20.816042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2847, MsgBus: 3229 2025-04-07T14:12:21.966113Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573896712649402:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.966159Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001416/r3tmp/tmpx60zjR/pdisk_1.dat 2025-04-07T14:12:22.063897Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:22.091268Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:22.091356Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:22.092839Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2847, node 5 2025-04-07T14:12:22.139910Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:22.139933Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:22.139945Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:22.140051Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3229 TClient is connected to server localhost:3229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:22.648183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:22.664072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.727669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.894794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:22.995761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:25.740867Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573913892520269:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.740936Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.779480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.813274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.844010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.876211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.910947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:25.988313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:26.048881Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573918187488063:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:26.048995Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:26.049291Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573918187488068:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:26.052988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:26.063717Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573918187488070:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:26.966330Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573896712649402:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.966402Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:27.226923Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TA] $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |94.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestHiveRestart |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] Test command err: Trying to start YDB, gRPC: 21280, MsgBus: 4533 2025-04-07T14:12:03.533075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573816851960921:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:03.533132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001401/r3tmp/tmp8PLRzL/pdisk_1.dat 2025-04-07T14:12:03.818337Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21280, node 1 2025-04-07T14:12:03.894994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:03.895022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:03.895031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:03.895149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:03.896228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:03.896360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:03.898009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4533 
TClient is connected to server localhost:4533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:04.308226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.325417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.422157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.532626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:04.586017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:06.208111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573829736864535:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.208202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.391979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.415203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.434876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.454640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.476252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.507428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.542814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573829736865028:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.542892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.543078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573829736865033:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:06.546530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:06.554490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573829736865035:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:07.424105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710671 CreateStep: 1744035127485 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2025-04-07T14:12:07.463515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710671 CreateStep: 1744035127485 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2025-04-07T14:12:07.488272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710671 CreateStep: 1744035127485 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... 
(TRUNCATED) Trying to start YDB, gRPC: 1393, MsgBus: 24967 2025-04-07T14:12:08.207920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573839582470222:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.208006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001401/r3tmp/tmpIhbuHJ/pdisk_1.dat 2025-04-07T14:12:08.297340Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:08.322453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:08.322569Z node 2 :HIVE WARN: HIVE#720 ... 86224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:27.877028Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:27.877052Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:27.877093Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:27.877120Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:27.877405Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:27.877457Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:27.877620Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:27.877657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:27.877771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:27.877803Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:27.877944Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:27.877970Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:27.878061Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:27.878096Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:27.879817Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:27.879863Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:27.879970Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:27.880013Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:27.880166Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:27.880202Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:27.880290Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:27.880325Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:27.880387Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:27.880417Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:27.880454Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:27.880479Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:27.880792Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:27.880832Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:27.880996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:27.881027Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:27.881139Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:27.881169Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:27.881316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:27.881363Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:27.881495Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:27.881530Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:27.919824Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573921474669835:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:27.919911Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:27.941201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.020529Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573925769637278:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:28.020633Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:28.034119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.068113Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573925769637358:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:28.068203Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:28.078454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.101338Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037894 not found 2025-04-07T14:12:28.101372Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037891 not found 2025-04-07T14:12:28.101386Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037897 not found 2025-04-07T14:12:28.101401Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2025-04-07T14:12:28.101416Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037896 not found 2025-04-07T14:12:28.101430Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037893 not found 2025-04-07T14:12:28.101445Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found 2025-04-07T14:12:28.101460Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-04-07T14:12:28.101474Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037892 not found 2025-04-07T14:12:28.101488Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-04-07T14:09:27.864464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:399:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:27.864801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:27.864943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d9/r3tmp/tmpSDtRF0/pdisk_1.dat 2025-04-07T14:09:28.262356Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13221, node 1 2025-04-07T14:09:28.505699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:28.505750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:28.505775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:28.505910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:28.508396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:28.611820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:28.611941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:28.630058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15468 2025-04-07T14:09:29.307155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:32.816001Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:32.871941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:32.872039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:32.922353Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:32.926575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.126970Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:33.177124Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:33.177226Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:33.216225Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:33.217608Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:33.217832Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:33.217898Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:33.217953Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:33.218005Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:33.218063Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:33.218116Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:33.218737Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:33.409081Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.409260Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:33.419813Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1770:2560] 2025-04-07T14:09:33.424688Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1791:2570] 2025-04-07T14:09:33.424995Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1791:2570], schemeshard id = 72075186224037889 2025-04-07T14:09:33.434569Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:33.459889Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:33.459962Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:33.460041Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:33.464422Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.464541Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.478754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:33.494047Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:33.494215Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:33.510170Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:33.528182Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:33.557128Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:33.880075Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:34.060317Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:34.997787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2145:3023], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:34.997943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:35.017822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:35.131199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:35.131400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:35.131676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:35.131757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:35.131835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:35.131925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:35.132025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:35.132117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:35.132212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:35.132288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:35.132358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:35.132431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:35.174046Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:35.174173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:35.174384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:35.174465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:35.174802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:35.174876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:35.175031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema ... TATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:23.742619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:23.743064Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:23.743131Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:12:23.756136Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:25.030355Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:25.030419Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:25.030447Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:25.030479Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:25.030517Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:25.031336Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:25.044636Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:25.044793Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:25.045198Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:25.045267Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-07T14:12:25.046598Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:25.047329Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7346:5458] 2025-04-07T14:12:25.047890Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-04-07T14:12:25.049177Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7348:5459]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037889
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037889
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037889, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037889
    [OwnerId: 72075186224037889, LocalPathId: 4], [OwnerId: 72075186224037889, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Traversal
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 2025-04-07T14:12:25Z
TraversalPathId: [OwnerId: 72075186224037889, LocalPathId: 4]
TraversalIsColumnTable: 1
TraversalStartKey: 
GlobalTraversalRound: 2
TraversalRound: 1
HiveRequestRound: 1
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-07T14:12:25.050183Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:25.050280Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:25.064163Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:25.064305Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:25.064406Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:25.064962Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7351:5462], server id = [2:7352:5463], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:25.065081Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7351:5462], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:25.065884Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:25.065978Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:25.066192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:25.066353Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:25.066630Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:25.069075Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7351:5462], server id = [2:7352:5463], tablet id = 72075186224037899 2025-04-07T14:12:25.069116Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:25.069577Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:25.090974Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7369:5480]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:25.091148Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:25.091187Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7369:5480], StatRequests.size() = 1 2025-04-07T14:12:25.211792Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTEzYTgwZDYtYzUzMWMyNzgtYWI0NDAzNGUtOWNmODU2OGU=, TxId: 2025-04-07T14:12:25.211863Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTEzYTgwZDYtYzUzMWMyNzgtYWI0NDAzNGUtOWNmODU2OGU=, TxId: 2025-04-07T14:12:25.212337Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:25.226107Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:25.226175Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:25.807581Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:25.807683Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:26.452638Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:26.452745Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:26.452807Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:27.844997Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:27.845210Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:27.868347Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:27.868509Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:27.868571Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:27.869258Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:27.886055Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:27.886513Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:27.886593Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:27.887019Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:27.901673Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:27.901848Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:27.902378Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7462:5532], server id = [2:7463:5533], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:27.902503Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7462:5532], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:27.903088Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:27.903173Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:27.903473Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:27.903655Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:27.903902Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:27.905891Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7462:5532], server id = [2:7463:5533], tablet id = 72075186224037899 2025-04-07T14:12:27.905927Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:27.906601Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:27.928842Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDZlMzYxMzEtZTI3ODNmODktNDVlOGRhODQtZDIwMTM4MjY=, TxId: 2025-04-07T14:12:27.928913Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDZlMzYxMzEtZTI3ODNmODktNDVlOGRhODQtZDIwMTM4MjY=, TxId: 2025-04-07T14:12:27.929356Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:27.943024Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:27.943095Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:890:2716] 2025-04-07T14:12:27.944190Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7482:5548] 2025-04-07T14:12:27.944822Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TraverseColumnShard::TraverseColumnTable [GOOD] >> SystemView::QueryStats [GOOD] >> SystemView::QueryStatsFields >> TProxyActorTest::TestAttachSession [GOOD] >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> KqpYql::UpdateBadType >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> KqpYql::InsertCV |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> KqpScripting::StreamExecuteYqlScriptScan >> KqpScripting::ScanQuery >> KqpPragma::Auth >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert >> KqpYql::EvaluateExpr1 >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpYql::BinaryJsonOffsetBound >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey [GOOD] >> KqpOlapScheme::TwoSimilarColumnFamilies >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-04-07T14:09:43.321562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:43.321837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:43.321967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a9/r3tmp/tmpnnkXxb/pdisk_1.dat 2025-04-07T14:09:43.685886Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8430, node 1 2025-04-07T14:09:43.905608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:43.905666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:43.905696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:43.906253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:43.911513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:44.007685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:44.007872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:44.022990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30897 2025-04-07T14:09:44.643337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:47.809459Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:47.848186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:47.848312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:47.888192Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:47.892215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:48.075623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:48.119790Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:48.119873Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:48.149055Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:48.149259Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:48.149443Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:48.149494Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:48.149541Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:48.149584Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:48.149618Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:48.149665Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:48.150095Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:48.365395Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:48.365515Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:48.373080Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:48.381330Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:48.382340Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:48.383393Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:48.406111Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:48.406162Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:48.406225Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:48.407740Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:48.407840Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:48.420382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:48.431984Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:48.432167Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:48.445592Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:48.461567Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:48.501972Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:48.802499Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:48.968368Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:49.717092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:49.717240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:49.733813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:49.942772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:49.942961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:49.943219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:49.943309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:49.943381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:49.943475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:49.943553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:49.943664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:49.943762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:49.943872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:49.943958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:49.944053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:49.999945Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:50.000058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:50.000326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:50.000468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:50.000606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:50.001512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... ode 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:12:25.766937Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:25.876726Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:12:25.876825Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:12:25.953352Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8044:6020], schemeshard count = 1 2025-04-07T14:12:29.073810Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:29.073874Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:12:29.073916Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:29.073970Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:29.079605Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:29.098391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:29.099050Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:29.099164Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:29.100047Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:29.115950Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:29.116152Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:29.116937Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8171:6090], server id = [2:8176:6095], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:29.117376Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8171:6090], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.117519Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8172:6091], server id = [2:8177:6096], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:29.117582Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8172:6091], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.118660Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8173:6092], server id = [2:8178:6097], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:29.118721Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8173:6092], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.118890Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:29.119028Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:29.119567Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8174:6093], server id = [2:8179:6098], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:29.119616Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8174:6093], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.119690Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8175:6094], server id = [2:8180:6099], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:29.119734Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8175:6094], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.119846Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:29.120545Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8171:6090], server id = [2:8176:6095], tablet id = 72075186224037899 2025-04-07T14:12:29.120582Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.120861Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8172:6091], server id = [2:8177:6096], tablet id = 72075186224037900 2025-04-07T14:12:29.120892Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.120964Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:29.121129Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:29.121251Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8173:6092], server id = [2:8178:6097], tablet id = 72075186224037901 2025-04-07T14:12:29.121289Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.121442Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8181:6100], server id = [2:8184:6103], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:29.121489Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8181:6100], 
path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.121591Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8182:6101], server id = [2:8185:6104], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:29.121624Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8182:6101], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.122327Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8174:6093], server id = [2:8179:6098], tablet id = 72075186224037902 2025-04-07T14:12:29.122357Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.122464Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8183:6102], server id = [2:8186:6105], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:29.122547Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8183:6102], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.123030Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8175:6094], server id = [2:8180:6099], tablet id = 72075186224037903 2025-04-07T14:12:29.123056Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.123148Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:29.123270Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:29.123384Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8187:6106], server id = [2:8189:6108], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:29.123471Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8187:6106], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.123684Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8188:6107], server id = [2:8190:6109], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:29.123740Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8188:6107], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:29.124157Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:29.124274Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8181:6100], server id = [2:8184:6103], tablet id = 72075186224037904 2025-04-07T14:12:29.124298Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.124819Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8182:6101], server id = [2:8185:6104], tablet id = 72075186224037905 2025-04-07T14:12:29.124848Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.124911Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:29.124975Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:29.125014Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:29.125206Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:29.125460Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:29.125793Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:29.127866Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8183:6102], server id = [2:8186:6105], tablet id = 72075186224037906 2025-04-07T14:12:29.127898Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.128189Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8187:6106], server id = [2:8189:6108], tablet id = 72075186224037907 2025-04-07T14:12:29.128215Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.128465Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:29.128899Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8188:6107], server id = [2:8190:6109], tablet id = 72075186224037908 2025-04-07T14:12:29.128924Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:29.150203Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8207:6126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:29.150444Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:29.150501Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8207:6126], StatRequests.size() = 1 2025-04-07T14:12:29.288951Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTM0MWM3NjgtZTI2MWE2MDctYTM0N2ZhZjktYmQ4ZGNkNTY=, TxId: 2025-04-07T14:12:29.289022Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTM0MWM3NjgtZTI2MWE2MDctYTM0N2ZhZjktYmQ4ZGNkNTY=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:12:29.289702Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8215:6132]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:29.289971Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:29.290434Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:29.290502Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:29.294092Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:29.294161Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:29.294218Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:29.303306Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.9%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> KqpScripting::StreamScanQuery >> KqpYql::EvaluateIf >> KqpScripting::SelectNullType >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> KqpScripting::ScriptValidate >> KqpYql::TestUuidDefaultColumn >> SystemView::TopPartitionsTables [GOOD] >> SystemView::TopPartitionsRanges >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestHiveBalancer >> KqpScheme::CreateTableWithTtlOnIntColumn [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PartitionStatsTtlFields >> KqpYql::EvaluateExpr2 >> SystemView::CollectScriptingQueries [GOOD] >> SystemView::DescribeSystemFolder [GOOD] >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestRestartsWithFollower >> KqpConstraints::IndexAutoChooseAndNonReadyIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-04-07T14:12:16.525055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573876335641994:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.525307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107b/r3tmp/tmp37DhjL/pdisk_1.dat 2025-04-07T14:12:16.874508Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17222, node 1 2025-04-07T14:12:16.953998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.954130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.955991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.972451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:16.972473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:16.972486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:16.972584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:17.213896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:17.236781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.132537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573889220544594:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.132620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573889220544583:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.132749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.136978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:19.146316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573889220544597:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:12:19.552940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr89ghta14m36d0afgyv5xw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFlZTI1ZS05MTAzYjc3Yy0yMjZlMWNlYy1jYjZlYjM3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:19.557352Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573889220544684:2316], owner: [1:7490573889220544680:2314], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:19.557996Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573889220544684:2316], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:19.558608Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573889220544684:2316], row count: 1, finished: 1 2025-04-07T14:12:19.558651Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573889220544684:2316], owner: [1:7490573889220544680:2314], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:19.563221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035139547, txId: 281474976710661] shutting down 2025-04-07T14:12:20.199789Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573891329095023:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:20.199837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107b/r3tmp/tmpjKLbEj/pdisk_1.dat 2025-04-07T14:12:20.317442Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:20.335470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:20.335530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:20.336723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8654, node 2 2025-04-07T14:12:20.409133Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:20.409152Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:20.409157Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:20.409225Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:20.597396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:20.605446Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:22.959035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:23.042483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573904213997633:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:23.042484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573904213997642:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:23.042568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:23.045354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:23.052679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573904213997647:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:12:23.209872Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89gqbz7qy14p999agh0t0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDRmYzlkOGEtZTBhZTk5ODQtOTIxOGY3ZWYtMTNhNjk4YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:23.211621Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7490573904213997736:2322], owner: [2:7490573904213997732:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:23.212367Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7490573904213997736:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:23.212707Z node 2 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [2:7490573904213997736:2322], row count: 4, finished: 1 2025-04-07T14:12:23.212747Z node 2 :SYSTEM_VIEWS INFO: Scan finished, actor: [2:7490573904213997736:2322], owner: [2:7490573904213997732:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:23.214759Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035143209, txId: 281474976715661] shutting down 2025-04-07T14:12:23.924734Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490573903939837369:2052];send_to=[0:7307199536658146131:7 ... { TraceId: 01jr89gvxacexqs6v9q3kmbr2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTFjMGRjNmMtY2E4MDYzYmQtN2M1N2EzOC1mYWI0MDRjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:27.798573Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7490573921119707757:2392], owner: [3:7490573921119707754:2390], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:27.799029Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7490573921119707757:2392], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:27.799458Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7490573921119707757:2392], row count: 3, finished: 1 2025-04-07T14:12:27.799502Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7490573921119707757:2392], owner: [3:7490573921119707754:2390], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:27.801260Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035147795, txId: 281474976710673] shutting down 2025-04-07T14:12:27.902835Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr89gw0x6p0h9y821h72f89w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjM2NmUzNWItZmQzNDI5OGItYWY1NTI3ODYtMjU5ZDFlZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:27.904677Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7490573921119707790:2401], owner: [3:7490573921119707786:2399], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:27.905367Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7490573921119707790:2401], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:27.905744Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7490573921119707790:2401], row count: 3, finished: 1 2025-04-07T14:12:27.905781Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7490573921119707790:2401], owner: [3:7490573921119707786:2399], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:27.907733Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035147901, txId: 281474976710675] shutting down 2025-04-07T14:12:28.009554Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr89gw4961c3dhgmygyzg3jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDRiNDM3N2QtNWM3YjQyNDktMWE3YWRlMjctNTFlOGQwYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:28.011378Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7490573925414675121:2411], owner: [3:7490573925414675117:2409], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:28.011884Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7490573925414675121:2411], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:28.012263Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7490573925414675121:2411], row count: 4, finished: 1 2025-04-07T14:12:28.012306Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7490573925414675121:2411], owner: [3:7490573925414675117:2409], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:28.014041Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035148008, txId: 281474976710677] shutting down 2025-04-07T14:12:28.123614Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr89gw7n19qr51xbarrrbt2q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTQwOWUwYjYtNjAyMTYxYTYtN2MxZWU1ZGYtNTQzOGVjY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:28.150614Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7490573925414675153:2420], owner: [3:7490573925414675149:2418], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:28.151130Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7490573925414675153:2420], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:28.151537Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7490573925414675153:2420], row count: 4, finished: 1 2025-04-07T14:12:28.151584Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7490573925414675153:2420], owner: [3:7490573925414675149:2418], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:28.153475Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035148122, txId: 281474976710679] shutting down 2025-04-07T14:12:28.936306Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490573925600554129:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:28.936386Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107b/r3tmp/tmpIAt1Wv/pdisk_1.dat 2025-04-07T14:12:29.039449Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:29.066032Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:29.066124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:29.069798Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8828, node 4 2025-04-07T14:12:29.137217Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:29.137241Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:29.137251Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:29.137393Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:29.416009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:29.424414Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:32.090921Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.224839Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573942780424185:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.224936Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.225230Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490573942780424197:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.230443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:32.242954Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573942780424199:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:12:32.607829Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89h0as7ckbf9f2c06mq1ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZjMzMWVkMGQtNTYyNmRhNjItNDAxMjFlZjItNmQ1ZWNkZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:32.621074Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7490573942780424298:2336], owner: [4:7490573942780424297:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:32.621841Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7490573942780424298:2336], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:32.628938Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7490573942780424298:2336], row count: 4, finished: 1 2025-04-07T14:12:32.629022Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7490573942780424298:2336], owner: [4:7490573942780424297:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:32.629243Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7490573942780424304:2339], owner: [4:7490573942780424297:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:32.642848Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7490573942780424304:2339], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:32.648936Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7490573942780424304:2339], row count: 4, finished: 1 2025-04-07T14:12:32.648986Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7490573942780424304:2339], owner: [4:7490573942780424297:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:32.651795Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035152604, txId: 281474976715661] shutting down >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> KqpScheme::CreateBackupCollection [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-04-07T14:12:10.228719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573849767636736:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:10.228783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001094/r3tmp/tmpZ7tV4f/pdisk_1.dat 2025-04-07T14:12:10.548288Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3604, node 1 2025-04-07T14:12:10.565788Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:10.566602Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-07T14:12:10.579435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:10.580128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:10.584688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:10.607370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:10.607391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:10.607402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:10.607513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:11.056591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.088737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:12.664762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572326:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.664909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.665492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572373:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.665540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572366:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.665571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572367:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.665614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572368:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.665641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572369:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.666414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572379:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.666471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.666971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572392:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.667008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572395:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.668926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572409:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.668984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572413:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.669039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.669517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572438:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.671496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:12:12.675244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572487:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.675296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572490:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.675354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.675787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572513:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.675827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572516:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.675887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572512:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.676157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.676823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572530:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.676852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572533:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.676927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.678205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572547:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.678255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572550:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.678310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.680099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858357572559:2390], DatabaseId: /Root, PoolId: default, Fai ... schemeshard: 72057594046644480 2025-04-07T14:12:25.416791Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7490573890504862275:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:25.416925Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:25.628999Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89gssw5h4cyed5z2c3qtyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZjQ5NzEyM2MtYzZjZjE2Mi0yZmY2YjE1NS1jODFmYmU2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:25.682336Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied 2025-04-07T14:12:25.700857Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied
: Error: Access denied 2025-04-07T14:12:25.721110Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys 2025-04-07T14:12:25.782510Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied 2025-04-07T14:12:25.822348Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied 2025-04-07T14:12:25.857815Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2025-04-07T14:12:25.888894Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 14 2025-04-07T14:12:25.889611Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:25.897606Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-07T14:12:25.898083Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:25.898173Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-07T14:12:25.899178Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:25.899323Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2025-04-07T14:12:25.899458Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:25.928523Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7490573895295605420:2104], Type=268959746 2025-04-07T14:12:25.928574Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7490573895295605420:2104], Type=268959746 2025-04-07T14:12:25.928595Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7490573895295605420:2104], Type=268959746 2025-04-07T14:12:25.928619Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7490573895295605420:2104], Type=268959746 2025-04-07T14:12:26.059386Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7490573895735033341:2110];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.054628Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7490573895295605328:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.054712Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:26.059477Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:26.192616Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7490573894524586706:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.188036Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490573895767681362:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:26.192705Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:26.188108Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:28.538276Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7490573925547199374:2052];send_to=[0:7307199536658146131:7762515]; 
2025-04-07T14:12:28.538331Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001094/r3tmp/tmpi18wmw/pdisk_1.dat 2025-04-07T14:12:28.685946Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:28.715977Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:28.716082Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:28.717639Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10262, node 16 2025-04-07T14:12:28.781233Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:28.781257Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:28.781269Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:28.781408Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:29.099694Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:29.105488Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:29.110518Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.252629Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490573942727069272:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.252748Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.253122Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490573942727069284:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.257254Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:32.268211Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7490573942727069286:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:12:32.490467Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89h0btc5f4a17tjfv5819d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZGVkY2JhZTctMTJkNzU5MjAtMzM3ZDgzNGYtZmYwYjVkMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:32.661872Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89h0ns53eqq7r7acgvzvvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MjQ0MzA5Yi1mNDU5NWU3Zi0yMGZhZDM0YS0xOWU4ZTFl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:32.671239Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035152706, txId: 281474976715662] shutting down 2025-04-07T14:12:32.874086Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89h0tq28rwdwhxxd4gpq1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=OGM4OWMxMGEtOTAwMmEzNDItZjJkYWYxZjgtMTljOWNkNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:32.875831Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7490573942727069458:2340], owner: [16:7490573942727069454:2338], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:12:32.876881Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7490573942727069458:2340], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:32.877433Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7490573942727069458:2340], row count: 2, finished: 1 2025-04-07T14:12:32.877478Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7490573942727069458:2340], owner: [16:7490573942727069454:2338], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:12:32.879705Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035152873, txId: 281474976715664] shutting down >> KqpYql::UpdateBadType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::DescribeSystemFolder [GOOD] Test command err: 2025-04-07T14:12:12.663885Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573857186085594:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:12.663937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108e/r3tmp/tmpo24n1m/pdisk_1.dat 2025-04-07T14:12:12.994405Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8329, node 1 2025-04-07T14:12:13.055849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:13.055872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:13.055880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:13.055975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-04-07T14:12:13.062368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:13.062522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:13.064437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:13.350425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:13.362515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:15.296523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573870070988026:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.296597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573870070988037:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.296682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:15.300567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:12:15.309234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573870070988040:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:12:15.730366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89gdykd0bz2atd38b9wpzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJmMDMyNWYtOTA4NzYyZDctYjNmNWM3NWMtYTNmY2NkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:15.782413Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573870070988128:2312], owner: [1:7490573870070988124:2310], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:15.783086Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573870070988128:2312], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:15.804566Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573870070988128:2312], row count: 1, finished: 1 2025-04-07T14:12:15.804634Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573870070988128:2312], owner: [1:7490573870070988124:2310], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:15.824732Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035135729, txId: 281474976710660] shutting down 2025-04-07T14:12:16.981258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr89ghabbkwg1g9cnw53d386, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M5OTRlYjItNThmZTViOGEtZmRhNGZmYmMtMTY4NmI0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:16.984727Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573874365955470:2326], owner: [1:7490573874365955466:2324], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:16.985324Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573874365955470:2326], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:16.985584Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573874365955470:2326], row count: 1, finished: 1 2025-04-07T14:12:16.985609Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573874365955470:2326], owner: [1:7490573874365955466:2324], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:16.987732Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035136971, txId: 281474976710662] shutting down 2025-04-07T14:12:17.664881Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573857186085594:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.664975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:18.141589Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89gjeh760q09cb59v4w15b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWMwMjhhZC1kY2ViZDY5ZS1jM2UyMzcxMy02NDQ1MWJhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:18.146270Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573882955890101:2337], owner: [1:7490573882955890097:2335], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:18.149210Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573882955890101:2337], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:18.149457Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573882955890101:2337], row count: 1, finished: 1 2025-04-07T14:12:18.149480Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573882955890101:2337], owner: [1:7490573882955890097:2335], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-07T14:12:18.152046Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035138140, txId: 281474976710664] shutting down test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108e/r3tmp/tmpcKVgbx/pdisk_1.dat 2025-04-07T14:12:19.592764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:19.668517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:19.696082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:19.696176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:19.702145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7081, node 2 2025-04-07T14:12:19.759802Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:19.759830Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:19.759836Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:19.759927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:20.017503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:22.371575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573901289939591:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:22.371753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... eEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:27.164521Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:27.181255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:27.195676Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7490573919647070895:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:27.196089Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:27.200650Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490573920994290477:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:27.203070Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:27.207034Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:27.207121Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-04-07T14:12:27.211564Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:27.211654Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:27.213654Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-04-07T14:12:27.215686Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:27.218203Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-04-07T14:12:27.219253Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:27.244892Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:27.302964Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:27.318158Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7490573922875319209:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:27.318254Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:27.325015Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490573920209944777:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:27.327308Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:27.336038Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:27.336114Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-04-07T14:12:27.339159Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-07T14:12:27.340141Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:27.342446Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:27.342542Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:27.357703Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-07T14:12:27.359190Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:27.376637Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:30.206354Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:30.353343Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573934937074642:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:30.353344Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573934937074653:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:30.353398Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:30.356891Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2025-04-07T14:12:30.371930Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490573934937074656:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2025-04-07T14:12:30.561143Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89gygf7e7eebmmf83e7kn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDVmZWZmOGMtNjk0NGVhNS1lZWQ5OTM2Zi1iNjRjNmI5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:30.590855Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:31.001093Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89gz1b16j13q3ndgzzz0cx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDVmZWZmOGMtNjk0NGVhNS1lZWQ5OTM2Zi1iNjRjNmI5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:31.018455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-07T14:12:31.347124Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89gzb11wjr3qfby0e3s42m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDVmZWZmOGMtNjk0NGVhNS1lZWQ5OTM2Zi1iNjRjNmI5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Path not found 2025-04-07T14:12:31.570371Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-07T14:12:31.570842Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:31.570968Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-07T14:12:31.571353Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:31.571421Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-07T14:12:31.571547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:31.572506Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-07T14:12:31.577510Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:31.604268Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7490573922875319436:2101], Type=268959746 2025-04-07T14:12:31.728759Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490573917757204150:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.728828Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:32.196332Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7490573919647070895:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.196406Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:32.200767Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490573920994290477:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.200845Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:32.318636Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7490573922875319209:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.318706Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:32.326241Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7490573920209944777:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.326315Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexAutoChooseAndNonReadyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5550, MsgBus: 24492 2025-04-07T14:11:51.437995Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573767334167524:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:51.438079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001437/r3tmp/tmpHEx7Gc/pdisk_1.dat 2025-04-07T14:11:51.703848Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5550, node 1 2025-04-07T14:11:51.769298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:51.769319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:51.769324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:51.769389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:51.790600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:51.790717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:51.792301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24492 TClient is connected to server localhost:24492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:52.180546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.208692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.311411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:52.418611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:52.491102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.791424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573775924103825:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.791509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:53.956769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.976390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.995583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.018273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.039976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.063552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.100868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573780219071614:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.100929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.100944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573780219071619:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.103378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.110063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573780219071621:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:54.783340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.015159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710675:1, at schemeshard: 72057594046644480 2025-04-07T14:11:55.029574Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-07T14:11:55.069153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2822, MsgBus: 13504 2025-04-07T14:11:55.901648Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573785926010549:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:55.901735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001437/r3tmp/tmpAAyftB/pdisk_1.dat 2025-04-07T14:11:55.975707Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:56.002216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:56.002292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:56.005620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2822, node 2 2025-04-07T14:11:56.036580Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:56.036603Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:56.036610Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:56.036718Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13504 TClient is connected to server localhost:13504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:56.442194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.449594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.495403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.630248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:56.703639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14: ... ActorId: [5:7490573866049636083:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.467845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.519007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.589057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.616547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.649256Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.681030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.715927Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:14.757316Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573866049636580:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.757442Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.757725Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573866049636585:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:14.762481Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:14.772838Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573866049636587:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:15.918854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.009028Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573853164732493:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.009104Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5492, MsgBus: 23817 2025-04-07T14:12:25.098990Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:25.099377Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:25.099468Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001437/r3tmp/tmpHlBg4G/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5492, node 6 2025-04-07T14:12:25.566913Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:25.567883Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:25.567944Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:25.567992Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:25.568245Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:25.616559Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:25.616734Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:25.628280Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23817 TClient is connected to server localhost:23817 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:26.030306Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:26.155588Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:26.528157Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:26.983852Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:27.344576Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:27.946953Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1728:3346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:27.947172Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:27.970193Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.219209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.515966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:28.872423Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.168062Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.543320Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.864509Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:2300:3791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.864691Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.865087Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:2305:3796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.871694Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:30.081884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:2307:3798], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:31.264211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.391113Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:32.999167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> KqpOlapScheme::TwoSimilarColumnFamilies [GOOD] >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpYql::InsertCV [GOOD] >> KqpYql::InsertCVList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 28989, MsgBus: 26334 2025-04-07T14:12:31.404384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573936785862261:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.405159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ebf/r3tmp/tmpblgNsj/pdisk_1.dat 2025-04-07T14:12:31.849224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.849305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.849585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.853309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28989, node 1 2025-04-07T14:12:31.926215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.926238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.926244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.926351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26334 TClient is connected to server localhost:26334 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.515530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.311026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573949670764768:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.311119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.527119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.663008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573949670764877:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.663075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.663276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573949670764882:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.667118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.674289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573949670764884:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollection [GOOD] Test command err: Trying to start YDB, gRPC: 16041, MsgBus: 3851 2025-04-07T14:12:02.668921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573814706234014:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:02.669013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001408/r3tmp/tmpqypVAv/pdisk_1.dat 2025-04-07T14:12:02.909707Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16041, node 1 2025-04-07T14:12:02.975977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:02.975993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:02.975998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:02.976128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:03.013419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:03.013577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:03.015247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3851 TClient is connected to server localhost:3851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:03.379285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.396536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.512319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:03.627266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:03.684333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:05.026508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573827591137605:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.026625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.222036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.241104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.263160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.283924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.304514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.373968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.412272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573827591138103:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.412337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.412371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573827591138108:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:05.415225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:05.422929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573827591138110:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 25640, MsgBus: 8165 2025-04-07T14:12:06.858542Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573832893945641:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:06.860769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001408/r3tmp/tmpT6j195/pdisk_1.dat 2025-04-07T14:12:06.926722Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25640, node 2 2025-04-07T14:12:06.964041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:06.964138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:06.965770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:06.989324Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:06.989343Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:06.989351Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:06.989439Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8165 TClient is connected to server localhost:8165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:07.351497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.358151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.437787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:07.555544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:07.607541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:09.393261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573845778849233:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.393329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:09.434715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:09.457243Z node 2 :FLAT_TX_SCHEMESHARD WA ... d ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:20.075749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490573892260875658:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:21.385800Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573875081004271:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.387047Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:21.457401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.523454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-04-07T14:12:22.168816Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:12:22.696824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:1, at schemeshard: 72057594046644480 2025-04-07T14:12:23.279223Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:12:23.859504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.396328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-07T14:12:24.904691Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:24.943104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:12:26.895071Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T14:12:27.241450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-07T14:12:27.275294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26462, MsgBus: 22616 2025-04-07T14:12:29.050417Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573930394451411:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:29.050486Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001408/r3tmp/tmp7FPlFC/pdisk_1.dat 2025-04-07T14:12:29.189823Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:29.189924Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:29.191672Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:29.196785Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26462, node 5 2025-04-07T14:12:29.249257Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:29.249287Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:29.249297Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:29.249416Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22616 TClient is connected to server localhost:22616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:29.805919Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:29.817142Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:29.832442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:29.915533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:30.133753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:30.224399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.713135Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573943279354981:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.713226Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:32.759525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.830588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.863313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.936141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.992543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.050958Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.111808Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573947574322781:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:33.111896Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:33.111914Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573947574322786:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:33.116091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:33.126518Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573947574322788:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:34.056372Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573930394451411:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:34.056469Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:34.380283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715672:2, at schemeshard: 72057594046644480 2025-04-07T14:12:34.395730Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715672, at schemeshard: 72057594046644480 2025-04-07T14:12:34.429904Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestRestartTablets >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 19959, MsgBus: 24992 2025-04-07T14:12:30.734523Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573933392654402:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:30.734571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eea/r3tmp/tmpwPEOSs/pdisk_1.dat 2025-04-07T14:12:31.121647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19959, node 1 2025-04-07T14:12:31.166030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.166164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.168402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.176719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.176754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.176763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.176906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24992 TClient is connected to server localhost:24992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:31.751885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:31.764745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:31.776258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:31.907168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.067700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.136964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.573955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573946277557779:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:33.574042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:33.850331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.878058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.953118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.982719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.011818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.050599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.102315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950572525570:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.102396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.102723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950572525575:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.105749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.114752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573950572525577:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> TSchemeShardMoveTest::Reject >> KqpScheme::CreateTableWithTtlOnDatetime64Column [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 13452, MsgBus: 20712 2025-04-07T14:12:32.898214Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573943048132113:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.898561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea3/r3tmp/tmpsRsX81/pdisk_1.dat 2025-04-07T14:12:33.224051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13452, node 1 2025-04-07T14:12:33.264864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:33.265019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:33.266624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:33.327645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:33.327691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:33.327698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:33.327792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20712 TClient is connected to server localhost:20712 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:33.790794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:35.736701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955933034410:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.736802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.019026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.129278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960228001810:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.129339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.129399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960228001815:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.132486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:36.141585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573960228001817:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } >> GroupWriteTest::TwoTables >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateExpr3 >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::TwoSimilarColumnFamilies [GOOD] Test command err: Trying to start YDB, gRPC: 11135, MsgBus: 9091 2025-04-07T14:11:46.634071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573744767818834:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:46.634159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001444/r3tmp/tmpJeqH28/pdisk_1.dat 2025-04-07T14:11:46.870653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11135, node 1 2025-04-07T14:11:46.937947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:46.937971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:46.937980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:46.938084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:46.973224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:46.973351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:46.975020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9091 TClient is connected to server localhost:9091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:47.320237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:47.335923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:47.452295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:47.556504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:47.607931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:48.803926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573753357755138:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:48.804023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.036971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.063509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.088096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.128125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.153393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.218813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.249012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573757652722929:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.249143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.249171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573757652722934:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:49.252105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:49.260735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573757652722936:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:49.957907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:11:49.995492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:11:50.009180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64880, MsgBus: 22756 2025-04-07T14:11:50.725471Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573762919419592:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.725520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001444/r3tmp/tmpd1Ubix/pdisk_1.dat 2025-04-07T14:11:50.799980Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64880, node 2 2025-04-07T14:11:50.825870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:50.825947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:50.827788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:50.856088Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:50.856113Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:50.856119Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:50.856197Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22756 TClient is connected to server localhost:22756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.164282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, column0 Int32, column1 Int32, column2 Int32, column3 Int32, column4 Int32, column5 Int32, column6 Int32, column7 Int32, column8 Int32, column9 Int32, column10 Int32, column11 Int32, column12 Int32, column13 Int32, column14 Int32, column15 Int32, column16 Int32, column17 Int32, column18 Int32, column19 Int32, column20 Int32, column21 Int32, column22 Int32, column23 Int32, column24 Int32, column25 Int32, column26 Int32, column27 Int32, column28 Int32, column29 Int32, column30 Int32, column31 Int32, column32 Int32, column33 Int32, column34 Int32, column35 Int32, column36 Int32, column37 Int32, column38 Int32, column39 Int32, column40 Int32, column41 Int32, column42 Int32, column43 Int32, column44 Int32, column45 Int32, column46 Int32, column47 Int32, column48 Int32, column49 Int32, column50 Int32, column51 Int32, column52 Int32, column53 Int32, column54 Int32, column55 Int32, column56 Int32, column57 Int32, column58 Int32, column59 Int32, column60 Int32, column61 Int32, column62 Int32, column63 Int32, column64 Int32, column65 Int32, column66 Int32 ... fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:30.334730Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:30.334816Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:30.334896Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:30.334985Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:30.335079Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:30.335160Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:30.335254Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:30.335356Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:30.335460Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[5:7490573932417741818:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:12:30.343117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:30.343189Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:30.343285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:30.343316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:30.343487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:30.343516Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:30.343597Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:30.343629Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:30.343700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:30.343733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:30.343777Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:30.343810Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:30.344164Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:30.344200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:30.344362Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:30.344406Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:30.344570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:30.344606Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:30.344858Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:30.344896Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:30.345049Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:30.345077Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:30.485096Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573932417741890:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:30.485176Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:30.499291Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32172, MsgBus: 13260 2025-04-07T14:12:31.514024Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573937728624954:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.514090Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001444/r3tmp/tmpC7pOG5/pdisk_1.dat 2025-04-07T14:12:31.625048Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.654176Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.654280Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.656556Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32172, node 6 2025-04-07T14:12:31.723229Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.723252Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.723272Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.723410Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13260 TClient is connected to server localhost:13260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:12:32.402201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family1 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:12:35.545614Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573954908494762:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.545744Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> SystemView::SystemViewFailOps [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 62968, MsgBus: 25736 2025-04-07T14:11:59.716358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573801956000537:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:59.716415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001420/r3tmp/tmpXdNePe/pdisk_1.dat 2025-04-07T14:11:59.995105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62968, node 1 2025-04-07T14:12:00.050118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:00.050159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:00.050173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:00.050287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:00.102076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:00.102244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:00.103809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25736 TClient is connected to server localhost:25736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:00.478170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:00.502653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:00.616420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:00.745309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:00.822214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:02.059603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814840904147:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.059679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.232657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.253203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.273145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.295035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.314594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.341533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:02.414126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814840904646:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.414189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814840904651:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.414191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.416709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:02.422984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573814840904653:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:03.129586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-07T14:12:03.544307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.885736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:1, at schemeshard: 72057594046644480 2025-04-07T14:12:04.227151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.655442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.720887Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573801956000537:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:04.720933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:04.994289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-07T14:12:05.420572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T14:12:05.442305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T14:12:06.952552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.008279Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDc5MTM1ZWQtOTlkZmM2OTAtNWI5YzM2ZmQtZTY5NDQxOGY=, ActorId: [1:7490573832020775545:2787], ActorState: ExecuteState, TraceId: 01jr89g7pk52ggwb18w8nn9a8p, Create QueryResponse for error on request, msg: 2025-04-07T14:12:08.105079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710736:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28405, MsgBus: 15986 2025-04-07T14:12:08.809940Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573838115259341:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:08.810002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/u6pr/001420/r3tmp/tmpHlOg7q/pdisk_1.dat 2025-04-07T14:12:08.896089Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28405, node 2 2025-04-07T14:12:08.912445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:08.912520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:08.913508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:08.946033Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:08.946054Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:08.946061Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:08.946148Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15986 TClient is connected to server localhost:15986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self ... VE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:25.857802Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15395, node 5 2025-04-07T14:12:25.948081Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:25.948105Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:25.948115Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:25.948254Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10028 TClient is connected to server localhost:10028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:26.448255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:26.454441Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:26.466580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:26.564409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:26.756800Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:26.852803Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:29.456322Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573928376860263:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.456424Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.511383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.558183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.596399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.674178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.749459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.785221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:29.889539Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573928376860767:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.889637Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.889679Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573928376860772:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:29.894333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:29.906601Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490573928376860774:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:30.709057Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573911196989386:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:30.709127Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:31.237990Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:12:31.981814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.541196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-07T14:12:33.145325Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.777743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.383546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-07T14:12:38.152148Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7490573967031568129:2818], TxId: 281474976715723, task: 1. Ctx: { TraceId : 01jr89h5rna4qtazkbznmv70gg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=YjBlYzM4Ni1iZWZmMjJjZi1lN2NmYTAzLTllMDJiMTlk. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:12:38.152902Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7490573967031568130:2819], TxId: 281474976715723, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr89h5rna4qtazkbznmv70gg. SessionId : ydb://session/3?node_id=5&id=YjBlYzM4Ni1iZWZmMjJjZi1lN2NmYTAzLTllMDJiMTlk. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7490573967031568126:2783], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:38.163238Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YjBlYzM4Ni1iZWZmMjJjZi1lN2NmYTAzLTllMDJiMTlk, ActorId: [5:7490573962736600705:2783], ActorState: ExecuteState, TraceId: 01jr89h5rna4qtazkbznmv70gg, Create QueryResponse for error on request, msg: 2025-04-07T14:12:38.175952Z node 5 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr89h5r10r50tbj41qgkbhe7" } } } } ;request=session_id: "ydb://session/3?node_id=5&id=YjBlYzM4Ni1iZWZmMjJjZi1lN2NmYTAzLTllMDJiMTlk" tx_control { tx_id: "01jr89h5r10r50tbj41qgkbhe7" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`\nSELECT database,name,config,rank FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "database" type { type_id: UTF8 } } members { name: "name" type { type_id: UTF8 } } members { name: "config" type { type_id: JSON_DOCUMENT } } members { name: "rank" type { type_id: INT64 } } } } } } value { items { items { text_value: "/Root" } items { text_value: "MyResourcePoolClassifier" } items { text_value: "{\"resource_pool\":\"test_pool\"}" } items { int64_value: 1 } } } } } ; 2025-04-07T14:12:38.363264Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490573967031568171:2832], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-04-07T14:12:38.363374Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> KqpYql::Discard [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] >> KqpYql::AnsiIn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::SystemViewFailOps [GOOD] Test command err: 2025-04-07T14:12:17.408930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573878646223907:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.409719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105e/r3tmp/tmpxwx7CE/pdisk_1.dat 2025-04-07T14:12:17.757567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31112, node 1 2025-04-07T14:12:17.863335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.863454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.865353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:17.865378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:17.865387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:17.865502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:17.868605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:18.190459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:18.232558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:20.406273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573891531126641:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.406345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573891531126632:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.406463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:20.409816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:20.446441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573891531126646:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:12:20.928095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89gmshdzw87nqxz43k51rb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNiNTdjNzQtYzczNGQwZjQtZDdmNTlmODgtNDVjYjM5ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:21.046252Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr89gnb39zemm9yxb97zgysn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VjNjcxZmQtNTNmNWQzZmEtMTEyYzkwYzItMzM1MTVlOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:21.047851Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573895826094089:2327], owner: [1:7490573895826094085:2325], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:12:21.048434Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573895826094089:2327], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:21.048891Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573895826094089:2327], row count: 1, finished: 1 2025-04-07T14:12:21.048921Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573895826094089:2327], owner: [1:7490573895826094085:2325], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:12:21.050435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035141045, txId: 281474976710662] shutting down 2025-04-07T14:12:22.669200Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573901711769598:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105e/r3tmp/tmpU6YJRy/pdisk_1.dat 2025-04-07T14:12:22.706682Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:22.760471Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12445, node 6 2025-04-07T14:12:22.813721Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:22.820852Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:22.825856Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:22.845857Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:22.845884Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:22.845892Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:22.845994Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:23.083989Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:23.093265Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:25.442700Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573914596671947:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.442761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.442805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490573914596671958:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:25.446637Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:12:25.459515Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490573914596671961:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:12:25.648743Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89gqek7y0fr0wqxvtdkxt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZWQxNTZiNGItZDBhYWFhMTAtZDI2OWZiN2QtOTNjNTIzMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:25.650970Z node 6 :SYSTEM_VIEWS INFO: Scan started, actor: [6:7490573914596672047:2311], owner: [6:7490573914596672043:2309], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:25.651784Z node 6 :SYSTEM_VIEWS INFO: Scan prepared, actor: [6:7490573914596672047:2311], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:25.652452Z node 6 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [6:7490573914596672047:2311], row count: 0, finished: 1 2025-04-07T14:12:25.652492Z node 6 :SYSTEM_VIEWS INFO: Scan finished, actor: [6:7490573914596672047:2311], owner: [6:7490573914596672043:2309], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:25.653899Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035145647, txId: 281474976715660] shutting down 2025-04-07T14:12:26.766361Z node ... ], row count: 4, finished: 1 2025-04-07T14:12:33.806560Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490573946089489994:2337], owner: [7:7490573946089489990:2335], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:33.808429Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035153801, txId: 281474976715664] shutting down 2025-04-07T14:12:33.967386Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89h1wv1yf50mmaaymfkf4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzRjNDc0M2ItM2IyMzI4YWEtMzgxOTA2LTJmMzI1YTY1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:33.968985Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490573946089490030:2348], owner: [7:7490573946089490026:2346], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:33.969470Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490573946089490030:2348], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:33.969696Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490573946089490030:2348], row count: 2, finished: 1 2025-04-07T14:12:33.969735Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490573946089490030:2348], owner: [7:7490573946089490026:2346], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:33.971579Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035153966, txId: 281474976715666] shutting down 2025-04-07T14:12:34.108974Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89h221fwta9qw0qhwy6wmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQyNTE2MzUtMmY0MGVjY2EtNmZkMDQ1MmQtMmI5MWI5YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:34.110762Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490573950384457358:2357], owner: [7:7490573950384457354:2355], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.111350Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490573950384457358:2357], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:34.111621Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490573950384457358:2357], row count: 3, finished: 1 2025-04-07T14:12:34.111650Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490573950384457358:2357], owner: [7:7490573950384457354:2355], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.113469Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035154108, txId: 281474976715668] shutting down 2025-04-07T14:12:34.224164Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr89h2696s4xtwe9ssggn8bf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2Q1NDVhMDktYjE2YWRhYjgtODE5NWE4ZWEtOWQ3MDA4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:34.225993Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490573950384457390:2366], owner: [7:7490573950384457386:2364], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.229259Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490573950384457390:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:34.229518Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490573950384457390:2366], row count: 2, finished: 1 2025-04-07T14:12:34.229545Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490573950384457390:2366], owner: [7:7490573950384457386:2364], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.231681Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035154223, txId: 281474976715670] shutting down 2025-04-07T14:12:34.370157Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr89h2a54rbnwk0qsak4byvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjMxMWZhYTAtMTVkZTMxOTEtYzgzODY2MC0yN2YxMWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:34.372789Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490573950384457423:2376], owner: [7:7490573950384457420:2374], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.375082Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490573950384457423:2376], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:34.375305Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490573950384457423:2376], row count: 3, finished: 1 2025-04-07T14:12:34.375332Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490573950384457423:2376], owner: [7:7490573950384457420:2374], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-07T14:12:34.377000Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035154369, txId: 281474976715672] shutting down 2025-04-07T14:12:35.377906Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490573958070047004:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:35.378007Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105e/r3tmp/tmpUkHw6L/pdisk_1.dat 2025-04-07T14:12:35.492490Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:35.527997Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:35.528091Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:35.533866Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19784, node 8 2025-04-07T14:12:35.595582Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:35.595604Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:35.595631Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:35.595760Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:35.855027Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-07T14:12:38.472862Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490573970954949729:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:38.472935Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2025-04-07T14:12:38.532707Z node 8 :TX_PROXY ERROR: [ReadTable [8:7490573970954949750:2308] TxId# 281474976715663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2025-04-07T14:12:38.532922Z node 8 :TX_PROXY ERROR: [ReadTable [8:7490573970954949750:2308] TxId# 281474976715663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Bulk upsert to table '/Root/.sys/partition_stats' is not supported. Table is a system view
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpYql::InsertCVList [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:38.293712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:38.293802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:38.293840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:38.293874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:38.293934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:38.293963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:38.294020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:38.294354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:38.363151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:38.363210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.375554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:38.375684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:38.375838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:38.393180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:38.393401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:38.393987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:38.394217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:38.396117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:38.397617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:38.397673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:38.397763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:38.397797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:38.397839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:38.397951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.404036Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:38.500612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:38.500838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.501058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:38.501380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:38.501437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.503574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:38.503730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:38.503920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.503979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:38.504026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:38.504077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:38.505951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.506009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:38.506045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:38.507598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.507637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.507676Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:38.507756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:38.511067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:38.512855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:38.513060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:38.514130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:38.514263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:38.514316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:38.514595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:38.514660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:38.514841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:38.514927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:38.517899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:38.517957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:38.518183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:38.518230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:38.518446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:38.518488Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:38.518583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:38.518631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:12:38.518692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:38.518732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:38.518765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:38.518807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:38.518873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:38.518912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:38.518943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:38.521802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:38.521940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:38.521971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:38.522017Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:38.522057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:38.522220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.710435Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-07T14:12:40.710485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:12:40.710521Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-04-07T14:12:40.710594Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-07T14:12:40.710700Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-04-07T14:12:40.710818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:40.710877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:12:40.714620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.714809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.714995Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:40.715040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:40.715178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:12:40.715298Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:40.715337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-07T14:12:40.715378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-04-07T14:12:40.715685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.715830Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-07T14:12:40.715914Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.715950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:12:40.715990Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-07T14:12:40.716687Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, 
cookie: 108 2025-04-07T14:12:40.716782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:12:40.716819Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-07T14:12:40.716859Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-04-07T14:12:40.716894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:40.717622Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:12:40.717698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-07T14:12:40.717721Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-07T14:12:40.717749Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:12:40.717775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:12:40.717830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-07T14:12:40.720786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:12:40.720842Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:40.721062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:12:40.721162Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-07T14:12:40.721197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:12:40.721242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-07T14:12:40.721303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:340:2315] message: TxId: 108 2025-04-07T14:12:40.721348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-07T14:12:40.721402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-07T14:12:40.721435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-07T14:12:40.721511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:12:40.722618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 108 2025-04-07T14:12:40.722924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-07T14:12:40.724084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T14:12:40.724135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:821:2782] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-07T14:12:40.724785Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:12:40.724842Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-04-07T14:12:40.740068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 8589936884 } TabletId: 72075186233409546 State: 4 2025-04-07T14:12:40.740174Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-07T14:12:40.742495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:12:40.742960Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T14:12:40.744806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:40.745059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:12:40.745392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:12:40.745440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:12:40.745508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:40.748331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:12:40.748402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-07T14:12:40.748830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-04-07T14:12:40.749456Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:40.749640Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 229us result status StatusSuccess 2025-04-07T14:12:40.749989Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> THiveTest::TestSkipBadNode [GOOD] >> TScaleRecommenderTest::BasicTest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 22851, MsgBus: 32571 2025-04-07T14:12:31.443819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573939533015143:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.444289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eb1/r3tmp/tmprpvugw/pdisk_1.dat 2025-04-07T14:12:31.798144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.799944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.800014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22851, node 1 2025-04-07T14:12:31.807070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.873241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.873272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.873289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.873390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32571 TClient is connected to server localhost:32571 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.359722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.383148Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:32.398578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.523499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:32.671343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.736029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.214420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952417918716:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.214519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.430665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.456144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.498319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.520697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.544258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.572872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.607461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952417919211:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.607550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.607908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952417919216:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.611000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.620005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573952417919218:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7578, MsgBus: 65179 2025-04-07T14:12:36.193359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573960146673385:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.193413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eb1/r3tmp/tmpjThafR/pdisk_1.dat 2025-04-07T14:12:36.297287Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:36.321621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:36.321720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:36.323574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7578, node 2 2025-04-07T14:12:36.368998Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:36.369023Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:36.369031Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:36.369149Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65179 TClient is connected to server localhost:65179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:36.803009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:36.807685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:36.817411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:36.882575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:37.038898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.113922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.181723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973031576985:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.181994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.200399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.226648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.254800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.283675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.306727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.336172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.414802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973031577482:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.414868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973031577487:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.414878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.417601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:39.425386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573973031577489:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:40.332012Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490573977326545098:2463], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-04-07T14:12:40.332196Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjVmNmVjNjgtODk2OWU3NDEtOGQzNDI4NWEtYWY4ZDUwM2M=, ActorId: [2:7490573977326545091:2459], ActorState: ExecuteState, TraceId: 01jr89h8686dgzzx4sm5b9s0a5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 2547, MsgBus: 65046 2025-04-07T14:12:31.451981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573937705995881:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.452025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eaf/r3tmp/tmp6tgxtV/pdisk_1.dat 2025-04-07T14:12:31.802224Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2547, node 1 2025-04-07T14:12:31.861962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.862434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.889049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.949074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.949092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.949097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.949162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65046 TClient is connected to server localhost:65046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.472482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.511756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:32.631930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.776090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.852431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.391379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950590899450:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.391497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.618636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.644132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.667579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.691117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.721093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.756040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.823014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950590899944:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.823093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.823165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950590899949:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.826360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.833628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573950590899951:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:35.711858Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=ZDc2YjkyY2MtZjdjNzQxYWQtOTdiZjMxYTktMjA2Mjc4ZQ==, ActorId: [1:7490573954885867552:2460], ActorState: ExecuteState, TraceId: 01jr89h3ns981nbejd6kgg1trf, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-04-07T14:12:35.711902Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDc2YjkyY2MtZjdjNzQxYWQtOTdiZjMxYTktMjA2Mjc4ZQ==, ActorId: [1:7490573954885867552:2460], ActorState: ExecuteState, TraceId: 01jr89h3ns981nbejd6kgg1trf, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 4274, MsgBus: 18443 2025-04-07T14:12:36.390168Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573960043211860:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.390261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eaf/r3tmp/tmpYYMMQc/pdisk_1.dat 2025-04-07T14:12:36.471754Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:36.489821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:36.489901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:36.491190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4274, node 2 2025-04-07T14:12:36.547129Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:36.547157Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:36.547166Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:36.547259Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18443 TClient is connected to server localhost:18443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:36.972232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:36.977662Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:36.991249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.072698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.221162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.298936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.322431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573972928115439:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.322502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.362940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.388582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.417364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.442139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.465217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.491781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.524847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573972928115932:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.524910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.524965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573972928115937:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.527903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:39.537000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573972928115939:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpYql::EvaluateFor [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpScripting::ScanQueryDisable [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScheme::DisableResourcePoolClassifiersOnServerless [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> KqpOlapScheme::DropThenAddColumnCompaction [GOOD] >> KqpOlapScheme::DropTtlColumn |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList [GOOD] Test command err: Trying to start YDB, gRPC: 4592, MsgBus: 26842 2025-04-07T14:12:31.016541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573939075120234:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.017954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ee1/r3tmp/tmpIF4YTy/pdisk_1.dat 2025-04-07T14:12:31.332352Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4592, node 1 2025-04-07T14:12:31.387006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.387440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.404790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.421958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.421979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.421983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.422048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26842 TClient is connected to server localhost:26842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:32.003637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.027296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.157409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.335988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.391280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.042925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951960023628:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.043042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.312322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.338986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.366696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.401657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.430691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.497073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.578728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951960024129:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.578814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.579055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951960024134:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.582497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.592081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573951960024136:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:35.841577Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573956254991800:2474], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZWU5M2FjNjgtZjUxNWNhMWEtODJlYTRhMTAtN2VhMTEzNjc=. CustomerSuppliedId : . TraceId : 01jr89h3ks0t7kgvj9bp0mbhjv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:12:35.842799Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490573956254991802:2475], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZWU5M2FjNjgtZjUxNWNhMWEtODJlYTRhMTAtN2VhMTEzNjc=. TraceId : 01jr89h3ks0t7kgvj9bp0mbhjv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490573956254991797:2465], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:35.850382Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU5M2FjNjgtZjUxNWNhMWEtODJlYTRhMTAtN2VhMTEzNjc=, ActorId: [1:7490573956254991753:2465], ActorState: ExecuteState, TraceId: 01jr89h3ks0t7kgvj9bp0mbhjv, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 2025-04-07T14:12:36.014536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573939075120234:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.014662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12757, MsgBus: 21265 2025-04-07T14:12:36.566243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573961017660374:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.566304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ee1/r3tmp/tmp1mUsAL/pdisk_1.dat 2025-04-07T14:12:36.682946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12757, node 2 2025-04-07T14:12:36.711602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:36.711683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:36.725313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:36.767512Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:36.767543Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:36.767561Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:36.767683Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21265 TClient is connected to server localhost:21265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:12:37.189945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:37.196344Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:37.210769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.272496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.434919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.539404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.659364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973902563960:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.659472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.694815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.723513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.750583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.778090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.802084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.829602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.873425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973902564454:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.873488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.873529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573973902564459:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.876436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:39.884297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573973902564461:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:40.954161Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490573978197532126:2474], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzFhYzJiMjQtZTFkZmQzNDktZmE3MGNjMmQtMjFhNzVlODM=. TraceId : 01jr89h8mbb68acapdjn1bvnqy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T14:12:40.954610Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490573978197532128:2475], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzFhYzJiMjQtZTFkZmQzNDktZmE3MGNjMmQtMjFhNzVlODM=. TraceId : 01jr89h8mbb68acapdjn1bvnqy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490573978197532123:2465], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:40.955293Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzFhYzJiMjQtZTFkZmQzNDktZmE3MGNjMmQtMjFhNzVlODM=, ActorId: [2:7490573978197532072:2465], ActorState: ExecuteState, TraceId: 01jr89h8mbb68acapdjn1bvnqy, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpScripting::StreamDdlAndDml [GOOD] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 18362, MsgBus: 15959 2025-04-07T14:12:31.405733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573939327034591:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.405870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001edb/r3tmp/tmpcjXexW/pdisk_1.dat 2025-04-07T14:12:31.764447Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18362, node 1 2025-04-07T14:12:31.789933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.790029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.802986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.889216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.889240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.889257Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.889342Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15959 TClient is connected to server localhost:15959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.389713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.413977Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:32.428510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:32.558994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:32.739326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.810207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.454934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952211937966:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.455011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.732306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.761967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.786748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.813504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.838855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.865038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.901191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952211938458:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.901277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.901496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952211938463:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.905227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.914368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573952211938465:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:35.738918Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490573956506906076:2465], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-04-07T14:12:35.739110Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmJiNGQwMjctNzYyY2E3MzEtOTdlNzQxOGItZTU4NDM3NjQ=, ActorId: [1:7490573956506906067:2460], ActorState: ExecuteState, TraceId: 01jr89h3rcfswern0zz9599vfz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 29012, MsgBus: 16585 2025-04-07T14:12:36.494846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573958831470860:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.494928Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001edb/r3tmp/tmpO3PibB/pdisk_1.dat 2025-04-07T14:12:36.582783Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:36.623048Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:36.623131Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:36.624128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29012, node 2 2025-04-07T14:12:36.675082Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:36.675120Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:36.675128Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:36.675231Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16585 TClient is connected to server localhost:16585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:37.114182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:37.123152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:37.210845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.373501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.438156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.442611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573971716374437:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.442707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.479821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.506543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.534830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.560995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.588523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.626198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.658724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573971716374930:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.658792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.658793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573971716374935:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.661735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:39.670898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573971716374937:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:40.708534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.255272Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161288, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 4641, MsgBus: 17408 2025-04-07T14:12:31.249644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573939984721165:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.249873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001edc/r3tmp/tmp3jKmSf/pdisk_1.dat 2025-04-07T14:12:31.651818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.662993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.663129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.683799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4641, node 1 2025-04-07T14:12:31.823493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.823519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.823533Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.823619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17408 TClient is connected to server localhost:17408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.373491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:32.412558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:32.578605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.752070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:32.813004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.318096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952869624628:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.318193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.542972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.570722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.636054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.698259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.737674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.786280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.826671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952869625131:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.826727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.826884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952869625136:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.829585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.845603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573952869625138:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.196249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156213, txId: 281474976710671] shutting down 2025-04-07T14:12:36.244828Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573939984721165:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.244893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5960, MsgBus: 32371 2025-04-07T14:12:36.983976Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573959426050673:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.984041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001edc/r3tmp/tmpRalRO7/pdisk_1.dat 2025-04-07T14:12:37.114379Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:37.116279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:37.116349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:37.117619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5960, node 2 2025-04-07T14:12:37.177279Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.177306Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.177314Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.177411Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32371 TClient is connected to server localhost:32371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:37.606551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:37.614999Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:37.639259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.728634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.927826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.013833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.914164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573972310954274:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.914260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.930946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.953581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.982125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.009236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.038512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.107948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.145208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976605922066:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.145263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.145340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976605922071:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.147321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.153356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573976605922073:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:41.138642Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161176, txId: 281474976715671] shutting down 2025-04-07T14:12:41.302812Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161337, txId: 281474976715673] shutting down 2025-04-07T14:12:41.463937Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161498, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 2797, MsgBus: 13669 2025-04-07T14:12:32.224920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573941618411733:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.224967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea9/r3tmp/tmpIYDQJ9/pdisk_1.dat 2025-04-07T14:12:32.636455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:32.641484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:32.641577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2797, node 1 2025-04-07T14:12:32.649302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:32.777213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:32.777253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:32.777265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:32.777344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13669 TClient is connected to server localhost:13669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:33.311551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:33.330952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.449718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.608179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:33.685191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.243582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954503315327:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.243726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.486119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.518194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.542239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.606381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.632370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.656791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.701636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954503315822:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.701723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.701983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954503315827:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.705414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:35.714414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573954503315829:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 10054, MsgBus: 25699 2025-04-07T14:12:37.419945Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573966078341176:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:37.420511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea9/r3tmp/tmpqtOZqe/pdisk_1.dat 2025-04-07T14:12:37.554146Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:37.566728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:37.566795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:37.568268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10054, node 2 2025-04-07T14:12:37.617849Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.617869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.617875Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.617985Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25699 TClient is connected to server localhost:25699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.058757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.075339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.144526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:38.298298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:38.375482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:40.359889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573978963244763:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.359971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.390564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.419671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.443397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.468117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.515820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.549378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.624607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573978963245263:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.624683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.624689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573978963245268:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.627373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.635216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573978963245270:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::EvaluateExpr3 [GOOD] >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 24491, MsgBus: 25007 2025-04-07T14:12:31.333084Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573937740688194:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.333136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ec6/r3tmp/tmpBoxsgo/pdisk_1.dat 2025-04-07T14:12:31.635045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24491, node 1 2025-04-07T14:12:31.704053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.704388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.734168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:31.769276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.769303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.769315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.769400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25007 TClient is connected to server localhost:25007 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.320304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:32.332857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:32.339588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.447579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.621850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:32.690226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.308310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950625591780:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.319196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.539307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.610761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.632818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.653862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.674782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.702930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.751865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950625592275:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.752021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.752252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950625592280:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.754763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.763795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573950625592282:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.193719Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156213, txId: 281474976710671] shutting down 2025-04-07T14:12:36.332787Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573937740688194:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.332864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30112, MsgBus: 12006 2025-04-07T14:12:36.845433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573959554569254:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.845509Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ec6/r3tmp/tmp6KFYUC/pdisk_1.dat 2025-04-07T14:12:36.962803Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:36.990946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:36.991026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:36.992249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30112, node 2 2025-04-07T14:12:37.073231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.073253Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.073259Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.073348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12006 TClient is connected to server localhost:12006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:37.521850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:37.540804Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:37.554780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:37.641337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.856179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.938227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.879118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573972439472848:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.879194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.917507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.939334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.963265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.985797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.011665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.044941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.083675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976734440634:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.083751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.083763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976734440639:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.086916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.094748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573976734440641:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:41.366487Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161400, txId: 281474976715671] shutting down 2025-04-07T14:12:41.845516Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490573959554569254:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:41.845607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 26867, MsgBus: 27927 2025-04-07T14:12:31.631332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573939964956212:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.631361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ead/r3tmp/tmpWBlc5R/pdisk_1.dat 2025-04-07T14:12:32.037469Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:32.060090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:32.060197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:32.061845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26867, node 1 2025-04-07T14:12:32.201260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:32.201287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:32.201294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:32.201382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27927 TClient is connected to server localhost:27927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:32.726617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.750490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.882517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.017451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:33.073159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.645249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573952849859814:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.645358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.870449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.899910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.923780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.948145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.973445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.041072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.094359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573957144827609:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.094409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.094621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573957144827614:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.097510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:35.105025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573957144827616:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 4548, MsgBus: 17711 2025-04-07T14:12:36.946497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573961150731103:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.946553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ead/r3tmp/tmp3wwdE4/pdisk_1.dat 2025-04-07T14:12:37.073372Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:37.086414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:37.086499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:37.089707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4548, node 2 2025-04-07T14:12:37.189208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.189231Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.189238Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.189321Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17711 TClient is connected to server localhost:17711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:37.602212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.633075Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:37.650915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.737517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:37.911167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:37.999599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.928884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573974035634678:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.928960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:39.962532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.987719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.011979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.037727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.064328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.098605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.172183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573978330602475:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.172254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.172269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573978330602480:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.175224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.182749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573978330602482:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 10748, MsgBus: 23640 2025-04-07T14:12:32.108452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573942314628210:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.108520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea7/r3tmp/tmpd0Opgc/pdisk_1.dat 2025-04-07T14:12:32.526402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:32.539205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:32.539274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:32.542738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10748, node 1 2025-04-07T14:12:32.621680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:32.621696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:32.621704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:32.621768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23640 TClient is connected to server localhost:23640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:33.098691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.131907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:33.269462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:33.420822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.490179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.053569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955199531800:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.053705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.328742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.366262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.390559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.458399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.525101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.551088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.627502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955199532302:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.627603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.628086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955199532307:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.633625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:35.644173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573955199532309:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.878735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156892, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 16753, MsgBus: 24082 2025-04-07T14:12:37.671263Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573962850444973:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:37.671333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea7/r3tmp/tmpYbLXi8/pdisk_1.dat 2025-04-07T14:12:37.859997Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:37.870653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:37.870728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:37.872278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16753, node 2 2025-04-07T14:12:37.927234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.927258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.927266Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.927361Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24082 TClient is connected to server localhost:24082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.366665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.373736Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:38.378188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:38.451999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.603987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.682843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:40.398266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573975735348571:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.398350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.437764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.465747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.491170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.518747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.543231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.580191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.635655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573975735349064:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.635714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.635756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573975735349069:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.638566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.645139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573975735349071:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 8330, MsgBus: 3352 2025-04-07T14:12:32.311955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573941311712500:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.312027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea6/r3tmp/tmpkMIvkr/pdisk_1.dat 2025-04-07T14:12:32.666498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8330, node 1 2025-04-07T14:12:32.716729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:32.720938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:32.723777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:32.761072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:32.761101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:32.761108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:32.761207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3352 TClient is connected to server localhost:3352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:33.283417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.295943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:33.311293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:33.434592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.583010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.664460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:35.150290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954196616086:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.150403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.416747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.442394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.473257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.499652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.523548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.551085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.619107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954196616580:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.619169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.619338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573954196616585:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.622462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:35.631624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573954196616587:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.562084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.816228Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156857, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 21539, MsgBus: 3024 2025-04-07T14:12:37.807226Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573964608214658:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:37.807301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea6/r3tmp/tmphGK1sq/pdisk_1.dat 2025-04-07T14:12:37.925473Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:37.925841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:37.925923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:37.954792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21539, node 2 2025-04-07T14:12:37.999731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:37.999757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:37.999764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:37.999858Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3024 TClient is connected to server localhost:3024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.438567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:38.445086Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:38.453948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.531353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.682054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.767270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:40.413285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573977493118246:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.413354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.449682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.475350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.497642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.533466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.562895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.595031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.669744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573977493118744:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.669834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.669938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573977493118749:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.673155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.682343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573977493118751:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:41.524966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.905713Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161946, txId: 281474976715673] shutting down >> GroupWriteTest::WriteHardRateDispatcher >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> KqpScripting::ScriptStats [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat [GOOD] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> TSchemeShardMoveTest::Chain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 6950, MsgBus: 7642 2025-04-07T14:12:33.646202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573947264263992:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:33.646282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea0/r3tmp/tmpZmbAcs/pdisk_1.dat 2025-04-07T14:12:34.059198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:34.099412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:34.099523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6950, node 1 2025-04-07T14:12:34.102294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:34.153281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:34.153344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:34.153357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:34.153470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7642 TClient is connected to server localhost:7642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:34.652297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.669568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.797338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.948099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:35.021199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.480631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960149167594:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.480982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.712532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.745170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.810954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.840522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.871965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.913943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.966602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960149168093:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.966665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.966819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960149168098:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.970439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:36.980307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573960149168100:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 10782, MsgBus: 8488 2025-04-07T14:12:38.873045Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573967111159160:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:38.873096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea0/r3tmp/tmpDRiQaI/pdisk_1.dat 2025-04-07T14:12:38.957863Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.972620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:38.972691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10782, node 2 2025-04-07T14:12:38.973647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:39.011671Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:39.011689Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:39.011696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:39.011807Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8488 TClient is connected to server localhost:8488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:39.406718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.413311Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:12:39.419638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.466598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:39.596194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.668872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:41.453641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573979996062743:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.453707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.488177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.515072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.539313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.564046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.588904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.614034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.653711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573979996063234:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.653781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573979996063239:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.653795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.656372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:41.663510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573979996063241:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-04-07T14:09:46.866026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:46.866366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:46.866534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001378/r3tmp/tmp71VgJp/pdisk_1.dat 2025-04-07T14:09:47.228825Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19793, node 1 2025-04-07T14:09:47.421373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:47.421424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:47.421453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:47.421962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:47.423634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:47.514524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:47.514657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:47.528701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4957 2025-04-07T14:09:48.150437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:51.152101Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:51.185784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:51.185889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:51.226948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:51.232152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:51.433388Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:51.493440Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:51.493540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:51.532424Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:51.532725Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:51.532934Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:51.532990Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:51.533046Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:51.533109Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:51.533165Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:51.533217Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:51.534736Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:51.739347Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1744:2539] 2025-04-07T14:09:51.741599Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:51.753652Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:51.753713Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:51.753770Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:51.759486Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:51.759586Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1796:2567], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:51.791863Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1840:2588] 2025-04-07T14:09:51.791987Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1840:2588], schemeshard id = 72075186224037889 2025-04-07T14:09:51.816103Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:51.816194Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:51.821243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:51.831347Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:51.831517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:51.844527Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:51.861694Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:51.863577Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:52.139321Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:52.281198Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:53.088792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2141:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:53.088952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:53.106374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:53.199262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:53.199502Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:53.199803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:53.199938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:53.200090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:53.200260Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:53.200412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:53.200576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:53.200706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:53.200828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:53.200942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:53.201108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:53.227858Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:53.227975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:53.228149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:53.228202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:53.228427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:53.228477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:53.228590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:34.254862Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7683:5664]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:34.255129Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:34.255177Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7683:5664], StatRequests.size() = 1 2025-04-07T14:12:34.395138Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2ZkMzQwODMtZmVmMWMzNmQtNzQwMTdkNjctM2IyODgyMzQ=, TxId: 2025-04-07T14:12:34.395201Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2ZkMzQwODMtZmVmMWMzNmQtNzQwMTdkNjctM2IyODgyMzQ=, TxId: 2025-04-07T14:12:34.395825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:34.414260Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:34.414336Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:35.002244Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:35.002328Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:35.578961Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:35.579046Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is column table. 2025-04-07T14:12:35.579581Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:35.593172Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:35.593517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:35.593564Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 5], AnalyzedShards 1 2025-04-07T14:12:35.623885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:36.830442Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:36.830687Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:36.855877Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:36.855997Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:36.856054Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:36.856608Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:36.874415Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:36.874593Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:36.875109Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:36.875171Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:36.875782Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:36.875862Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:36.876081Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:36.889812Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:36.889890Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:36.890099Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:36.890781Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7782:5722], server id = [2:7783:5723], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:36.890910Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7782:5722], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:36.892776Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:36.892888Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:36.893054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:36.893220Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:36.893467Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:36.895335Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7782:5722], server id = [2:7783:5723], tablet id = 72075186224037899 2025-04-07T14:12:36.895364Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:36.895777Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:36.920824Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGU5OGU5YTMtZjU4YmM3MDgtMTY5NzgxYWEtMzgyM2I3YWY=, TxId: 2025-04-07T14:12:36.920892Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGU5OGU5YTMtZjU4YmM3MDgtMTY5NzgxYWEtMzgyM2I3YWY=, TxId: 2025-04-07T14:12:36.921449Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:36.937819Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:36.937890Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3084:3324] 2025-04-07T14:12:37.540337Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T14:12:37.540416Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:38.165015Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:38.165098Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:38.165153Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:39.452183Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:40.757471Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:40.768344Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:40.768445Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-07T14:12:40.768545Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:40.768576Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:40.768601Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:42.152659Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:42.152792Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is column table. 
2025-04-07T14:12:42.152828Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:42.153379Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:42.166618Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:42.167055Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:42.167117Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:42.167512Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:42.180517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:42.180700Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-07T14:12:42.181121Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7957:5819], server id = [2:7958:5820], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:42.181241Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7957:5819], path = { OwnerId: 72075186224037889 LocalId: 5 } 2025-04-07T14:12:42.181810Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:42.181874Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:42.181974Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:42.182093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:42.182332Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:42.184014Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7957:5819], server id = [2:7958:5820], tablet id = 72075186224037900 2025-04-07T14:12:42.184044Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:42.184574Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:42.203130Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWQ2MDIyMTUtNDE5YjlmZS0xZDExNWJmMS0xZmYxZjYx, TxId: 2025-04-07T14:12:42.203184Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWQ2MDIyMTUtNDE5YjlmZS0xZDExNWJmMS0xZmYxZjYx, TxId: 2025-04-07T14:12:42.203547Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:42.217089Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:12:42.217143Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3084:3324] >> SystemView::TopPartitionsFollowers [GOOD] >> SystemView::TabletsShards >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-04-07T14:09:44.930528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:44.930784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:44.930895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00139d/r3tmp/tmpxz7E1h/pdisk_1.dat 2025-04-07T14:09:45.264223Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9758, node 1 2025-04-07T14:09:45.465996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:45.466051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:45.466080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:45.466644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:45.468653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:45.561973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:45.562143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:45.575736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22432 2025-04-07T14:09:46.214065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:49.014888Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:49.050002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:49.050127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:49.089809Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:49.093693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:49.285143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:49.334765Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:49.334883Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:49.367684Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:49.367844Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:49.368060Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:49.368126Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:49.368227Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:49.368284Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:49.368332Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:49.368404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:49.368873Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:49.593953Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:49.594064Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:49.600263Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:49.609387Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:49.610258Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:49.611372Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:49.632764Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:49.632832Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:49.632896Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:49.634807Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:49.634898Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:49.650476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:49.666491Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:49.666652Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:49.680466Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:49.696845Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:49.736520Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:50.010272Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:50.196256Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:50.946676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:50.946813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:50.961975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:51.047042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:51.047227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:51.047485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:51.047591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:51.047711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:51.047866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:51.048001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:51.048129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:51.048282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:51.048405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:51.048514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:51.048619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:51.073572Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:51.073657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:51.073779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:51.073825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:51.073962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:51.074001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:51.074106Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... 14:12:32.170077Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:32.190160Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:32.190728Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:32.190794Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:12:32.206195Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:33.420723Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:33.420795Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:33.420826Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:12:33.420865Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:33.420914Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:33.421747Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:33.437103Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:12:33.437243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:33.437688Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:33.437758Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... 
unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-07T14:12:33.439104Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:12:33.439209Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:12:33.439591Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:12:33.439922Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2657:3163] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-07T14:12:33.439974Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2657:3163] 2025-04-07T14:12:33.456495Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:12:33.456583Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:33.456787Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2025-04-07T14:12:33.456990Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:33.457606Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7302:5422], server id = [2:7303:5423], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:33.457736Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7302:5422], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:33.458646Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:33.458751Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:33.458989Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:33.459168Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:33.459341Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7302:5422], server id = [2:7303:5423], tablet id = 72075186224037899 2025-04-07T14:12:33.459377Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:33.459572Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:33.462965Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:33.487239Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7320:5440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:33.487488Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:33.487544Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7320:5440], StatRequests.size() = 1 2025-04-07T14:12:33.604337Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDA2MmM2NWMtZTQzZTgwYjktMTVlZTdiOTgtZjE1MTczOTY=, TxId: 2025-04-07T14:12:33.604432Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDA2MmM2NWMtZTQzZTgwYjktMTVlZTdiOTgtZjE1MTczOTY=, TxId: 2025-04-07T14:12:33.605067Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:33.620062Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:33.620146Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:12:34.286915Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:34.287018Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:34.988988Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:12:34.989087Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:12:34.989129Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:12:36.325677Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:36.325842Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:36.348168Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:36.348317Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:36.348369Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:36.349214Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:36.364048Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:36.364469Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:36.364552Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:36.364935Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:36.378458Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:36.378603Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:12:36.379136Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7411:5490], server id = [2:7412:5491], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:36.379261Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7411:5490], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:36.379862Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:36.379937Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:36.380155Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:36.380319Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:36.380560Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:12:36.382434Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7411:5490], server id = [2:7412:5491], tablet id = 72075186224037899 2025-04-07T14:12:36.382465Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:36.383528Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:36.403027Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDhmN2I1ZmYtYTAwMzk0ZTMtODc2Y2I5My1lOTFlMWNkNg==, TxId: 2025-04-07T14:12:36.403095Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDhmN2I1ZmYtYTAwMzk0ZTMtODc2Y2I5My1lOTFlMWNkNg==, TxId: 2025-04-07T14:12:36.403905Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:36.428588Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:36.428687Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2657:3163] 2025-04-07T14:12:37.108802Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T14:12:37.108880Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:12:38.797397Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:39.815541Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:12:39.826413Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:12:39.826496Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-07T14:12:41.071555Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:12:41.071638Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
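[editor's note] The statistics persistence query quoted twice above by TQueryBase::RunDataQuery looks truncated in this captured log: `DECLARE $column_tags AS List;` and `DECLARE $data AS List;` are not valid YQL on their own, and the missing `<...>` element types were most likely stripped as markup when the log was extracted. A hedged reconstruction is sketched below; the element types and the `.metadata/_statistics` column layout (owner_id Uint64, local_path_id Uint64, stat_type Uint32, column_tag Uint32, data String) are assumptions for illustration, not taken verbatim from this log.

    -- Hypothetical reconstruction of the RunDataQuery statement seen above.
    -- The List<...> element types are assumed; only the parameter names,
    -- table name, and VALUES shape appear in the captured log.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;   -- presumably one tag per analyzed column
    DECLARE $data AS List<String>;          -- presumably one serialized sketch per column
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

The two VALUES rows are consistent with two columns being summarized for the force traversal requested earlier in this test (TTxAnalyze with Types: TYPE_COUNT_MIN_SKETCH).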
2025-04-07T14:12:42.500431Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:12:42.500621Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:12:42.522284Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-07T14:12:42.522339Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:43.336150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:43.336236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:43.336290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:43.336327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:43.336371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:43.336418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:43.336472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:43.336841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:43.404799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:43.404852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:43.415816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:43.415949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:43.416125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:43.421496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:43.421692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:43.422342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:43.422548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:43.424534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.426096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:43.426173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:12:43.426324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:43.426370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:43.426426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:43.426609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.433501Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:43.535119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:43.535305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.535551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:43.535735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:43.535774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.537708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:43.537838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:43.537997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.538033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:43.538064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:43.538100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:43.539413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.539466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:43.539507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:43.540715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.540747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.540838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.540895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.543257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:43.544388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:43.544534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:43.545283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:43.545373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:43.545405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.545593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:43.545634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.545777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:43.545832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:43.547404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:43.547442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:43.547594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.547631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:43.547811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.547848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:43.547936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:43.547972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.548016Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:43.548052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.548083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:43.548107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:43.548183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:43.548223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:43.548246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:43.549610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:43.549686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:43.549712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:43.549741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:43.549782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:43.549872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
rAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.840734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:12:43.841622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:43.841736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.841967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-04-07T14:12:43.842110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:12:43.842439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.842548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.842876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.842956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, 
read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.843879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.844043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.844118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.844176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.844403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:43.848877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:43.849840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:508:2445], Recipient [1:508:2445]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:43.849919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:43.851009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:43.851070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.851662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:43.851720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:43.851766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:43.851799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:43.852764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:543:2445], Recipient [1:508:2445]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:43.852811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:43.852848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:508:2445] sender: [1:564:2058] recipient: [1:15:2062] 2025-04-07T14:12:43.895734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:563:2489], Recipient [1:508:2445]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:12:43.895798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:43.895925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:43.896252Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 209us result status StatusSuccess 2025-04-07T14:12:43.896550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:43.897218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:565:2490], Recipient [1:508:2445]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-04-07T14:12:43.897261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-07T14:12:43.897290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-04-07T14:12:43.897325Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:12:43.897374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-07T14:12:43.897582Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:566:2491], Recipient [1:508:2445]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:12:43.897618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:43.897698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:43.897842Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 141us result status StatusSuccess 2025-04-07T14:12:43.898102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScripting::SecondaryIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:41.939170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:41.939255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:41.939295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:41.939329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:41.939369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:41.939414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:41.939485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:41.939768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:41.991399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:41.991457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:42.000776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:42.000902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:42.001032Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:42.005867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:42.006046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:42.006666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.006864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:42.008539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.009657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:42.009697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.009796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:42.009836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:42.009864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:42.009969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.015690Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:42.121711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:42.121930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.122199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:42.122442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:42.122498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.124761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.124905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:42.125106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.125158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T14:12:42.125195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:42.125270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:42.127072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.127130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:42.127169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:42.128800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.128845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.128941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.129003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.136351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:42.137977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:42.138140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:42.138945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.139036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:42.139075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.139290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:42.139330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.139452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:42.139518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:42.140956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:42.140987Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:42.141112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.141145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:42.141336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.141371Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:42.141441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:42.141473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.141505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:42.141535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.141560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:42.141581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:42.141669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:42.141698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:42.141724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:42.142958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:42.143037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:42.143065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:42.143092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:42.143130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:42.143253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:43.663572Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-07T14:12:43.663635Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-07T14:12:43.663844Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2025-04-07T14:12:43.664060Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:43.664140Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:43.664200Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:43.664529Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-07T14:12:43.664631Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-04-07T14:12:43.664801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-07T14:12:43.678065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:12:43.688766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:43.688936Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 190us result status StatusSuccess 2025-04-07T14:12:43.689332Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:44.163636Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-07T14:12:44.163697Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-07T14:12:44.163831Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2025-04-07T14:12:44.164064Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:44.164158Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:44.164221Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, 
Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-07T14:12:44.164500Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-07T14:12:44.164586Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-07T14:12:44.164735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-07T14:12:44.178154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:12:44.188799Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:44.188933Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 148us result status StatusSuccess 2025-04-07T14:12:44.189264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:44.210260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:44.210448Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 206us result status StatusSuccess 2025-04-07T14:12:44.210781Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 11542, MsgBus: 20885 2025-04-07T14:12:32.831891Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573942493709035:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:32.831941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea5/r3tmp/tmpTMClId/pdisk_1.dat 2025-04-07T14:12:33.224579Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11542, node 1 2025-04-07T14:12:33.259182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:33.259658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:33.267653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:33.290374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:33.290397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:33.290403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:33.290509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20885 TClient is connected to server 
localhost:20885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:33.763836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.777165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.781452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:33.921690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.062197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:34.126341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.691460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955378612634:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.691602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.937443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.972485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.003503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.029245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.063428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.094333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.141871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573959673580426:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.141934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.142186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573959673580431:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.145134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:36.153491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573959673580433:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 13130, MsgBus: 4047 2025-04-07T14:12:37.909372Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573963279021635:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:37.909429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea5/r3tmp/tmpqr4daZ/pdisk_1.dat 2025-04-07T14:12:38.053412Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.062956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:38.063043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:38.064341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13130, node 2 2025-04-07T14:12:38.173313Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:38.173346Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:38.173354Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:38.173452Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4047 TClient is connected to server localhost:4047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.618296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.629350Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:38.638998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.693294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:38.841996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.909981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:40.640012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976163925209:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.640084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.657826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.680825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.705748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.730286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.757649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.784382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.836776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976163925699:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.836874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.837286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573976163925704:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.841090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.849809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573976163925706:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:42.190554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.662056Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035162660, txId: 281474976710674] shutting down 2025-04-07T14:12:42.909439Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490573963279021635:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:42.909500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:43.167072Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035163185, txId: 281474976710678] shutting down 2025-04-07T14:12:43.424450Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035163406, txId: 281474976710682] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithUniformPartitionsCompat [GOOD] Test command err: Trying to start YDB, gRPC: 19114, MsgBus: 4311 2025-04-07T14:12:00.488281Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573805934509772:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:00.488388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141a/r3tmp/tmpSqh9Ze/pdisk_1.dat 2025-04-07T14:12:00.761115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19114, node 1 2025-04-07T14:12:00.824642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:00.824680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:00.824687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:00.824779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:00.864044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:00.864185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:00.865861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4311 TClient is connected to server localhost:4311 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:01.210257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.229225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.356614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.487326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:01.544481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:02.824732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573814524446072:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:02.824836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.041038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.064045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.084991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.104039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.125768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.150867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.184357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818819413860:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.184413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818819413865:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.184434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.187075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:03.194209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573818819413867:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:04.118827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1744035124181 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2025-04-07T14:12:04.159827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1744035124181 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2025-04-07T14:12:04.319130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1744035124181 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2025-04-07T14:12:04.360161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:12:04.385300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3525, MsgBus: 20476 2025-04-07T14:12:04.970391Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573824573521264:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:04.970498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;pa ... boperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29618, MsgBus: 7474 2025-04-07T14:12:33.137321Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490573947811640394:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:33.137407Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141a/r3tmp/tmpvoycId/pdisk_1.dat 2025-04-07T14:12:33.285575Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:33.313948Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:33.314049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:33.315998Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29618, node 7 2025-04-07T14:12:33.361061Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:33.361083Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:33.361088Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:33.361198Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7474 TClient is connected to server localhost:7474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:33.951328Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... --!syntax_v1 CREATE TABLE `/Root/TableWithTtlSettings` ( Key Uint64, Datetime64Column Datetime64, PRIMARY KEY (Key) ) WITH ( TTL = Interval("P1D") ON Datetime64Column ) 2025-04-07T14:12:36.963693Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490573960696542908:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.963798Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.990184Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3537, MsgBus: 22260 2025-04-07T14:12:38.174873Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490573969560091100:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:38.174939Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00141a/r3tmp/tmpwYtCZC/pdisk_1.dat 2025-04-07T14:12:38.308974Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.339257Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:38.339378Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:38.341442Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3537, node 8 2025-04-07T14:12:38.408177Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:38.408202Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:38.408210Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:38.408329Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22260 TClient is connected to server localhost:22260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:39.043330Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:12:39.062558Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.141296Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.436681Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:39.514648Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:42.025053Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490573986739961989:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:42.025171Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:42.072514Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.103052Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.133733Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.164758Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.197461Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.246130Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.289811Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490573986739962483:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:42.289883Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490573986739962488:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:42.289915Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:42.293178Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:42.301437Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490573986739962490:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:43.175259Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7490573969560091100:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:43.175357Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:43.434122Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 25343, MsgBus: 8057 2025-04-07T14:12:31.473540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573938041804509:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.473883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eba/r3tmp/tmp77jLhk/pdisk_1.dat 2025-04-07T14:12:31.928093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.939737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.939807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.940790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25343, node 1 2025-04-07T14:12:32.015093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:32.015112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:32.015118Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:32.015221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8057 TClient is connected to server localhost:8057 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.632569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.658585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.815696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.968280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:33.031442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.593695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573950926708076:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.593841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.865576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.891043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.919019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.991537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.016171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.050957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.100985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955221675870:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.101067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.101230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573955221675875:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.104391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:35.110739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573955221675877:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.161134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.473925Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573938041804509:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.473981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:36.561386Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156598, txId: 281474976710673] shutting down 2025-04-07T14:12:36.597363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.651127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.665059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.673648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.715346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.725982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-07T14:12:37.021365Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035157053, txId: 281474976710682] shutting down 2025-04-07T14:12:37.059387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:12:37.095727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:12:37.468968Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035157501, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 15973, MsgBus: 12820 2025-04-07T14:12:38.237548Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573970508073176:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:38.237617Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001eba/r3tmp/tmpajoCXI/pdisk_1.dat 2025-04-07T14:12:38.366380Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.369684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:38.369758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:38.371202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15973, node 2 2025-04-07T14:12:38.424069Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:38.424096Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:38.424103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:38.424194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12820 TClient is connected to server localhost:12820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.811539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.827473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.907416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:39.065566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:39.128899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:41.077802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573983392976744:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.077907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.113376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.169221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.193970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.216949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.244351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.289243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:41.320675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573983392977237:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.320765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.320827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573983392977242:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:41.323691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:41.330800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573983392977244:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:12:42.211161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.281074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:12:42.321330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:12:43.239166Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490573970508073176:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:43.239279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] Test command err: 2025-04-07T14:12:27.497704Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.500987Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.501192Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.501733Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.502728Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.502774Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.503544Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.503585Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.503689Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 
4294967295 2025-04-07T14:12:27.504121Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.504873Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.504954Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.505084Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.505237Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:19:2063] 2025-04-07T14:12:27.505267Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:19:2063] 2025-04-07T14:12:27.515458Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.515503Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.524044Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.524218Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.524444Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.524580Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.524753Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.524869Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.525004Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.525032Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.525106Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:28:2073] 2025-04-07T14:12:27.525135Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:28:2073] 2025-04-07T14:12:27.525942Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.526034Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.540332Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.540389Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.540431Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.540511Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.540546Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.540584Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.543644Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.545450Z node 1 :BS_NODE DEBUG: 
{NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.545624Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.545784Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.546064Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.546327Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.546369Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.546420Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-07T14:12:27.546481Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-07T14:12:27.546503Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-07T14:12:27.546598Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:49:2089] 2025-04-07T14:12:27.546618Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:49:2089] 2025-04-07T14:12:27.546959Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:49:2089] 2025-04-07T14:12:27.547075Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.547176Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.547216Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.549453Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.549720Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.549824Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.549866Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-07T14:12:27.549930Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.550540Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 
Cookie: 0} 2025-04-07T14:12:27.550588Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-07T14:12:27.550604Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-07T14:12:27.557945Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:19:2063] 2025-04-07T14:12:27.557996Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:19:2063] 2025-04-07T14:12:27.558215Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.558434Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.558982Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T14:12:27.559015Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.559111Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-07T14:12:27.559298Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T14:12:27.559349Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:45:2088] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.559396Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.018677s 2025-04-07T14:12:27.559607Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.559720Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.559784Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.559812Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-07T14:12:27.559887Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:28:2073] 2025-04-07T14:12:27.559911Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:28:2073] 2025-04-07T14:12:27.559947Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T14:12:27.559974Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.560047Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: ... 
TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-04-07T14:12:45.109179Z node 20 :BS_PROXY_COLLECT DEBUG: [9a4981dbe41244dc] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 3 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2025-04-07T14:12:45.109199Z node 20 :BS_PROXY_COLLECT INFO: [9a4981dbe41244dc] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-04-07T14:12:45.109528Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [20:678:2477] 2025-04-07T14:12:45.109559Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2025-04-07T14:12:45.109793Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:45.109838Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [21:546:2091] 2025-04-07T14:12:45.109925Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 21 [20:678:2477] 2025-04-07T14:12:45.109994Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [20:678:2477] 2025-04-07T14:12:45.110036Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2025-04-07T14:12:45.110072Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] ::Bootstrap [21:682:2142] 2025-04-07T14:12:45.110089Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] lookup [21:682:2142] 2025-04-07T14:12:45.110121Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594046678944 entry.State: StNormal ev: {EvForward TabletID: 72057594046678944 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:45.110143Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [20:317:2260] 2025-04-07T14:12:45.110166Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 20 [21:682:2142] 2025-04-07T14:12:45.110205Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [21:682:2142] 2025-04-07T14:12:45.110221Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 2025-04-07T14:12:45.110407Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [21:682:2142] 2025-04-07T14:12:45.110463Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [20:678:2477] 2025-04-07T14:12:45.110502Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [20:678:2477] 2025-04-07T14:12:45.110530Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2025-04-07T14:12:45.110611Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-07T14:12:45.110692Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:45.110755Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 
2025-04-07T14:12:45.110847Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:12:45.110888Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:12:45.110909Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:12:45.110946Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2025-04-07T14:12:45.110989Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2025-04-07T14:12:45.111043Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T14:12:45.111076Z node 20 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-07T14:12:45.111122Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [20:639:2448] 2025-04-07T14:12:45.111199Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [20:678:2477] 2025-04-07T14:12:45.111252Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2025-04-07T14:12:45.111312Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [21:682:2142] 2025-04-07T14:12:45.111345Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [21:682:2142] 2025-04-07T14:12:45.111406Z node 20 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [20:678:2477] 2025-04-07T14:12:45.111468Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [20:678:2477] 2025-04-07T14:12:45.111502Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [20:678:2477] 2025-04-07T14:12:45.111550Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [21:682:2142] 2025-04-07T14:12:45.111583Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [21:682:2142] 2025-04-07T14:12:45.111642Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 2025-04-07T14:12:45.111744Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [21:681:2142] EventType# 271122945 2025-04-07T14:12:45.111854Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-07T14:12:45.111906Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:12:45.112054Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ 
} -{ }, 0 gb} 2025-04-07T14:12:45.112109Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:12:45.112923Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [21:688:2143] 2025-04-07T14:12:45.112945Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [21:688:2143] 2025-04-07T14:12:45.113083Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [21:688:2143] 2025-04-07T14:12:45.113160Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:45.113186Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2025-04-07T14:12:45.113302Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 20 [21:688:2143] 2025-04-07T14:12:45.113442Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [21:688:2143] 2025-04-07T14:12:45.113463Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2025-04-07T14:12:45.113754Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [21:688:2143] 2025-04-07T14:12:45.114213Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [21:688:2143] 2025-04-07T14:12:45.114234Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [21:688:2143] 2025-04-07T14:12:45.114255Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [21:688:2143] 2025-04-07T14:12:45.114317Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2025-04-07T14:12:45.114473Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [21:686:2143] EventType# 268959744 2025-04-07T14:12:45.114596Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{40, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-07T14:12:45.114645Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{40, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:12:45.114748Z node 20 :HIVE WARN: HIVE#72057594037927937 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:45.114811Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{40, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{24, redo 152b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-07T14:12:45.114857Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{40, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:12:45.115169Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [20:697:2482] 2025-04-07T14:12:45.115203Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [20:697:2482] 2025-04-07T14:12:45.115262Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:45.115305Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: 
[20:316:2259] 2025-04-07T14:12:45.115346Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [20:697:2482] 2025-04-07T14:12:45.115386Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [20:697:2482] 2025-04-07T14:12:45.115420Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [20:697:2482] 2025-04-07T14:12:45.115465Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [20:697:2482] 2025-04-07T14:12:45.115545Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [20:697:2482] 2025-04-07T14:12:45.115649Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [20:697:2482] 2025-04-07T14:12:45.115687Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [20:697:2482] 2025-04-07T14:12:45.115724Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [20:697:2482] 2025-04-07T14:12:45.115766Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [20:697:2482] 2025-04-07T14:12:45.115799Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [20:697:2482] 2025-04-07T14:12:45.115844Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [20:696:2481] EventType# 268697616 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TReplicationTests::CreateSequential ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:41.940506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:41.940584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:41.940624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:41.940678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:41.940716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:41.940749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:41.940803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:41.941125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:41.989697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:41.989755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:41.997660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:41.997792Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:41.997945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:42.003458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:42.003688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:42.004297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.004500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:42.009263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.010489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:42.010542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.010664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:42.010711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:42.010746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:42.010897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.017170Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:42.116206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:42.116395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.116648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:42.116867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:42.116920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.118858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.118975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:42.119123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.119163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:42.119197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:42.119244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:42.120860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.120911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:42.120938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:42.122285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.122326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.122414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.122471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.125527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:42.127119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:42.127287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:42.128170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:42.128289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:42.128329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.128537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:42.128576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:42.128742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:42.128804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:42.130386Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:42.130422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:42.130586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:42.130630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:42.130846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:42.130884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:42.130968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:42.130997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.131034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:42.131072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:42.131102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:42.131128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:42.131187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:42.131222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:42.131247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:42.132923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:42.133015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:42.133047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:42.133080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:42.133137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:42.133246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify ... 
tID 72057594046678944 is [1:127:2152] sender: [1:751:2058] recipient: [1:749:2707] Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:753:2058] recipient: [1:749:2707] 2025-04-07T14:12:45.464009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:45.464136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:45.464184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:45.464223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:45.464260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:45.464308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:45.464367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:45.464719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:45.479961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:45.481366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:45.481538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:45.481806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:45.481850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:45.481969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:45.482670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:45.482776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:12:45.482858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.482928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.483321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.483500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:12:45.483758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:45.483911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.484013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.484054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:45.484172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.484358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.484709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.485951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.486942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.494685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:45.494750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:45.495653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:45.495708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:45.495774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:45.496600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:805:2058] recipient: [1:15:2062] 2025-04-07T14:12:45.532117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:45.532398Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 307us result status StatusSuccess 2025-04-07T14:12:45.532876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82256 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.534859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:45.535021Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 186us result status StatusSuccess 2025-04-07T14:12:45.535393Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardMoveTest::Index [GOOD] >> KqpOlapScheme::DropTtlColumn [GOOD] >> KqpOlapScheme::DropTableAfterInsert >> TReplicationTests::Create >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::InitiatingRequest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:43.370794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:43.370870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:43.370909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:43.370947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:43.371006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:43.371058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:43.371104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-07T14:12:43.371413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:43.427454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:43.427528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:43.437425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:43.437529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:43.437668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:43.445208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:43.445427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:43.446062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:43.446279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:43.452784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.454276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:43.454343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.454497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:43.454539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:43.454577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:43.454740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.461413Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:43.597736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:43.597988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.598311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:43.598545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:43.598600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.601179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-07T14:12:43.601320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:43.601495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.601547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:43.601583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:43.601635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:43.603604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.603661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:43.603699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:43.605425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.605466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.605582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.605652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.609618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:43.611558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:43.611753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:43.612936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:43.613068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:43.613118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.613380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:43.613437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:43.613616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:43.613690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:43.615614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:43.615658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:43.615852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:43.615899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:43.616160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:43.616204Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:43.616306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:43.616345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.616397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:43.616442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:43.616479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:43.616526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:43.616651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:43.616717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:43.616752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:43.618813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:43.618931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:43.618970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:43.619008Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:43.619063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:43.619201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.113995Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-07T14:12:45.114066Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-07T14:12:45.114753Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-04-07T14:12:45.114873Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-07T14:12:45.115071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-07T14:12:45.127865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:12:45.138461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.138614Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 173us result status StatusSuccess 2025-04-07T14:12:45.138937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.612803Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-07T14:12:45.612884Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-07T14:12:45.613569Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-04-07T14:12:45.613669Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-07T14:12:45.613845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-07T14:12:45.626950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-07T14:12:45.637621Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.637804Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 206us result status StatusSuccess 2025-04-07T14:12:45.638187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.659444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:45.659735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 333us result status StatusSuccess 2025-04-07T14:12:45.660169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.660950Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:566:2498] connected; active server actors: 1 2025-04-07T14:12:45.673168Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-04-07T14:12:45.673747Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-07T14:12:45.674827Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.674968Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 161us result status StatusSuccess 2025-04-07T14:12:45.675288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: 
Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.675469Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-04-07T14:12:45.676154Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-04-07T14:12:45.698784Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:613:2533] connected; active server actors: 1 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TSchemeShardMoveTest::MoveIndex |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TTxAllocatorClientTest::ZeroRange >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:44.599532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:44.599606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:44.599634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:44.599658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:44.599688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-07T14:12:44.599712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:44.599750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:44.599992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:44.650112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:44.650154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:44.657622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:44.657711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:44.657829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:44.661805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:44.661996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:44.662478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.662684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:44.664429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.665496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:44.665558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.665655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:44.665693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:44.665723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:44.665880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.671185Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:44.758798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:44.758948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.759103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:44.759288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:44.759323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.761393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.761541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:44.761725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.761772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:44.761802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:44.761851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:44.763520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.763568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:44.763598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:44.765087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.765127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.765159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.765220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.768341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:44.769956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:44.770119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:44.771038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.771144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:44.771184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.771392Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:44.771441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.771591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:44.771664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:44.773448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:44.773484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:44.773639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.773674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:44.773848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.773881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:44.773957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:44.773997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.774046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:44.774077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.774103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:44.774138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:44.774191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:44.774225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:44.774249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:44.775925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:44.776006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:44.776035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:44.776075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:44.776119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:44.776213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:46.191194Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:46.191276Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 91us result status StatusSuccess 2025-04-07T14:12:46.191442Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:46.191697Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:46.191820Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 132us result status StatusSuccess 2025-04-07T14:12:46.193913Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:46.194432Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:46.194564Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 145us result status StatusSuccess 2025-04-07T14:12:46.194935Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:45.760181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:45.760250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:45.760276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:45.760304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:45.760340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:45.760362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:45.760421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:45.760709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:45.811399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T14:12:45.811463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:45.819783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:45.819877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:45.819998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:45.823828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:45.823987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:45.824463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.824631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:45.826025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:45.826979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:45.827019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:45.827119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:45.827151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:45.827179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:45.827322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.832821Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:45.936004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:45.936191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.936467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:45.936686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:45.936730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.938797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.938933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T14:12:45.939091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.939139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:45.939170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:45.939206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:45.940827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.940887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:45.940925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:45.942713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.942761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.942868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:45.942942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:45.946684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:45.948833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:45.949025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:45.950031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:45.950160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:45.950202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:45.950475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:45.950531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:45.950700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:45.950770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:45.953773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:45.953812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:45.953967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:45.953999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:45.954185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:45.954223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:45.954301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:45.954342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:45.954376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:45.954409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:45.954436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:45.954458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:45.954576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:45.954609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:45.954635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:45.956089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:45.956164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:45.956206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:45.956234Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:45.956278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:45.956378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
LocalPathId: 1], version: 11 2025-04-07T14:12:46.426706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:12:46.426765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T14:12:46.426792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:46.427245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:854:2736], Recipient [1:280:2268]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:12:46.427273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:12:46.427297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2025-04-07T14:12:46.427423Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-04-07T14:12:46.427521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:213:2213], Recipient [1:280:2268]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2025-04-07T14:12:46.427543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-04-07T14:12:46.427576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-07T14:12:46.427820Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-07T14:12:46.427972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:213:2213], Recipient [1:280:2268]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2025-04-07T14:12:46.427996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-04-07T14:12:46.428025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-07T14:12:46.429879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:46.431431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:12:46.431467Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:46.431914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:12:46.431993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:12:46.432017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:46.433084Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-07T14:12:46.433193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-07T14:12:46.433332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:854:2736], Recipient [1:280:2268]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:46.433364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:46.433404Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:12:46.433769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:12:46.433805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:12:46.434246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:870:2752], Recipient [1:280:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:46.434291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:46.434323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-07T14:12:46.434482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:492:2433], Recipient [1:280:2268]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-04-07T14:12:46.434518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T14:12:46.434576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:12:46.434663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:12:46.434714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:868:2750] 2025-04-07T14:12:46.434886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:870:2752], Recipient [1:280:2268]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:46.434918Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:46.434947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-07T14:12:46.435533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:871:2753], Recipient [1:280:2268]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:12:46.435591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:46.435674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:46.435849Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 165us result status StatusSuccess 
2025-04-07T14:12:46.436253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:46.437075Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:872:2754], Recipient [1:280:2268]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-04-07T14:12:46.437123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-07T14:12:46.437183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-04-07T14:12:46.437224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:12:46.437638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:873:2755], Recipient [1:280:2268]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:12:46.437680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:46.437771Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:46.439624Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 163us result status StatusSuccess 2025-04-07T14:12:46.440022Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-04-07T14:12:46.681108Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T14:12:46.682107Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T14:12:46.683569Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T14:12:46.693426Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.696501Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T14:12:46.711176Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711557Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711710Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T14:12:46.711836Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711924Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.712025Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T14:12:46.712155Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T14:12:46.713434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2025-04-07T14:12:46.715826Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.715897Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.715993Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-07T14:12:46.716052Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 >> GroupWriteTest::TwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-04-07T14:12:46.683108Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T14:12:46.683505Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T14:12:46.684133Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T14:12:46.693427Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.696482Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T14:12:46.710976Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711118Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711253Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T14:12:46.711377Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711469Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.711583Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T14:12:46.711716Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T14:12:46.713402Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2025-04-07T14:12:46.715773Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.715856Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.715966Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-07T14:12:46.716018Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 2025-04-07T14:12:46.716314Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.716501Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.716687Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.716876Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.717006Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2025-04-07T14:12:46.718446Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.718506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.718613Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-04-07T14:12:46.718653Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 5000 to# 10000 2025-04-07T14:12:46.718860Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.719036Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.719234Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.719503Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-07T14:12:46.719634Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2025-04-07T14:12:46.720016Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.720117Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.720210Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-04-07T14:12:46.720254Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 10000 to# 15000 2025-04-07T14:12:46.720456Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyMode >> TTxAllocatorClientTest::Boot |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 14591114950969629474 2025-04-07T14:12:39.120774Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T14:12:39.120859Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T14:12:39.145360Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T14:12:39.145441Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T14:12:39.145526Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T14:12:39.145552Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T14:12:39.149169Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-07T14:12:39.149247Z 1 00h01m00.010512s 
:BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-07T14:12:39.167854Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:39.167949Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:39.171847Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T14:12:39.171939Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T14:12:47.073445Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:12:47.073520Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.073558Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.073579Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:12:47.073600Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.073625Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.073643Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:12:47.073663Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.073685Z 1 
00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:47.149695Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-07T14:12:47.149778Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-07T14:12:47.149815Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-07T14:12:47.149854Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-07T14:12:47.149892Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-04-07T14:12:47.149928Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} >> KqpScripting::JoinIndexLookup [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-04-07T14:12:47.717057Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T14:12:47.717442Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T14:12:47.718086Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T14:12:47.719695Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:47.720073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T14:12:47.729436Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:47.729555Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:47.729683Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T14:12:47.729784Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:47.729857Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:47.729933Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T14:12:47.730042Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> TReplicationTests::ConsistencyMode [GOOD] >> TReplicationTests::Alter |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6263, MsgBus: 6209 2025-04-07T14:12:37.572890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573964989884226:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:37.573091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e9d/r3tmp/tmpKubUNy/pdisk_1.dat 2025-04-07T14:12:38.059749Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:38.064847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:38.064985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:38.066292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6263, node 1 2025-04-07T14:12:38.153161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:38.153180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:38.153191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:38.153403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:6209 TClient is connected to server localhost:6209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:38.614897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.627924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:38.640735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:38.755511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:38.884133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:38.951094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:40.450675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573977874787664:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.450776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.782630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.806350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.828831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.851438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.879092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.904602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:40.942185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573977874788157:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.942247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.942444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573977874788162:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:40.945580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:40.953724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573977874788164:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:42.502857Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035162513, txId: 281474976710671] shutting down 2025-04-07T14:12:42.561001Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573964989884226:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:42.561052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27044, MsgBus: 21743 2025-04-07T14:12:43.183502Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573989375133272:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:43.183563Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e9d/r3tmp/tmp4Nz3dM/pdisk_1.dat 2025-04-07T14:12:43.278005Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:43.299499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:43.299578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:43.300821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27044, node 2 2025-04-07T14:12:43.343490Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:43.343508Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:43.343513Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:43.343617Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21743 TClient is connected to server localhost:21743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:43.728683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:43.746114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:43.816945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:43.956539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:44.013626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:45.688767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573997965069576:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:45.688860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:45.717676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.799633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.824259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.851265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.877857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.909149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:45.951021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573997965070071:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:45.951093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:45.951422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573997965070076:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:45.954379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:45.962541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490573997965070078:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> SystemView::TabletsShards [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TDatabaseResolverTests::Greenplum_MasterNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:47.046916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:47.047012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:47.047050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:47.047083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:47.047126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:47.047154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:47.047220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:47.047558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:47.112373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:47.112438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:47.123124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:47.123253Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:47.123432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:47.128964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:47.129149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:47.129770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.129956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:47.131822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.132989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:47.133206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.133243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:47.133405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.139346Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:47.228988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:47.229213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.229403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:47.229620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:47.229665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.231742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.231881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:47.232058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.232114Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:47.232151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:47.232195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:47.234160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.234220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:47.234256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:47.235937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.235987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.236039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.236107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.238852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:47.240506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:47.240646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:47.241479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.241583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:47.241623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.241788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:47.241822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.241936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.242026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:47.243677Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.243713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.243841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.243872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:47.244035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.244077Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:47.244159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:47.244183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.244216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:47.244244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.244266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:47.244299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:47.244352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:47.244391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:47.244425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:47.245998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.246073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.246100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:47.246147Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:47.246182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.246260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:48.580243Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:48.580353Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 123us result status StatusSuccess 2025-04-07T14:12:48.580635Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:48.581100Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:48.581323Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 221us result status StatusSuccess 2025-04-07T14:12:48.581930Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-07T14:12:48.585328Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:48.585497Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 177us result status StatusSuccess 2025-04-07T14:12:48.585964Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] |95.1%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-04-07T14:12:49.173317Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-04-07T14:12:49.093396Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-04-07T14:12:49.103503Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-04-07T14:12:49.182378Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-04-07T14:12:49.168688Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-04-07T14:12:49.380015Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
>> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2025-04-07T14:12:10.222447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573849342838545:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:10.222619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00109a/r3tmp/tmpsFJbvO/pdisk_1.dat 2025-04-07T14:12:10.544568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2232, node 1 2025-04-07T14:12:10.596958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:10.597165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:10.603136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:10.642788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:10.642816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:10.642823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:10.642947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:11.051329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:11.087610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:11.114553Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573852529612201:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:11.114590Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:11.122663Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490573850799981645:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:11.122972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:11.123165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:11.129252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-07T14:12:11.152891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:11.156636Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-07T14:12:11.205256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:11.205732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:11.211367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:12:11.212275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:11.309178Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7490573852529612331:2178], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-07T14:12:11.309418Z node 5 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-07T14:12:11.309530Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:11.311904Z node 5 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-07T14:12:11.311961Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-07T14:12:11.321262Z node 5 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [5:7490573852529612331:2178], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2025-04-07T14:12:11.322068Z node 5 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [5:7490573852529612331:2178], database# /Root/Tenant1, no sysview processor 2025-04-07T14:12:11.324088Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-07T14:12:11.324147Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-07T14:12:11.324801Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-07T14:12:11.324853Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-07T14:12:11.324884Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-07T14:12:11.324932Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading 
nodes to request: nodes count# 0, hashes count# 0 2025-04-07T14:12:11.324966Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-07T14:12:11.325006Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-07T14:12:11.325091Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-07T14:12:11.325130Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-07T14:12:11.325174Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-07T14:12:11.325213Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-07T14:12:11.325248Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-07T14:12:11.325282Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-07T14:12:11.325319Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-07T14:12:11.325349Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-04-07T14:12:11.325417Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-04-07T14:12:11.325559Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-07T14:12:11.325588Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-07T14:12:11.325618Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-07T14:12:11.325729Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-07T14:12:11.000000Z 2025-04-07T14:12:11.331527Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-07T14:12:11.334527Z node 5 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-07T14:12:11.336698Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2025-04-07T14:12:11.340152Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-07T14:12:11.364345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:11.386803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490573851381065317:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:11.386878Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:11.390450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:11.390546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:11.393458Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573854431335812:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:11.393515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:11.394261Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:12:11.400152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:11.406795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:11.407029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-04-07T14:12:11.412917Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:11.413878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:11.461138Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2025-04-07T14:12:11.461223Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2025-04-07T14:12:11.467342Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7490573851381065455:2186], path id# [OwnerId: 72057594046644480, LocalPathId: 3], service# 2 2025-04-07T14:12:11.474906Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7490573851381065455:2186], path id# [OwnerId: 72057594046644480, LocalPathId: 3], database# /Root/Tenant2 2025-04-07T14:12:11.475915Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7490573851381065455:2186], database# /Root/Tenant2, no sysview processor 2025-0 ... 
6850Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:42.336868Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T14:12:42.337228Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-07T14:12:42.337342Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7490573929137817045:2550], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:42.337332Z node 9 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:42.337374Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:42.337558Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T14:12:42.340862Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817124:2615], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.340898Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.341109Z node 9 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [9:7490573928378238033:2183], Recipient [9:7490573928378238040:2281]: NKikimr::TEvTablet::TEvTabletDead 2025-04-07T14:12:42.341193Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:42.341415Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-07T14:12:42.341944Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:42.342541Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037890, from:72057594046644480 is reset 2025-04-07T14:12:42.342668Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817122:2613], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342693Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342797Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2025-04-07T14:12:42.342792Z node 9 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-04-07T14:12:42.342858Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817123:2614], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342860Z node 9 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-04-07T14:12:42.342878Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342905Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037889, from:72057594046644480 is reset 2025-04-07T14:12:42.342952Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817127:2618], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342968Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.342980Z node 6 
:FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037893, from:72057594046644480 is reset 2025-04-07T14:12:42.342888Z node 10 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [10:7490573932281220280:2229], Recipient [10:7490573932281220292:2285]: NKikimr::TEvTablet::TEvTabletDead 2025-04-07T14:12:42.343276Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817125:2616], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.343297Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.343311Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037891, from:72057594046644480 is reset 2025-04-07T14:12:42.343372Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7490573929137817126:2617], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.343384Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:12:42.343392Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037892, from:72057594046644480 is reset 2025-04-07T14:12:42.342970Z node 10 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-04-07T14:12:42.343439Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7490573929137817209:2681], Recipient [6:7490573911957947320:2195]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:42.343459Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:42.343786Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T14:12:42.344564Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7490573928378237660:2055], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-07T14:12:42.345553Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7490573928378237660:2055], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-07T14:12:42.346564Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7490573932281219908:2129], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-07T14:12:42.346731Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7490573932281219908:2129], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-07T14:12:44.404625Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7490573995712828343:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:44.404694Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00109a/r3tmp/tmpn7atFu/pdisk_1.dat 2025-04-07T14:12:44.491504Z node 11 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1381, node 11 2025-04-07T14:12:44.526652Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:44.526745Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:44.528392Z node 11 :HIVE WARN: 
HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:44.557516Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:44.557548Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:44.557556Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:44.557671Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:44.784003Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:44.792290Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:48.129304Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490574012892698311:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:48.129361Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7490574012892698330:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:48.129436Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:48.133052Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:12:48.142751Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7490574012892698340:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:12:48.313618Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89hcpgdm7225r8vjjacs3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YmQwZmE0ZDQtODRkNTkwMTUtZDdkOGNlNmYtMWFlM2JlZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:48.315252Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7490574012892698425:2322], owner: [11:7490574012892698421:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:48.315715Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7490574012892698425:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:48.316321Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7490574012892698425:2322], row count: 3, finished: 1 2025-04-07T14:12:48.316358Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7490574012892698425:2322], owner: [11:7490574012892698421:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-07T14:12:48.318083Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035168312, txId: 281474976715661] shutting down |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TDatabaseResolverTests::MySQL >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:40.989850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:40.989946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:40.990001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:40.990037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:40.990905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:40.990959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:40.991037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:40.991924Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:41.063803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:41.063859Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:41.073729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:41.073860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:41.073989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:41.079211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:41.080953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:41.082789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.083015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:41.087631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.095750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:41.095828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.096050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:41.096110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:41.096156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:41.096316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.102621Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:41.202018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:41.202885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.203731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:41.205158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:41.205222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.207900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.208699Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:41.208870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.208992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:41.209056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:41.209110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:41.210824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.210869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:41.210901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:41.212549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.212610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.212719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.212774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.216821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:41.218430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:41.219458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:41.221123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.221269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:41.221309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.221560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:41.221610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.221742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:41.221808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:41.223698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:41.223735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:41.223862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.223910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:41.224094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.224141Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:41.224226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:41.224255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.224294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:41.224329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.224356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:41.224379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:41.224442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:41.224477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:41.224536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:41.234785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:41.234911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:41.234947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:41.234984Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:41.235043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:41.235169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
4 2025-04-07T14:12:49.665309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.665371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.665735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:49.665836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:12:49.665883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:12:49.666066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:49.666362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:12:49.666475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.666788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-04-07T14:12:49.667140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.667266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.667707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.667789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 
2025-04-07T14:12:49.668520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.668957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.669088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.669144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.669197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:49.669467Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:49.674715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:49.676518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1123:3059], Recipient [1:1123:3059]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:49.676587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:49.678012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:49.678081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:49.678862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1123:3059], Recipient [1:1123:3059]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:12:49.678914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:12:49.679668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:49.679733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:49.679783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:49.679821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:49.680860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1158:3059], Recipient [1:1123:3059]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:49.680915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:49.680951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1123:3059] sender: [1:1178:2058] recipient: [1:15:2062] 2025-04-07T14:12:49.708487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1177:3103], Recipient [1:1123:3059]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T14:12:49.708557Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:49.708692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:49.709016Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 336us result status StatusSuccess 2025-04-07T14:12:49.709828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 17748 Memory: 132944 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-04-07T14:12:50.625533Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable |95.2%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-04-07T14:09:44.192336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:44.192697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:44.192849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a3/r3tmp/tmpbEOgrV/pdisk_1.dat 2025-04-07T14:09:44.520426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23527, node 1 2025-04-07T14:09:44.744508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:44.744560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:44.744591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:44.745094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:44.747112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:44.839343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:44.839527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:44.854096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4703 2025-04-07T14:09:45.477126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:48.336710Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:48.374122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:48.374231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:48.413476Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:48.417553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:48.619802Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:48.674554Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:48.674645Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:48.706346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:48.706515Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:48.706727Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:48.706810Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:48.706863Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:48.706927Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:48.706979Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:48.707050Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:48.707463Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:48.922047Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:48.922155Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:48.929554Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:48.938179Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:48.939158Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:48.940383Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:48.965713Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:48.965768Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:48.965821Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:48.967354Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:48.967459Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:48.979967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:48.998013Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:48.998161Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:49.013793Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:49.030570Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:49.074473Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:49.360934Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:49.543788Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:50.319069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:50.319213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:50.335770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:50.561463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:50.561716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:50.561996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:50.562149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:50.562290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:50.562522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:50.562673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:50.562794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:50.562920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:50.563071Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:50.563201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:50.563346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:50.617309Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:50.617396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:50.617596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:50.617695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:50.617808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:50.617910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:12:49.087856Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:49.093134Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:12:49.109261Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:12:49.109874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:12:49.109978Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:12:49.110835Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:12:49.124286Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:12:49.124457Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:12:49.125171Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8176:6091], server id = [2:8181:6096], tablet id = 72075186224037899, status = OK 2025-04-07T14:12:49.125602Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8176:6091], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.125723Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8177:6092], server id = [2:8182:6097], tablet id = 72075186224037900, status = OK 2025-04-07T14:12:49.125771Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8177:6092], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.127015Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8178:6093], server id = [2:8183:6098], tablet id = 72075186224037901, status = OK 2025-04-07T14:12:49.127074Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8178:6093], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.127215Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:12:49.127834Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:12:49.128097Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8179:6094], server id = [2:8184:6099], tablet id = 72075186224037902, status = OK 2025-04-07T14:12:49.128146Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8179:6094], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.128277Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8180:6095], server id = [2:8185:6100], tablet id = 72075186224037903, status = OK 2025-04-07T14:12:49.128321Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8180:6095], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.128493Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:12:49.129243Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8176:6091], server id = [2:8181:6096], tablet id = 72075186224037899 2025-04-07T14:12:49.129285Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.129400Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8177:6092], server id = [2:8182:6097], tablet id = 72075186224037900 2025-04-07T14:12:49.129416Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.129488Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:12:49.129560Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:12:49.129883Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8178:6093], server id = [2:8183:6098], tablet id = 72075186224037901 2025-04-07T14:12:49.129915Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.129978Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8186:6101], server id = [2:8188:6103], tablet id = 72075186224037904, status = OK 2025-04-07T14:12:49.130041Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8186:6101], 
path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.130630Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8187:6102], server id = [2:8190:6105], tablet id = 72075186224037905, status = OK 2025-04-07T14:12:49.130684Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8187:6102], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.130830Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8179:6094], server id = [2:8184:6099], tablet id = 72075186224037902 2025-04-07T14:12:49.130846Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.131188Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8180:6095], server id = [2:8185:6100], tablet id = 72075186224037903 2025-04-07T14:12:49.131216Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.131326Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8189:6104], server id = [2:8191:6106], tablet id = 72075186224037906, status = OK 2025-04-07T14:12:49.131388Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8189:6104], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.131480Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:12:49.131739Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8192:6107], server id = [2:8194:6109], tablet id = 72075186224037907, status = OK 2025-04-07T14:12:49.131807Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8192:6107], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.132212Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8193:6108], server id = [2:8195:6110], tablet id = 72075186224037908, status = OK 2025-04-07T14:12:49.132283Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8193:6108], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:12:49.132949Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:12:49.133006Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:12:49.133120Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8186:6101], server id = [2:8188:6103], tablet id = 72075186224037904 2025-04-07T14:12:49.133136Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.133219Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:12:49.133264Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:12:49.133294Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:12:49.133417Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:12:49.133585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:12:49.133840Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:12:49.135167Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8187:6102], server id = [2:8190:6105], tablet id = 72075186224037905 2025-04-07T14:12:49.135189Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.135240Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8189:6104], server id = [2:8191:6106], tablet id = 72075186224037906 2025-04-07T14:12:49.135253Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.135562Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:12:49.135866Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8192:6107], server id = [2:8194:6109], tablet id = 72075186224037907 2025-04-07T14:12:49.135884Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.136099Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8193:6108], server id = [2:8195:6110], tablet id = 72075186224037908 2025-04-07T14:12:49.136116Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:12:49.150726Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8212:6127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:49.150905Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:12:49.150937Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8212:6127], StatRequests.size() = 1 2025-04-07T14:12:49.245913Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDdmMjg1MzItZDE4OTE5ZjEtMmFjYjg5MzktN2UzYWRjOTA=, TxId: 2025-04-07T14:12:49.245960Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDdmMjg1MzItZDE4OTE5ZjEtMmFjYjg5MzktN2UzYWRjOTA=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-07T14:12:49.246509Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:12:49.265281Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:12:49.265341Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:12:49.317026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8227:6137];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:12:49.352246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:12:49.355419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8227:6137];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:12:49.638648Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8280:6172]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:12:49.639039Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:49.639100Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:12:49.641940Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:12:49.642003Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:12:49.642056Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:12:49.647794Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpInplaceUpdate::SingleRowIf >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerWithFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:44.002878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:44.002949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:44.002974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:44.002997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:44.003035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:44.003054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:44.003086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:44.003305Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:44.052729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:44.052772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:44.061057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:44.061204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:44.061358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:44.065946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:44.066113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:44.066648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.066831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:44.068138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.069151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:44.069201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.069278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:44.069307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:44.069397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:44.069555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.075429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:44.174607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:44.174812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.175063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:44.175310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:44.175356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.177513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.177652Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:44.177811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.177853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:44.177882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:44.177942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:44.179866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.179927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:44.179969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:44.181964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.182017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.182133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.182210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.186084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:44.188273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:44.188418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:44.189221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:44.189331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:44.189369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.189565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:44.189598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:44.189727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:44.189789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:44.191467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:44.191520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:44.191670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:44.191719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:44.191975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:44.192022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:44.192099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:44.192140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.192172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:44.192203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:44.192232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:44.192255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:44.192308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:44.192340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:44.192365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:44.193766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:44.193864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:44.193904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:44.193941Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:44.193990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:44.194106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
WARN: Table profiles were not loaded 2025-04-07T14:12:50.773338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:50.774001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:50.774082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:12:50.774159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.774252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.774639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:50.774760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:12:50.774980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:50.775463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.775858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:12:50.776147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.776275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.776639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.776728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.776911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 
72057594046678944 2025-04-07T14:12:50.777416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.777989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:12:50.778221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:50.784863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:50.786425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1006:2953], Recipient [1:1006:2953]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:50.786475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:12:50.787379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:50.787440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:50.787592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1006:2953], Recipient [1:1006:2953]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:12:50.787628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:12:50.787782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:50.787846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:50.787890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:50.787921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:50.788167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1041:2953], Recipient [1:1006:2953]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:50.788208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:12:50.788240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1006:2953] sender: [1:1059:2058] recipient: [1:15:2062] 2025-04-07T14:12:50.819128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1058:2995], Recipient [1:1006:2953]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T14:12:50.819201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:50.819331Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:12:50.819708Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 324us result status StatusSuccess 2025-04-07T14:12:50.820777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82256 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::DropView >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 27162, MsgBus: 5801 2025-04-07T14:12:33.268716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573947906842918:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:33.268813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ea2/r3tmp/tmpL5Wbuz/pdisk_1.dat 2025-04-07T14:12:33.543986Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27162, node 1 2025-04-07T14:12:33.625219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:33.625245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:33.625252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:33.625332Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:33.634402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:33.634529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:33.636330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5801 TClient is connected to server 
localhost:5801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:34.120523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.139748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.264186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.406817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:34.472436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:35.964336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573956496779065:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:35.964426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.170526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.195539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.219042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.249426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.279959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.317400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:36.355498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960791746855:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.355575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.355629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573960791746860:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:36.358925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:36.368073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573960791746862:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:37.416521Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714472:2459] 2025-04-07T14:12:37.442148Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714485:2464] 2025-04-07T14:12:37.469270Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714490:2466] 2025-04-07T14:12:37.480265Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714506:2472] 2025-04-07T14:12:37.503337Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714513:2475] 2025-04-07T14:12:37.518227Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714528:2481] 2025-04-07T14:12:37.550858Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714560:2488] 2025-04-07T14:12:37.571843Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714572:2492] 2025-04-07T14:12:37.628035Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714588:2498] 2025-04-07T14:12:37.641032Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714599:2503] 2025-04-07T14:12:37.678982Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714613:2508] 2025-04-07T14:12:37.719678Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714628:2514] 2025-04-07T14:12:37.765145Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714661:2522] 2025-04-07T14:12:37.822242Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714668:2524] 2025-04-07T14:12:37.882402Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714689:2532] 2025-04-07T14:12:37.936841Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714726:2541] 2025-04-07T14:12:38.011670Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573965086714740:2546] 2025-04-07T14:12:38.058593Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682047:2551] 2025-04-07T14:12:38.108588Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682070:2556] 2025-04-07T14:12:38.180819Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682103:2567] 2025-04-07T14:12:38.248809Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682118:2573] 2025-04-07T14:12:38.273116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573947906842918:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:38.273392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:38.320841Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682155:2582] 2025-04-07T14:12:38.392428Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682180:2592] 2025-04-07T14:12:38.466464Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682227:2598] 2025-04-07T14:12:38.544653Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682270:2607] 2025-04-07T14:12:38.544845Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490573969381682308:2612] TxId: 281474976710673. Ctx: { TraceId: 01jr89h6e57mb3mhy3gmknj0v5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxM2UyODMtOWVkNWRhNWEtMzU1M2MyYjAtNjk2NGZhN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:12:38.562014Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWIxM2UyODMtOWVkNWRhNWEtMzU1M2MyYjAtNjk2NGZhN2I=, ActorId: [1:7490573969381682281:2612], ActorState: ExecuteState, TraceId: 01jr89h6e57mb3mhy3gmknj0v5, Create QueryResponse for error on request, msg: 2025-04-07T14:12:38.562390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158586, txId: 281474976710672] shutting down 2025-04-07T14:12:38.627206Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682313:2613] 2025-04-07T14:12:38.711799Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682352:2622] 2025-04-07T14:12:38.798177Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682375:2631] 2025-04-07T14:12:38.860095Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158887, txId: 281474976710675] shutting down 2025-04-07T14:12:38.958667Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682500:2655] 2025-04-07T14:12:39.054665Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7490573969381682524:2665] 2025-04-07T14:12:39.119259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035159153, txId: 281474976710677] shutting down 2025-04- ... [2:7490573992428211978:2297] 2025-04-07T14:12:50.345086Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037893, step: 1744035170353 2025-04-07T14:12:50.345132Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986321:2897]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490573992428211982:2299] 2025-04-07T14:12:50.345206Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037890, step: 1744035170353 2025-04-07T14:12:50.345244Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986320:2896]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490573992428211985:2300] 2025-04-07T14:12:50.345298Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986323:2899]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490573992428211980:2298] 2025-04-07T14:12:50.345623Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037892, step: 1744035170353 2025-04-07T14:12:50.345677Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986322:2898]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490573992428211978:2297] 2025-04-07T14:12:50.345736Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037891, step: 1744035170353 2025-04-07T14:12:50.345782Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037890, step: 1744035170353 2025-04-07T14:12:50.345825Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986320:2896]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490573992428211985:2300] 2025-04-07T14:12:50.345887Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037893, step: 1744035170353 2025-04-07T14:12:50.345985Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986321:2897]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490573992428211982:2299] 2025-04-07T14:12:50.346087Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986323:2899]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490573992428211980:2298] 2025-04-07T14:12:50.346145Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037890, step: 1744035170353 2025-04-07T14:12:50.346296Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037892, step: 1744035170353 2025-04-07T14:12:50.346349Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986322:2898]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490573992428211978:2297] 2025-04-07T14:12:50.346409Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986320:2896]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7490573992428211985:2300] 2025-04-07T14:12:50.346427Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986320:2896]. TKqpScanFetcherActor: broken tablet for this request 72075186224037890, retries limit exceeded (0/20) 2025-04-07T14:12:50.346605Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037891, step: 1744035170353 2025-04-07T14:12:50.346750Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037893, step: 1744035170353 2025-04-07T14:12:50.346800Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986321:2897]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490573992428211982:2299] 2025-04-07T14:12:50.346857Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037892, step: 1744035170353 2025-04-07T14:12:50.346903Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986323:2899]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490573992428211980:2298] 2025-04-07T14:12:50.346957Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986322:2898]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490573992428211978:2297] 2025-04-07T14:12:50.347164Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037891, step: 1744035170353 2025-04-07T14:12:50.347252Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037892, step: 1744035170353 2025-04-07T14:12:50.347300Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037893, step: 1744035170353 2025-04-07T14:12:50.347355Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986321:2897]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490573992428211982:2299] 2025-04-07T14:12:50.347423Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986322:2898]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7490573992428211978:2297] 2025-04-07T14:12:50.347432Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986322:2898]. TKqpScanFetcherActor: broken tablet for this request 72075186224037892, retries limit exceeded (0/20) 2025-04-07T14:12:50.347537Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986323:2899]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7490573992428211980:2298] 2025-04-07T14:12:50.347550Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986323:2899]. TKqpScanFetcherActor: broken tablet for this request 72075186224037893, retries limit exceeded (0/20) 2025-04-07T14:12:50.347804Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715681. Snapshot is not valid, tabletId: 72075186224037891, step: 1744035170353 2025-04-07T14:12:50.347874Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7490574022492986321:2897]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7490573992428211982:2299] 2025-04-07T14:12:50.347887Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986321:2897]. TKqpScanFetcherActor: broken tablet for this request 72075186224037891, retries limit exceeded (0/20) 2025-04-07T14:12:50.646098Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035170682, txId: 281474976715684] shutting down 2025-04-07T14:12:50.812124Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035170843, txId: 281474976715686] shutting down 2025-04-07T14:12:50.987858Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490574022492986730:2951] TxId: 281474976715689. Ctx: { TraceId: 01jr89hjg36qrcqqcf47464p4t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-07T14:12:50.988164Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==, ActorId: [2:7490574022492986695:2951], ActorState: ExecuteState, TraceId: 01jr89hjg36qrcqqcf47464p4t, Create QueryResponse for error on request, msg: 2025-04-07T14:12:50.988580Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035171025, txId: 281474976715688] shutting down 2025-04-07T14:12:50.988870Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986734:2955], TxId: 281474976715689, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89hjg36qrcqqcf47464p4t. SessionId : ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490574022492986730:2951], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:50.990139Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986736:2956], TxId: 281474976715689, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==. TraceId : 01jr89hjg36qrcqqcf47464p4t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490574022492986730:2951], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:50.990464Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986737:2957], TxId: 281474976715689, task: 3. Ctx: { TraceId : 01jr89hjg36qrcqqcf47464p4t. SessionId : ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490574022492986730:2951], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:50.990683Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986738:2958], TxId: 281474976715689, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==. TraceId : 01jr89hjg36qrcqqcf47464p4t. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490574022492986730:2951], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:50.990685Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715689. Snapshot is not valid, tabletId: 72075186224037895, step: 1744035171025 2025-04-07T14:12:50.990934Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490574022492986739:2959], TxId: 281474976715689, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzY0MWY1ZDAtYzA1NTIyOS01YTZhMzQ0Yy0yYzNmZjMyYg==. TraceId : 01jr89hjg36qrcqqcf47464p4t. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490574022492986730:2951], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:12:50.991770Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715689. Snapshot is not valid, tabletId: 72075186224037896, step: 1744035171025 2025-04-07T14:12:50.991941Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715689. Snapshot is not valid, tabletId: 72075186224037894, step: 1744035171025 2025-04-07T14:12:51.501943Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035171536, txId: 281474976715692] shutting down >> SystemView::TopPartitionsRanges [GOOD] >> TReplicationTests::CopyReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::DropView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:46.904539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:46.904638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:46.904705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:46.904742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:46.905316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:46.905357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:46.905425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:46.906393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:46.977009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:46.977073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:46.987595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:46.987765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:46.987917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:46.993615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:46.993797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:46.995609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:46.995915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:47.000201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:47.006555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.006705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:47.006855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.013914Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:47.115356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:47.116387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.117236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:47.118959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:47.119025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.121937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.122656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:47.122955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.123024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:47.123107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:47.123165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:47.124865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.124921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-07T14:12:47.124956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:47.126315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.126373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.126440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.126484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.130328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:47.131982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:47.132643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:47.133628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.134100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:47.134150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.134298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.134381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:47.136305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.136472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:47.136863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136913Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:47.137010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:47.137042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.137083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:47.137120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.137149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:47.137197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:47.137249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:47.137283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:47.137314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:47.144217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.144343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.144378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:47.144430Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:47.144472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.144581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
4-07T14:12:52.628226Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:52.628244Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:12:52.628264Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:12:52.628308Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-07T14:12:52.633706Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:52.633925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:12:52.635022Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1145 } } 2025-04-07T14:12:52.635051Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:12:52.635138Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1145 } } 2025-04-07T14:12:52.635217Z node 8 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1145 } } 2025-04-07T14:12:52.635626Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 400 RawX2: 34359740739 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.635661Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:12:52.635763Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 400 RawX2: 34359740739 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.635828Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:12:52.635908Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 400 RawX2: 34359740739 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.635962Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept 
TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:52.636008Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-07T14:12:52.637279Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.638057Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.660791Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 34359740658 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.660840Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-07T14:12:52.660930Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 302 RawX2: 34359740658 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.660963Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:12:52.661016Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 302 RawX2: 34359740658 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:12:52.661062Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:52.661091Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.661134Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:12:52.661174Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:12:52.661197Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:12:52.662719Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.663056Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.663114Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-07T14:12:52.663174Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-07T14:12:52.663238Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-07T14:12:52.663328Z node 8 :FLAT_TX_SCHEMESHARD INFO: 
TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-07T14:12:52.663363Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-07T14:12:52.664793Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.664849Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:12:52.664991Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:12:52.665033Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:52.665076Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:12:52.665156Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:338:2313] message: TxId: 102 2025-04-07T14:12:52.665223Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:52.665270Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:12:52.665301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:12:52.665422Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:12:52.665455Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:12:52.666883Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:12:52.666926Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:429:2393] TestWaitNotification: OK eventTxId 102 2025-04-07T14:12:52.667389Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:52.667608Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 250us result status StatusSuccess 2025-04-07T14:12:52.667942Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadTableSnapshots::ReadTableSnapshot >> DataShardReadTableSnapshots::ReadTableSplitBefore >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> DataShardReadTableSnapshots::ReadTableDropColumn >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:52.820081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:52.820192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.820229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:52.820257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:52.820290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:52.820319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:52.820374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.821266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:52.876489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:52.876538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:52.887147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:52.887284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:52.887413Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:52.894320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:52.894522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:52.896773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:52.897108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:52.902493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:52.910546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:52.910644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:52.910771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.916372Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:52.994417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:52.995480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.996493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:52.997952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:52.998008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.000357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.001478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:53.001704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.001756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-07T14:12:53.001819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:53.001859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:53.003495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.003566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:53.003596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:53.004887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.004941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.005009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.005054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.008611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:53.010069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:53.010930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:53.011724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.011845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.011895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.012163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:53.012208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.012387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.012455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:53.013959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.014003Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.014132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.014160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:53.014319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.014350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:53.014432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:53.014455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.014510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:53.014539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.014564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:53.014604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:53.014645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.014687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:53.014719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:53.021184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.021305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.021346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:53.021394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:53.021442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.021588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
leModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-07T14:12:53.080397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-07T14:12:53.080546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-07T14:12:53.080640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-07T14:12:53.082010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:12:53.082117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:12:53.082424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.082538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.082588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-07T14:12:53.082695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:12:53.082838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.082886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:12:53.084614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.084687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.084821Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:12:53.084921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.084962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:12:53.084994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:12:53.085141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.085169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:12:53.085234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:12:53.085265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.085300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:12:53.085334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.085356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:12:53.085380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:12:53.085421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:12:53.085455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:12:53.085498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:12:53.085524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:12:53.085945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.086012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.086038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.086062Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:12:53.086094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.086600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.086649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.086664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.086682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:12:53.086700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:53.086755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:12:53.087355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:12:53.087389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:12:53.087438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.088601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.089747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.089865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-07T14:12:53.090072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:12:53.090102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-04-07T14:12:53.090156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:12:53.090177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:12:53.090516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:12:53.090573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.090597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:324:2316] 2025-04-07T14:12:53.090642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:12:53.090700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.090723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy 
waiter [1:324:2316] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-04-07T14:12:53.091018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.091197Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 149us result status StatusPathDoesNotExist 2025-04-07T14:12:53.091313Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:52.819209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:52.819303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.819335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:52.819367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:52.820250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:52.820289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:52.820362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.821264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:52.888222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:52.888285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:52.897665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:52.897768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:52.897871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T14:12:52.901806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:52.901940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:52.902409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:52.902565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:52.903874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:52.910615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:52.910659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:52.910786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.916609Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:53.014086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:53.014277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.014438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:53.014637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:53.014679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.016621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.016758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:53.016891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.016935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:53.016984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:53.017018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:53.018554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.018597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:53.018621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:53.020118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.020162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.020215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.020277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.023146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:53.024393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:53.024535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:53.025281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.025384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.025428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.025620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:53.025656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.025804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.025877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:53.027172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.027210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.027351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.027385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:53.027561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.027592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:53.027665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:53.027688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.027718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:53.027752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.027799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:53.027821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:53.027870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.027918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:53.027944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:53.029234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.029308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.029334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:53.029367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:53.029404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.029470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
nside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-07T14:12:53.070411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:53.070554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-04-07T14:12:53.070691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:12:53.070745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:53.072838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T14:12:53.072981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-04-07T14:12:53.073111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.073172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-04-07T14:12:53.073230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.073320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:53.074671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:12:53.074768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:12:53.074994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.075078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.075124Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-07T14:12:53.075217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:12:53.075335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.075394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:12:53.076973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.077031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.077177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:12:53.077324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.077366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:12:53.077413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:12:53.077620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.077667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:12:53.077761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:12:53.077812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.077856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:12:53.077896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.077950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:12:53.077988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:12:53.078049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:12:53.078086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:12:53.078138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:12:53.078180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:12:53.079038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.079178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.079216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.079251Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:12:53.079288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.080295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.080357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.080378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.080411Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:12:53.080433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:53.080488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:12:53.080768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:12:53.080808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:12:53.080857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.082310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.083494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.083581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:12:53.083756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:12:53.083787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:12:53.084133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:12:53.084206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.084234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [1:318:2310] TestWaitNotification: OK eventTxId 102 2025-04-07T14:12:53.084547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.084735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 133us result status StatusPathDoesNotExist 2025-04-07T14:12:53.084860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:52.819224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:52.819300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.819332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:52.819353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:52.820255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:52.820303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:52.820359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:52.821290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:52.877923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:52.877991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:52.889301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:52.889459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:52.889614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:52.895924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:52.896114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:52.896791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:52.896998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:52.902879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:52.910645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:52.910701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:52.910759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:52.910941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:52.916798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:53.021501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:53.021680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.021826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:53.021977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:53.022012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.023571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.023699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:53.023823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.023868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:53.023918Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:53.023949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:53.025250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.025289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:53.025313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:53.026371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.026397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.026432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.026464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.028737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:53.029862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:53.029979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:53.030636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:53.030719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.030758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.030947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:53.030981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:53.031102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.031151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:53.032370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.032412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.032532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.032558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:53.032724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.032754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:53.032813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:53.032835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.032866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:53.032898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:53.032926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:53.032947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:53.032985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.033019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:53.033041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:53.034211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.034295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:53.034323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:53.034346Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:53.034375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.034436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
t reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:12:53.093361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:12:53.093383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:12:53.093408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:12:53.093934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-07T14:12:53.093989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.094020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:12:53.094047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-07T14:12:53.094704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.094765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.094789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.094812Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:12:53.094855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:12:53.096022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.096091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.096116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.096138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:12:53.096158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:12:53.096216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:12:53.098068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.098937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-07T14:12:53.099221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:12:53.099254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-07T14:12:53.099333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:12:53.099351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-07T14:12:53.099401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:12:53.099419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:12:53.099908Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:12:53.100057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.100099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:330:2322] 2025-04-07T14:12:53.100271Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:12:53.100434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:12:53.100524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.100552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2322] 2025-04-07T14:12:53.100654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.100692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:330:2322] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-07T14:12:53.101164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.101331Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 186us result status StatusSuccess 2025-04-07T14:12:53.102669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.103260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.103467Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 173us result status StatusSuccess 2025-04-07T14:12:53.103727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:53.104088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.104231Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 124us result status StatusSuccess 2025-04-07T14:12:53.104375Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch >> ReadSessionImplTest::DecompressRaw >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> Compression::WriteRAW >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> 
ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TopPartitionsRanges [GOOD] Test command err: 2025-04-07T14:12:09.913673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573845527379332:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:09.913724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010aa/r3tmp/tmp0oNwux/pdisk_1.dat 2025-04-07T14:12:10.253503Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:10.295688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:10.295822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:10.297711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24714, node 1 2025-04-07T14:12:10.427539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:10.427569Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:10.427580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:10.427667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:10.792847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:12.308586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858412281855:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.308641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573858412281843:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.308732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:12.314275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:12:12.321288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573858412281857:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:12:13.068891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89gbet69g6ny0dgqsjfte2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIwODE5YjAtZmMwOWVmNzYtOGRiZmU4NzUtZGY2YmNmMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:13.141047Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573862707249242:2312], owner: [1:7490573862707249238:2310], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:13.141890Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573862707249242:2312], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:13.153299Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573862707249242:2312], row count: 1, finished: 1 2025-04-07T14:12:13.153462Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573862707249242:2312], owner: [1:7490573862707249238:2310], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:13.165527Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035133048, txId: 281474976710660] shutting down 2025-04-07T14:12:14.304554Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr89geqb0z7g9zc9kgypmzn7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIzNDM1MjctOWYzNTBjZjktMThkMzRiNTgtM2EwZDRjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:14.306457Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573867002216584:2327], owner: [1:7490573867002216579:2325], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:14.307246Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573867002216584:2327], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:14.307492Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573867002216584:2327], row count: 1, finished: 1 2025-04-07T14:12:14.307520Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573867002216584:2327], owner: [1:7490573867002216579:2325], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:14.308866Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035134300, txId: 281474976710662] shutting down 2025-04-07T14:12:14.914252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573845527379332:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:14.914357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:15.442417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89gftk986zrtd5ye3fg6by, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIyYzgzNTYtYjZhY2MwNzktZjNjYzRhNWYtMTRmMWMwOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:15.444232Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490573871297183919:2338], owner: [1:7490573871297183915:2336], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:15.444760Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490573871297183919:2338], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:12:15.445005Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490573871297183919:2338], row count: 2, finished: 1 2025-04-07T14:12:15.445032Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490573871297183919:2338], owner: [1:7490573871297183915:2336], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-07T14:12:15.446927Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035135441, txId: 281474976710664] shutting down 2025-04-07T14:12:16.184968Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573873666235647:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.185015Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010aa/r3tmp/tmpdMjnoA/pdisk_1.dat 2025-04-07T14:12:16.319451Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:16.353276Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.353356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.356556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20633, node 2 2025-04-07T14:12:16.418520Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:16.418557Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:16.418571Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:16.418753Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:16.706001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:16.731281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:16.749818Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490573875479299609:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.749865Z node 6 :METADATA_PR ... query count# 0, persisted# 0 2025-04-07T14:12:50.518911Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.521881Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-04-07T14:12:50.526282Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7490573947766546073:2186] 2025-04-07T14:12:50.527556Z node 10 :SYSTEM_VIEWS DEBUG: Send counters: service id# [10:7490573947766546073:2186], processor id# 72075186224037893, database# /Root/Tenant1, generation# 11302142586626850836, node id# 10, is retrying# 0, is labeled# 0 2025-04-07T14:12:50.527708Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr89hj2h5qmzncyrhat3azcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzlkMGIwZDQtZTY3MzA0Y2MtZDc3Y2FkZTItNjRkNTAwN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:50.527756Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] TEvSendDbCountersRequest: node id# 10, generation# 11302142586626850836, services count# 1, request size# 1023 2025-04-07T14:12:50.527836Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvSendDbCountersResponse: service id# [10:7490573947766546073:2186], database# /Root/Tenant1, generation# 11302142586626850836 2025-04-07T14:12:50.529906Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490574019353744387:2464], owner: [7:7490574019353744383:2462], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2025-04-07T14:12:50.530456Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490574019353744387:2464], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2025-04-07T14:12:50.530980Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1744035169000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1744035169000000 Rank: 3 } InclusiveTo: false Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 153, next# 2025-04-07T14:12:50.531228Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490574019353744387:2464], row count: 2, finished: 1 2025-04-07T14:12:50.531270Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490574019353744387:2464], owner: [7:7490574019353744383:2462], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2025-04-07T14:12:50.531923Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7490573947350156179:2063] 2025-04-07T14:12:50.533172Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding 
snapshot; our snapshot: [step: 1744035170526, txId: 281474976715683] shutting down 2025-04-07T14:12:50.545279Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxAggregate::Execute 2025-04-07T14:12:50.545346Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryResults: interval end# 2025-04-07T14:12:50.000000Z, query count# 0 2025-04-07T14:12:50.545366Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 8, interval end# 2025-04-07T14:12:50.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545382Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 10, interval end# 2025-04-07T14:12:50.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545396Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 12, interval end# 2025-04-07T14:12:50.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545413Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 14, interval end# 2025-04-07T14:12:50.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545431Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 9, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545446Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 11, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545463Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 13, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.545480Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 15, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:12:50.548254Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxAggregate::Complete 2025-04-07T14:12:50.552904Z node 7 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [7:7490573946339297885:2155] 2025-04-07T14:12:50.584133Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7490573949098087006:2063] 2025-04-07T14:12:50.617432Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7490573947766545884:2063] 2025-04-07T14:12:50.632265Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7490573947766546073:2186] 2025-04-07T14:12:50.660029Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr89hj7ca3thvfvbvbz3q7ta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MWFiMjQ3OWEtNGVhOTBjNGYtNWFmZmYyZGItMjk3YzVmOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:12:50.662505Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490574019353744421:2473], owner: [7:7490574019353744418:2471], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2025-04-07T14:12:50.662989Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490574019353744421:2473], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2025-04-07T14:12:50.663619Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1744035169000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1744035169000000 Rank: 3 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 229, next# 2025-04-07T14:12:50.664057Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490574019353744421:2473], row count: 3, finished: 1 2025-04-07T14:12:50.664090Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490574019353744421:2473], owner: [7:7490574019353744418:2471], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2025-04-07T14:12:50.665931Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035170659, txId: 281474976715685] shutting down 2025-04-07T14:12:50.681800Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-07T14:12:50.682364Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:50.682501Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-07T14:12:50.682967Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:50.683144Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-07T14:12:50.683715Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:50.684834Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-07T14:12:50.684856Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7490573947350156270:2090], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-07T14:12:50.684965Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7490573947350156270:2090], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-07T14:12:50.685324Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7490573953393054410:2110], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-07T14:12:50.685428Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7490573953393054410:2110], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-07T14:12:50.686035Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:12:50.688010Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7490573950663228035:2105], Type=268959746 2025-04-07T14:12:50.688052Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7490573950663228035:2105], Type=268959746 2025-04-07T14:12:50.688073Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered 
Sender=[9:7490573950663228035:2105], Type=268959746 2025-04-07T14:12:50.689361Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7490573947766546073:2186], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-07T14:12:50.689502Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7490573947766546073:2186], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-07T14:12:50.690649Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7490573950663227880:2123], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-07T14:12:50.691087Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7490573950663227880:2123], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-07T14:12:50.710615Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [9:7490573950663227880:2123] 2025-04-07T14:12:50.712325Z node 9 :SYSTEM_VIEWS DEBUG: Send counters: service id# [9:7490573950663227880:2123], processor id# 72075186224037899, database# /Root/Tenant2, generation# 1455302418597141916, node id# 9, is retrying# 0, is labeled# 0 2025-04-07T14:12:50.743868Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7490573947350156270:2090] 2025-04-07T14:12:50.744749Z node 11 :SYSTEM_VIEWS DEBUG: Send counters: service id# [11:7490573947350156270:2090], processor id# 72075186224037893, database# /Root/Tenant1, generation# 9047938029864707868, node id# 11, is retrying# 0, is labeled# 0 2025-04-07T14:12:50.746456Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7490573950663227880:2123], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-07T14:12:50.746539Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7490573950663227880:2123], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-07T14:12:50.766919Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [9:7490573950663227880:2123] 2025-04-07T14:12:50.777288Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7490573947350156270:2090], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-07T14:12:50.777339Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7490573947350156270:2090], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-07T14:12:50.956923Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7490573947766545884:2063] 2025-04-07T14:12:50.972749Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [9:7490573946368260458:2063] 2025-04-07T14:12:50.982996Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7490573953393054410:2110] |95.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ColumnBuildTest::CancelBuild [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:46.904503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:46.904593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:46.904648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:46.904696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:46.905286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:46.905309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:46.905357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:46.906412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:46.977010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:46.977062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:46.986816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:46.986937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:46.987105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:46.993574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:46.993834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:46.995613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:46.995821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:47.000277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.006533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:47.006596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.006643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:47.006817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.013943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:47.121650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:47.121853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.122063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:47.122274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:47.122330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.124344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.124448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:47.124603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.124649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:47.124709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:47.124767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:47.126272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.126311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:47.126332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:47.127423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.127465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.127512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.127545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.130176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:47.131649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:47.132619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:47.133495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:47.133691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.133999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:47.134050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:47.134241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.134304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:47.136053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:47.136208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:47.136393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:47.136433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:47.136516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:47.136544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.136580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:47.136610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:47.136634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:47.136701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:47.136763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:47.136802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:47.136829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:47.138435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.138527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:47.138556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:47.138598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:47.138636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:47.138727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... untTxs#1 2025-04-07T14:12:53.502147Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000003 2025-04-07T14:12:53.502178Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-04-07T14:12:53.502297Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [9:123:2149], Recipient [9:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:12:53.502316Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:12:53.502373Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:53.502394Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:53.502471Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:12:53.502573Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:53.502600Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:12:53.502629Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:12:53.502661Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:12:53.502688Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:12:53.502778Z node 9 :FLAT_TX_SCHEMESHARD 
TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:53.502801Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:12:53.502829Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.502868Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:12:53.502901Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:12:53.502948Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:12:53.502972Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:12:53.503055Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:12:53.503090Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:12:53.503125Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:12:53.503151Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:12:53.503978Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:203:2206], Recipient [9:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-04-07T14:12:53.504007Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T14:12:53.504044Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.504088Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.504112Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.504161Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:12:53.504194Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:53.504258Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:53.505212Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:203:2206], Recipient [9:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-04-07T14:12:53.505244Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T14:12:53.505284Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 
PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.505340Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:12:53.505364Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:12:53.505388Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:12:53.505413Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:12:53.505485Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:12:53.505522Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:12:53.505763Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [9:123:2149], Recipient [9:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-07T14:12:53.505795Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-07T14:12:53.505851Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:12:53.505894Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:12:53.505955Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:53.507113Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:53.507684Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.507712Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:53.508793Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:12:53.508826Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:12:53.508928Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:12:53.509156Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:12:53.509202Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:12:53.509614Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:441:2397], Recipient [9:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:53.509666Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:53.509709Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe 
server connected, at tablet: 72057594046678944 2025-04-07T14:12:53.509817Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:359:2339], Recipient [9:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-04-07T14:12:53.509856Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-07T14:12:53.509920Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:12:53.510010Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:12:53.510050Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:439:2395] 2025-04-07T14:12:53.510230Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:441:2397], Recipient [9:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:53.510261Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:12:53.510295Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-07T14:12:53.510667Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:442:2398], Recipient [9:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-07T14:12:53.510722Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:12:53.510818Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:53.511003Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 195us result status StatusPathDoesNotExist 2025-04-07T14:12:53.511148Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-07T14:12:54.031435Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.031487Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.031517Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 
0.000000s 2025-04-07T14:12:54.033165Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.035559Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.046964Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.048408Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.052560Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.052576Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.052591Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.052933Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.053371Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.053461Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.053618Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.053881Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:12:54.055547Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.055585Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.055614Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.055949Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.057137Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.057269Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.057427Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.063230Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.066506Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.066619Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.066671Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:12:54.067666Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.067685Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.067724Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.067949Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:12:54.068232Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.068341Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.068487Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2025-04-07T14:12:54.069235Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.069463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:12:54.069694Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:12:54.069825Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:12:54.069908Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.069930Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.069955Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.070100Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.070129Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:54.070143Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:12:54.070157Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.070246Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.070303Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:12:54.070317Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2025-04-07T14:12:54.070341Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.070415Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.070435Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:12:54.070451Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:12:54.070469Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.070541Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-07T14:12:54.071539Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.071558Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.071573Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.071850Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.072235Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.072354Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.072516Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T14:12:54.073251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.073408Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:12:54.073704Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:12:54.073926Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:12:54.074047Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.074075Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.074090Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:54.074103Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:12:54.074125Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.074304Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.074393Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:12:54.074413Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T14:12:54.074428Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:12:54.074442Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:12:54.074458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.074608Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-04-07T14:12:54.075695Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.075712Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.075726Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.076007Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.076469Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.076591Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.076774Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.077605Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.078454Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.078740Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-07T14:12:54.078845Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:12:54.078961Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.078987Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.079017Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-07T14:12:54.079033Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-07T14:12:54.079070Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-07T14:12:54.079091Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T14:12:54.079213Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:12:54.079324Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] |95.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-07T14:12:54.042012Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.042044Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.042064Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.042397Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:12:54.042436Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.042455Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.043297Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007704s 2025-04-07T14:12:54.043721Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.044051Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.044117Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.045854Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.045864Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.045873Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.046090Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-07T14:12:54.046111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.046130Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.046173Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005952s 2025-04-07T14:12:54.046473Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.046797Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.046850Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.047535Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.047553Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.047570Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.048104Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:12:54.048129Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.048156Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.048195Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.196887s 2025-04-07T14:12:54.048532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.048783Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.048821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.049363Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.049378Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.049391Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.049662Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:12:54.049689Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.049706Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.049737Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.211837s 2025-04-07T14:12:54.050045Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.050340Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.050384Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.050978Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.050996Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.051028Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.051193Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.051766Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.059951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.060185Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-07T14:12:54.060214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.060228Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.060269Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.264827s 2025-04-07T14:12:54.060579Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:12:54.061605Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.061620Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.061632Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.061863Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.062149Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.062233Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.062527Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.163475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.163742Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:12:54.163811Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.163855Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:12:54.163918Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:12:54.264226Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:12:54.264359Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:12:54.265356Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.265378Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.265408Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.265779Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.266277Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.266412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.266732Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.367620Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.367800Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:12:54.367852Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.367885Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-07T14:12:54.367961Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:12:54.368049Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:12:54.368125Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:12:54.368248Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:12:54.368867Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpOlapScheme::DropTableAfterInsert [GOOD] >> KqpOlapScheme::InitTtlSettingsOnShardStart >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:51.111014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:51.111135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:51.111174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:51.111219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:51.111824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:51.111861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:51.111913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:51.113045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:51.171914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:51.171954Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:51.181227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:51.181364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:51.181521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:51.188755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:51.189036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:51.190819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:51.191098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:51.196401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:51.204474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:51.204534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:51.204720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:51.204771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:51.204876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:51.204995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.211036Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:51.289026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:51.289923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.290806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:51.292683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:51.292758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.295762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:51.296571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:51.296838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.296898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:51.297003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:51.297041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:51.298884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.298936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:51.298968Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:51.300477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.300516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.300558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:51.300614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:51.304936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:51.306591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:51.307951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:51.308854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:51.308983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:51.309024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:51.309248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:51.309283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:51.309416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:51.309462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:51.311039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:51.311079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:51.311184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:51.311220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:51.311383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:51.311411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:51.311471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:51.311511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:51.311556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:51.311588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:51.311610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:51.311630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:51.311676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:51.311713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:51.311743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:51.313280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:51.313377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:51.313407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:51.313438Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:51.313494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:51.313581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
sAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:12:54.051079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-07T14:12:54.051158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-07T14:12:54.051408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-07T14:12:54.051433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-07T14:12:54.051458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-07T14:12:54.051586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:54.051650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:54.051685Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-04-07T14:12:54.051736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-04-07T14:12:54.053070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-07T14:12:54.053112Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-04-07T14:12:54.053162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-07T14:12:54.053181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-07T14:12:54.053204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-04-07T14:12:54.053261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2152] message: TxId: 281474976710761 2025-04-07T14:12:54.053292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-07T14:12:54.053318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-07T14:12:54.053340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-07T14:12:54.053383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-07T14:12:54.054811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-07T14:12:54.054882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-07T14:12:54.054935Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-04-07T14:12:54.055000Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1138:3002], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:12:54.056303Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:12:54.056381Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1138:3002], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:12:54.056424Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-07T14:12:54.057744Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:12:54.057819Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, 
CreateSender: [1:1138:3002], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:12:54.057879Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T14:12:54.058045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:12:54.058098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1162:3026] TestWaitNotification: OK eventTxId 102 2025-04-07T14:12:54.060399Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-07T14:12:54.060767Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2025-04-07T14:12:54.062685Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:12:54.062905Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 242us result status StatusSuccess 2025-04-07T14:12:54.063305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: 
"Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-07T14:12:54.052069Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.052121Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.052142Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.052505Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.054454Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.054512Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.055382Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.055426Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.055441Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.055775Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.056061Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-07T14:12:54.056103Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.056888Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.056909Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.056926Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.057206Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-07T14:12:54.057242Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.057263Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.057710Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-07T14:12:54.058475Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.058490Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.058502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.058854Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-07T14:12:54.058886Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.058907Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.058952Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-07T14:12:54.059879Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:12:54.059914Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:12:54.059940Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.060237Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.060697Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.069156Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:12:54.069483Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.069804Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2025-04-07T14:12:54.072598Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-07T14:12:54.072847Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.072895Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.072916Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:54.072930Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T14:12:54.072948Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T14:12:54.072961Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T14:12:54.072975Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-07T14:12:54.072988Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-07T14:12:54.073129Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-07T14:12:54.073158Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-07T14:12:54.073174Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-07T14:12:54.073188Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-07T14:12:54.073212Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-07T14:12:54.073229Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-07T14:12:54.073246Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-07T14:12:54.073261Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-07T14:12:54.073351Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-07T14:12:54.073368Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-07T14:12:54.073386Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-07T14:12:54.073399Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-07T14:12:54.073412Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-07T14:12:54.073425Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-07T14:12:54.073439Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-07T14:12:54.073453Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-07T14:12:54.073466Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-07T14:12:54.073491Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-07T14:12:54.073507Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-07T14:12:54.073521Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-07T14:12:54.073533Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-07T14:12:54.073545Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-07T14:12:54.073558Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-07T14:12:54.073571Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-07T14:12:54.073617Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-07T14:12:54.073638Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-07T14:12:54.073656Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-07T14:12:54.073669Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-07T14:12:54.073684Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-07T14:12:54.073696Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-07T14:12:54.073709Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-07T14:12:54.073722Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-07T14:12:54.073733Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-07T14:12:54.073755Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-07T14:12:54.073770Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-07T14:12:54.073785Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-07T14:12:54.073798Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-07T14:12:54.073811Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-07T14:12:54.073823Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-07T14:12:54.073836Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-07T14:12:54.073849Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-07T14:12:54.073861Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-07T14:12:54.073911Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T14:12:54.075656Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-07T14:12:54.075827Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-07T14:12:54.075857Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-07T14:12:54.075872Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-07T14:12:54.075890Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-07T14:12:54.075906Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-07T14:12:54.075931Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-07T14:12:54.075957Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-07T14:12:54.075972Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-07T14:12:54.075988Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-07T14:12:54.076001Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-07T14:12:54.076017Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-07T14:12:54.076030Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-07T14:12:54.076049Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-07T14:12:54.076066Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-07T14:12:54.076079Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-07T14:12:54.076095Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-07T14:12:54.076124Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-07T14:12:54.076137Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-07T14:12:54.076149Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-07T14:12:54.076160Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-07T14:12:54.076172Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-07T14:12:54.076183Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-07T14:12:54.076204Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-07T14:12:54.076219Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-07T14:12:54.076231Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-07T14:12:54.076242Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-07T14:12:54.076261Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-07T14:12:54.076274Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-07T14:12:54.076285Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-07T14:12:54.076297Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-07T14:12:54.076310Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-07T14:12:54.076321Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-07T14:12:54.076350Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-07T14:12:54.076366Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-07T14:12:54.076378Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-07T14:12:54.076389Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-07T14:12:54.076403Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-07T14:12:54.076415Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-07T14:12:54.076436Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-07T14:12:54.076451Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-07T14:12:54.076463Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-07T14:12:54.076475Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-07T14:12:54.076489Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-07T14:12:54.076502Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-07T14:12:54.076514Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-07T14:12:54.076526Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-07T14:12:54.076545Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-07T14:12:54.076570Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-07T14:12:54.076585Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-07T14:12:54.076597Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-07T14:12:54.076640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-07T14:12:54.077297Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-07T14:12:54.079036Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.079073Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.079090Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.079421Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:12:54.079784Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.079909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.080233Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.181122Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.181347Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:12:54.181408Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.181438Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:12:54.181498Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:12:54.381825Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:12:54.483900Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:12:54.484022Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-07T14:12:54.484142Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-07T14:12:54.485260Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.485279Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.485295Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.491522Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.492035Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.492238Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.492953Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.593891Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.596844Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-07T14:12:54.596905Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.596939Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-07T14:12:54.597017Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-07T14:12:54.597119Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-07T14:12:54.597310Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-07T14:12:54.597394Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-07T14:12:54.597518Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TTopicReaderTests::TestRun_ReadOneMessage >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-07T14:12:54.069235Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.069278Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.069297Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.069668Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:12:54.070115Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.077617Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.078035Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.079408Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.079852Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.079979Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T14:12:54.080066Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.080137Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.080164Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T14:12:54.080197Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:12:54.080212Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:12:54.081326Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.081349Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.081370Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.081605Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.081974Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.082063Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.082260Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2025-04-07T14:12:54.083040Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.083214Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:12:54.083458Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:12:54.083696Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:12:54.083811Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.083833Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.083855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.083971Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.083995Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:54.084006Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:12:54.084016Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.084086Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.084136Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-07T14:12:54.084146Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-07T14:12:54.084161Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.084206Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.084234Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-07T14:12:54.084244Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-07T14:12:54.084254Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:54.084317Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-07T14:12:54.085303Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.085319Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.085342Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.085666Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.086043Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.086197Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.086355Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2025-04-07T14:12:54.087114Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:12:54.087255Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-07T14:12:54.087491Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-07T14:12:54.087687Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-07T14:12:54.087757Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.087782Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.087843Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.087861Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:54.087870Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.087905Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.087915Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:54.087924Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2025-04-07T14:12:54.087967Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-04-07T14:12:54.087980Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-07T14:12:54.087989Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:55.227181Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-04-07T14:12:55.319588Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:12:55.319617Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:12:55.319645Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:55.320232Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:55.320727Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:55.320928Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-07T14:12:55.321966Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-07T14:12:55.389681Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-07T14:12:55.389848Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:55.389879Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:12:55.389892Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:12:55.389906Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-07T14:12:55.389919Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-07T14:12:55.389929Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-07T14:12:55.389938Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-07T14:12:55.389948Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-07T14:12:55.389963Z :DEBUG: Take Data. Partition 1. 
Read: {1, 2} (9-9) 2025-04-07T14:12:55.389975Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-07T14:12:55.390007Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-07T14:12:55.390130Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-07T14:12:55.392025Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-04-07T14:12:55.406340Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.406377Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.406411Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:55.406694Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:55.407234Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:55.407384Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.407743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:55.408159Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-07T14:12:55.409326Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.409364Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.409396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:55.409754Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:55.410064Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:55.410174Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.410645Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:55.410982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:55.411077Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:55.411129Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:12:55.411264Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> KqpInplaceUpdate::SingleRowIf [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDeleteTabletWithFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf [GOOD] Test command err: Trying to start YDB, gRPC: 11349, MsgBus: 13149 2025-04-07T14:12:51.960911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574024957556268:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:51.961258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ee6/r3tmp/tmpQK1NMF/pdisk_1.dat 2025-04-07T14:12:52.241950Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11349, node 1 2025-04-07T14:12:52.332837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:52.332918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:52.334359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:52.379001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:52.379022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:52.379038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:52.379105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13149 TClient is connected to server localhost:13149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:52.837472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
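The ReadSessionImplTest traces above (Confirm partition stream create, Take Data, Commit offsets, Confirm partition stream destroy) walk through the consumer-side lifecycle of a topic read session. The sketch below is only an illustration of that lifecycle, assuming the public NYdb::NTopic C++ client rather than the persqueue-core test harness that produced the log; the include path, endpoint, database, consumer, and topic names are placeholder assumptions, and the event-handling API may differ between SDK versions.

    #include <ydb-cpp-sdk/client/topic/client.h>   // include path is an assumption; it varies between SDK layouts
    #include <iostream>
    #include <variant>

    int main() {
        // Placeholder connection parameters -- not values taken from the log above.
        auto driver = NYdb::TDriver(NYdb::TDriverConfig()
                                        .SetEndpoint("grpc://localhost:2136")
                                        .SetDatabase("/Root"));
        NYdb::NTopic::TTopicClient client(driver);

        // Hypothetical consumer and topic names.
        auto settings = NYdb::NTopic::TReadSessionSettings()
                            .ConsumerName("example-consumer")
                            .AppendTopics("example-topic");
        auto session = client.CreateReadSession(settings);

        while (true) {
            auto event = session->GetEvent(/*block=*/true);
            if (!event) {
                continue;
            }
            if (auto* data = std::get_if<NYdb::NTopic::TReadSessionEvent::TDataReceivedEvent>(&*event)) {
                for (const auto& msg : data->GetMessages()) {
                    std::cout << "offset " << msg.GetOffset() << ", " << msg.GetData().size() << " bytes\n";
                }
                data->Commit();   // analogous to the "Commit offsets [a, b)" lines in the trace
            } else if (auto* start = std::get_if<NYdb::NTopic::TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
                start->Confirm(); // "Confirm partition stream create"
            } else if (auto* stop = std::get_if<NYdb::NTopic::TReadSessionEvent::TStopPartitionSessionEvent>(&*event)) {
                stop->Confirm();  // "Confirm partition stream destroy"
            } else if (std::get_if<NYdb::NTopic::TSessionClosedEvent>(&*event)) {
                break;            // session closed by server or client
            }
        }

        driver.Stop(true);
        return 0;
    }

Committing the whole TDataReceivedEvent at once corresponds to the batched commit ranges seen in the trace (for example "Commit offsets [1, 201)"), rather than per-message acknowledgements.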
2025-04-07T14:12:52.869093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:52.971282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:53.073938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:53.142737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:53.864885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574033547492401:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:53.865006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:54.247667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.269377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.289894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.310895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.337302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.403973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:54.470120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574037842460228:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:54.470188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:54.470235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574037842460233:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:54.475157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:54.483813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574037842460235:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:55.632265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestFollowers |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] >> TableWriter::Backup [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> KqpOlapScheme::InitTtlSettingsOnShardStart [GOOD] >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowerPromotion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] Test command err: 2025-04-07T14:12:56.355498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:56.355833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:56.356059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d3c/r3tmp/tmpWjwd6Y/pdisk_1.dat 2025-04-07T14:12:56.691731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.730429Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.778750Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:12:56.779806Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:12:56.780035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.780148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.791563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.900692Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:12:56.900769Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:12:56.900902Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:12:56.995769Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:12:56.996329Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:12:56.996418Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:12:56.996710Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:12:56.996897Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:12:56.996987Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:12:56.997269Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:12:56.998578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.999430Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:12:56.999504Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:12:57.035552Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:12:57.036594Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:12:57.037062Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:12:57.037358Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:57.070001Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:12:57.070603Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:57.070705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:12:57.071930Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:12:57.071999Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:12:57.072066Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:12:57.072327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:12:57.120428Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:12:57.120629Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:12:57.120810Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:12:57.120869Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:12:57.120904Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:12:57.120938Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:12:57.121282Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.121328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.121529Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:12:57.121631Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:12:57.122041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.122078Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.122123Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:12:57.122215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:12:57.122272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:57.122349Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:12:57.122393Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:12:57.122427Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:12:57.122467Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:12:57.122514Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:12:57.122688Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:12:57.122732Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:12:57.122811Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:12:57.123045Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:12:57.123095Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:12:57.123184Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:12:57.123256Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:12:57.123303Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:12:57.123344Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:12:57.123385Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.123660Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:12:57.123705Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:12:57.123736Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:12:57.123766Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.123833Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:12:57.123866Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:12:57.123901Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:12:57.123936Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:12:57.123961Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:12:57.125338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:12:57.125389Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:12:57.136064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:12:57.136170Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.136204Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.136262Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... onse 2025-04-07T14:12:58.853712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:985:2786], Recipient [1:985:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:58.853736Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:58.853775Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T14:12:58.853802Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:12:58.853860Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for WaitForStreamClearance 2025-04-07T14:12:58.853887Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit WaitForStreamClearance 2025-04-07T14:12:58.853915Z node 1 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037892 2025-04-07T14:12:58.853947Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2025-04-07T14:12:58.853970Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit WaitForStreamClearance 2025-04-07T14:12:58.853992Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit ReadTableScan 2025-04-07T14:12:58.854015Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2025-04-07T14:12:58.854204Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Continue 2025-04-07T14:12:58.854241Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:12:58.854269Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-04-07T14:12:58.854303Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-07T14:12:58.854330Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-07T14:12:58.854369Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T14:12:58.854811Z node 1 :TX_PROXY TRACE: 
StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2025-04-07T14:12:58.854843Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2025-04-07T14:12:58.854872Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2025-04-07T14:12:58.854957Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [1:1084:2865], Recipient [1:985:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:12:58.854988Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:12:58.855065Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2025-04-07T14:12:58.855385Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037892, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:12:58.855509Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: RESPONSE_DATA TxId: 281474976715665 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-07T14:12:58.855553Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream data from ShardId# 72075186224037892 2025-04-07T14:12:58.855577Z node 1 :TX_PROXY TRACE: [ReadTable [1:930:2740] TxId# 281474976715662] Sending TEvStreamDataAck to [1:1084:2865] ShardId# 72075186224037892 2025-04-07T14:12:58.855697Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2025-04-07T14:12:58.855728Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2025-04-07T14:12:58.855769Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037892, TxId: 281474976715665, PendingAcks: 0 2025-04-07T14:12:58.856134Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [1:929:2740], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:12:58.856162Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:12:58.856187Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2025-04-07T14:12:58.856260Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2025-04-07T14:12:58.856312Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2025-04-07T14:12:58.856418Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-07T14:12:58.856442Z node 1 :TX_DATASHARD DEBUG: 
Found op: cookie: 281474976715665, at: 72075186224037892 2025-04-07T14:12:58.856563Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715665 ShardId: 72075186224037892 2025-04-07T14:12:58.856590Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRelease from ShardId# 72075186224037892 2025-04-07T14:12:58.856613Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Released quota 1 reserved messages from ShardId# 72075186224037892 2025-04-07T14:12:58.860988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:985:2786], Recipient [1:985:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:58.861049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:58.861110Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T14:12:58.861141Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:12:58.861178Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for ReadTableScan 2025-04-07T14:12:58.861216Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2025-04-07T14:12:58.861266Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037892 error: , IsFatalError: 0 2025-04-07T14:12:58.861306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2025-04-07T14:12:58.861344Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit ReadTableScan 2025-04-07T14:12:58.861373Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit FinishPropose 2025-04-07T14:12:58.861395Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit FinishPropose 2025-04-07T14:12:58.861428Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is DelayComplete 2025-04-07T14:12:58.861450Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit FinishPropose 2025-04-07T14:12:58.861473Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit CompletedOperations 2025-04-07T14:12:58.861496Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit CompletedOperations 2025-04-07T14:12:58.861555Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2025-04-07T14:12:58.861577Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit CompletedOperations 2025-04-07T14:12:58.861599Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037892 has finished 2025-04-07T14:12:58.861624Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:58.861663Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-04-07T14:12:58.861690Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 
2025-04-07T14:12:58.861712Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-07T14:12:58.861772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T14:12:58.861829Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037892 on unit FinishPropose 2025-04-07T14:12:58.861878Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037892 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:12:58.861955Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-07T14:12:58.862218Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:985:2786], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: COMPLETE TxId: 281474976715665 Step: 0 OrderId: 281474976715665 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 353 } } 2025-04-07T14:12:58.862253Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream complete from ShardId# 72075186224037892 2025-04-07T14:12:58.862317Z node 1 :TX_PROXY INFO: [ReadTable [1:930:2740] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.015451s execute time: 0.517050s total time: 0.532501s 2025-04-07T14:12:58.862795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:845:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-07T14:12:58.863105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:980:2784]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-07T14:12:58.863294Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:849:2678]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-07T14:12:58.863421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:985:2786]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::InitTtlSettingsOnShardStart [GOOD] Test command err: Trying to start YDB, gRPC: 5569, MsgBus: 10656 2025-04-07T14:11:50.611266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573764013554494:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.611571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001441/r3tmp/tmp1eOMPT/pdisk_1.dat 2025-04-07T14:11:50.870313Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5569, node 1 2025-04-07T14:11:50.925652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:50.925670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:50.925675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:50.925740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:50.943371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:50.943508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:50.945317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10656 TClient is connected to server localhost:10656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.331797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, value Utf8, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:11:52.740704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573772603489566:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.740810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:52.920929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.964903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:11:52.965056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:11:52.965220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:11:52.965293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:11:52.965371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:11:52.965444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:11:52.965525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:11:52.965584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:11:52.965646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:11:52.965702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:11:52.965752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:11:52.965861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573772603489642:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:11:52.968054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:11:52.968116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:11:52.968254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:11:52.968291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:11:52.968476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:11:52.968512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:11:52.968626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:11:52.968701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:11:52.968830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:11:52.968883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:11:52.968931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:11:52.968962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:11:52.969433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:11:52.969490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:11:52.969715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:11:52.969787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:11:52.969955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:11:52.969986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:11:52.970184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:11:52.970212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:11:52.970343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:11:52.970380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14: ... ration type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:58.551952Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:58.552043Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:58.552366Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:58.552529Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:58.552690Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:58.552855Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:58.552996Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:58.553141Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:58.553290Z node 
6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:58.553429Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:58.553563Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:58.553707Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519737:2307];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:12:58.557317Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:58.557384Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:58.557488Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:58.557549Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:58.557748Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:58.557788Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:58.557883Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:58.557922Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:58.557989Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:58.558025Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:58.558069Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:58.558107Z node 6 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:58.558505Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:58.558556Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:58.558829Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:58.558877Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:12:58.559065Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:58.559108Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:58.559405Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:58.559450Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:58.559634Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:58.559674Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:12:58.604199Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574055843519807:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.604267Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.605527Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:12:58.627159Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574055843519834:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.627240Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.642352Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:12:58.663321Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574055843519866:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.663402Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.675316Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:58.693766Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574055843519898:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.693847Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:58.705911Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:58.825582Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519936:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-07T14:12:58.839202Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; 2025-04-07T14:12:58.839867Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7490574055843519936:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:437;event=StartTtl;skip=not_ready_tiers; >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowersCrossDC_Easy |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |95.3%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:12:40.990387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:12:40.990493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:40.990537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:12:40.990589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:12:40.990892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:12:40.990928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:12:40.991005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:12:40.991974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:41.060094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:12:41.060227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:41.071525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:41.071668Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:12:41.071826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:12:41.079302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:12:41.080965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:12:41.082736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.083028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:12:41.087653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.095757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:41.095832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.096052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:12:41.096105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:41.096222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:12:41.096368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.102594Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:12:41.202013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:12:41.202885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.203754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:12:41.205168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:12:41.205246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.207899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.208737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:12:41.208893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.208954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:12:41.209016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:12:41.209056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:12:41.210862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.210921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:12:41.210953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:12:41.212654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.212725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.212814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.212861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.216606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:41.218426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:12:41.219459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:12:41.221144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:12:41.221260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:12:41.221297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.221559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:12:41.221601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:12:41.221745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:41.221825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:12:41.223669Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:12:41.223704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:12:41.223858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:12:41.223900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:12:41.224105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:12:41.224145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:12:41.224221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:12:41.224250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.224285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:12:41.224318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:12:41.224350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:12:41.224372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:12:41.224438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:12:41.224474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:12:41.224500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:12:41.226108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:41.226240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:12:41.226272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:12:41.226312Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:12:41.226355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:12:41.226455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: Table profiles were not loaded 2025-04-07T14:13:00.632872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:00.633616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:13:00.633711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:13:00.633821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.633920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.634293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:00.634429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-07T14:13:00.634684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-07T14:13:00.634849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.634948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:00.634995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:13:00.635120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.635894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.636068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.636152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.636252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.636490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 
72057594046678944 2025-04-07T14:13:00.636611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.636799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.637037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.637177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.637240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.637296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:13:00.637560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-07T14:13:00.645174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:13:00.646985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1741:3668], Recipient [1:1741:3668]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:13:00.647041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-07T14:13:00.648229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:00.648293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:00.648531Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1741:3668], Recipient [1:1741:3668]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:13:00.648569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:13:00.648811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:00.648876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:00.648917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:00.648951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-07T14:13:00.649376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1778:3668], Recipient [1:1741:3668]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:13:00.649414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-07T14:13:00.649447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1741:3668] sender: [1:1796:2058] recipient: [1:15:2062] 2025-04-07T14:13:00.687376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1795:3712], Recipient [1:1741:3668]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-07T14:13:00.687448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2025-04-07T14:13:00.687578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:00.687845Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 288us result status StatusSuccess 2025-04-07T14:13:00.688559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 25856 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 13389 Memory: 156496 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 25856 DataSize: 25856 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> TSubDomainTest::Boot >> TSubDomainTest::LsLs >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::CreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-04-07T14:12:56.378417Z node 1 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:56.378749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:56.378982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dc7/r3tmp/tmpn0Olzs/pdisk_1.dat 2025-04-07T14:12:56.733970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.780318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.832141Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:12:56.832806Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:12:56.832966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.833096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.844373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.953549Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:12:56.953614Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:12:56.953772Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:12:57.041652Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:12:57.042200Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:12:57.042293Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:12:57.042579Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:12:57.042752Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:12:57.042821Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:12:57.043029Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:12:57.044167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.044989Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:12:57.045066Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:12:57.083081Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:12:57.084026Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:12:57.084444Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:12:57.084678Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:57.116147Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:12:57.116742Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:57.116845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:12:57.118182Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:12:57.118286Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:12:57.118342Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:12:57.118616Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:12:57.146091Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:12:57.146284Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:12:57.146435Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:12:57.146480Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:12:57.146513Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:12:57.146545Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:12:57.146837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.146886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.147074Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:12:57.147148Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:12:57.147454Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.147488Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.147551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:12:57.147606Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:12:57.147648Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:57.147701Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:12:57.147731Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:12:57.147751Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:12:57.147773Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:12:57.147804Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:12:57.147933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:12:57.147965Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:12:57.148033Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:12:57.148250Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:12:57.148290Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:12:57.148351Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:12:57.148405Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:12:57.148441Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:12:57.148473Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:12:57.148496Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.148695Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:12:57.148729Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:12:57.148754Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:12:57.148781Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.148826Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:12:57.148847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:12:57.148871Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:12:57.148893Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:12:57.148911Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:12:57.149887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:12:57.149965Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:12:57.160566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:12:57.160635Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.160684Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.160732Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 281474976715659] at 72075186224037888 is Executed 2025-04-07T14:13:03.103460Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-07T14:13:03.103486Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-07T14:13:03.103506Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:13:03.103574Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:709:2592] for [0:281474976715659] at 72075186224037888 2025-04-07T14:13:03.103606Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-07T14:13:03.103657Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:13:03.103823Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:631:2537], Recipient [2:709:2592]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-04-07T14:13:03.103866Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-04-07T14:13:03.103922Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:631:2537] ShardId# 72075186224037888 2025-04-07T14:13:03.104078Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:709:2592], Recipient [2:631:2537]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-04-07T14:13:03.104130Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-07T14:13:03.104338Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:709:2592], Recipient [2:631:2537]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-04-07T14:13:03.104371Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-07T14:13:03.104443Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:631:2537], Recipient 
[2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.104472Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.104515Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:03.104548Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.104586Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-04-07T14:13:03.104622Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-07T14:13:03.104696Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-04-07T14:13:03.104759Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-07T14:13:03.104794Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-07T14:13:03.104821Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-04-07T14:13:03.104853Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-07T14:13:03.105044Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-07T14:13:03.105071Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.105096Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:03.105122Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:03.105164Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:03.105218Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:03.105624Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:743:2613], Recipient [2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.105662Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.105755Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:743:2613], Recipient [2:709:2592]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-04-07T14:13:03.105790Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-07T14:13:03.106145Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:708:2592], Recipient [2:709:2592]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:13:03.106200Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:13:03.106238Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-07T14:13:03.106306Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-07T14:13:03.106424Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-04-07T14:13:03.106468Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-07T14:13:03.106695Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T14:13:03.106734Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-04-07T14:13:03.106843Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:743:2613], Recipient [2:709:2592]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-04-07T14:13:03.106877Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-07T14:13:03.106914Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-07T14:13:03.107091Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:631:2537], Recipient [2:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.107145Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.107198Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:03.107245Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.107291Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-04-07T14:13:03.107331Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-07T14:13:03.107374Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-04-07T14:13:03.107436Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-07T14:13:03.107499Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-04-07T14:13:03.107553Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:13:03.107592Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:03.107630Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-04-07T14:13:03.107663Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:13:03.107702Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:03.107735Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:03.107789Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-07T14:13:03.107827Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2025-04-07T14:13:03.107859Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-04-07T14:13:03.107900Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:03.107935Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:03.107975Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:03.108010Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:03.108078Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:03.108115Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:03.108160Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-04-07T14:13:03.108208Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-04-07T14:13:03.108287Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:03.108584Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:631:2537], Recipient [2:709:2592]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 479 } } 2025-04-07T14:13:03.108634Z node 2 :TX_PROXY DEBUG: [ReadTable [2:709:2592] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-04-07T14:13:03.108725Z node 2 :TX_PROXY ERROR: [ReadTable [2:709:2592] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-04-07T14:13:03.109095Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:709:2592], Recipient [2:631:2537]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 >> TSubDomainTest::FailIfAffectedSetNotInterior >> TSubDomainTest::UserAttributes >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-04-07T14:12:56.223002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:56.223418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:56.223696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000de9/r3tmp/tmpC8HndR/pdisk_1.dat 2025-04-07T14:12:56.627055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.685582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.734672Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:12:56.736765Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:12:56.736974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.737114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.748548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.861812Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:12:56.861892Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:12:56.863076Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:12:56.968571Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:12:56.969205Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:12:56.969339Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:12:56.969613Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:12:56.969767Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:12:56.969933Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:12:56.970244Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:12:56.972778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.973694Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:12:56.973762Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:12:57.014129Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:12:57.015197Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:12:57.016713Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:12:57.016986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:57.063364Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:12:57.064050Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:57.064185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:12:57.065683Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:12:57.065758Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:12:57.065812Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:12:57.066178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:12:57.094476Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:12:57.094647Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:12:57.094776Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:12:57.094818Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:12:57.094852Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:12:57.094893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:12:57.095197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.095265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.096226Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:12:57.096343Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:12:57.097773Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.097821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.097861Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:12:57.097923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:12:57.098006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:57.098088Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:12:57.098125Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:12:57.098155Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:12:57.098185Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:12:57.098235Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:12:57.098424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:12:57.098470Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:12:57.098557Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:12:57.098864Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:12:57.098928Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:12:57.099005Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:12:57.099071Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:12:57.099115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:12:57.099149Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:12:57.099179Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.099449Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:12:57.099484Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:12:57.099526Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:12:57.099558Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.099619Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:12:57.099654Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:12:57.099687Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:12:57.099716Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:12:57.099738Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:12:57.101083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:12:57.101185Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:12:57.111882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:12:57.111986Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.112021Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.112064Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 86224037890 has no attached operations 2025-04-07T14:13:03.453421Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:13:03.453465Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:13:03.453866Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-07T14:13:03.453896Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-07T14:13:03.453954Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-07T14:13:03.454038Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:957:2764], Recipient [2:864:2691]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.454066Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.454137Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-07T14:13:03.454435Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:13:03.454576Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-04-07T14:13:03.454608Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-07T14:13:03.454634Z node 2 :TX_PROXY TRACE: [ReadTable [2:824:2660] TxId# 281474976715661] Sending TEvStreamDataAck to [2:957:2764] ShardId# 72075186224037890 
2025-04-07T14:13:03.454716Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-07T14:13:03.454777Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-07T14:13:03.454801Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-07T14:13:03.455146Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:823:2660], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:13:03.455176Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:13:03.455202Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-07T14:13:03.455241Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-07T14:13:03.455316Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:13:03.455431Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-07T14:13:03.455455Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-07T14:13:03.455476Z node 2 :TX_PROXY TRACE: [ReadTable [2:824:2660] TxId# 281474976715661] Sending TEvStreamDataAck to [2:957:2764] ShardId# 72075186224037890 2025-04-07T14:13:03.455562Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-07T14:13:03.455647Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-07T14:13:03.455680Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-07T14:13:03.455958Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:823:2660], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:13:03.455997Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:13:03.456020Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-07T14:13:03.456058Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-04-07T14:13:03.456103Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-07T14:13:03.456249Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:957:2764], Recipient [2:824:2660]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-04-07T14:13:03.456276Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-04-07T14:13:03.456300Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-04-07T14:13:03.456343Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T14:13:03.456368Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-04-07T14:13:03.456527Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:864:2691], Recipient [2:864:2691]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.456555Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.456591Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:13:03.456618Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.456644Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-04-07T14:13:03.456686Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:13:03.456717Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-04-07T14:13:03.456748Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-07T14:13:03.456774Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-04-07T14:13:03.456796Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-04-07T14:13:03.456819Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-07T14:13:03.456871Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-04-07T14:13:03.456898Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-04-07T14:13:03.456923Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:13:03.456947Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:13:03.456992Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-07T14:13:03.457012Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:13:03.457030Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-04-07T14:13:03.457053Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:03.457077Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-07T14:13:03.457116Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:13:03.457146Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:13:03.457193Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:13:03.457221Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-07T14:13:03.457254Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:13:03.457320Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:13:03.457531Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:864:2691], Recipient [2:824:2660]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 301 } } 2025-04-07T14:13:03.457563Z node 2 :TX_PROXY DEBUG: [ReadTable [2:824:2660] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-04-07T14:13:03.457623Z node 2 :TX_PROXY INFO: [ReadTable [2:824:2660] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013684s execute time: 0.312996s total time: 0.326680s 2025-04-07T14:13:03.458008Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:824:2660], Recipient [2:631:2537]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-07T14:13:03.458195Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:824:2660], Recipient [2:861:2689]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-07T14:13:03.458431Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:824:2660], Recipient [2:864:2691]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-04-07T14:12:56.224308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:56.224576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:56.224762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e95/r3tmp/tmpxZppqz/pdisk_1.dat 2025-04-07T14:12:56.627055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.684024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.737174Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:12:56.737959Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:12:56.738124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.738252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.749451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.861822Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:12:56.861883Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:12:56.863033Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:12:56.975089Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:12:56.975659Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:12:56.975753Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:12:56.976057Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:12:56.976219Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:12:56.976334Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:12:56.976618Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:12:56.977942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.978733Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:12:56.978800Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:12:57.019726Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:12:57.020939Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:12:57.021327Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:12:57.021597Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:57.062792Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:12:57.063426Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:57.063538Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:12:57.065020Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:12:57.065107Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:12:57.065171Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:12:57.065502Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:12:57.093091Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:12:57.094381Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:12:57.094549Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:12:57.094589Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:12:57.094622Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:12:57.094670Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:12:57.095006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.095047Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.096231Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:12:57.096350Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:12:57.097766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.097810Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.097852Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:12:57.097927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:12:57.097987Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:57.098132Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:12:57.098170Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:12:57.098215Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:12:57.098250Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:12:57.098291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:12:57.098461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:12:57.098503Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:12:57.098580Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:12:57.098827Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:12:57.098870Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:12:57.098963Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:12:57.099104Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:12:57.099145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:12:57.099179Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:12:57.099221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.099473Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:12:57.099505Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:12:57.099549Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:12:57.099576Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.099639Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:12:57.099665Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:12:57.099695Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:12:57.099724Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:12:57.099748Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:12:57.101054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:12:57.101153Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:12:57.111882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:12:57.111992Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.112022Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.112068Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.652858Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-04-07T14:13:03.652891Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-07T14:13:03.652919Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-07T14:13:03.652951Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-07T14:13:03.653275Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1309:3037], Recipient [2:1036:2820]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-07T14:13:03.653312Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-07T14:13:03.653340Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-07T14:13:03.653432Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1309:3037], Recipient [2:1211:2959]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.653452Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.653504Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-07T14:13:03.653770Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:13:03.653876Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1309:3037], Recipient [2:1036:2820]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-07T14:13:03.653901Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-04-07T14:13:03.653921Z node 2 :TX_PROXY TRACE: [ReadTable [2:1036:2820] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1309:3037] ShardId# 72075186224037896 2025-04-07T14:13:03.653982Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-04-07T14:13:03.654049Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1309:3037], Recipient [2:1036:2820]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-07T14:13:03.654075Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-07T14:13:03.654427Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1035:2820], Recipient [2:1036:2820]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:13:03.654476Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:13:03.654526Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-07T14:13:03.654650Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-07T14:13:03.654737Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-07T14:13:03.654873Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1309:3037], Recipient [2:1036:2820]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-04-07T14:13:03.654894Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-04-07T14:13:03.654912Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-04-07T14:13:03.654957Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-04-07T14:13:03.654975Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-04-07T14:13:03.655053Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1211:2959], Recipient [2:1211:2959]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.655080Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.655114Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-07T14:13:03.655135Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.655164Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-04-07T14:13:03.655182Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-04-07T14:13:03.655212Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-04-07T14:13:03.655255Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-04-07T14:13:03.655281Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-04-07T14:13:03.655303Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-04-07T14:13:03.655325Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-07T14:13:03.655353Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-04-07T14:13:03.655383Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-04-07T14:13:03.655407Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-04-07T14:13:03.655425Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-04-07T14:13:03.655452Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-04-07T14:13:03.655468Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-04-07T14:13:03.655484Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-04-07T14:13:03.655504Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:03.655518Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-04-07T14:13:03.655550Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-07T14:13:03.655570Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-07T14:13:03.655617Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-07T14:13:03.655645Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-07T14:13:03.655672Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:13:03.655718Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-07T14:13:03.655901Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1211:2959], Recipient [2:1036:2820]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 262 } } 2025-04-07T14:13:03.655932Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1036:2820] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-04-07T14:13:03.655984Z node 2 :TX_PROXY INFO: [ReadTable [2:1036:2820] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.018762s execute time: 0.591226s total time: 0.609988s 2025-04-07T14:13:03.656301Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:845:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-04-07T14:13:03.656475Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:951:2756]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-07T14:13:03.656770Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:955:2758]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-07T14:13:03.657067Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:1206:2957]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-07T14:13:03.657255Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:1211:2959]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-07T14:13:03.657456Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1312:3040], Recipient [2:1103:2878]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:03.657485Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:03.657515Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1310:3038], serverId# [2:1312:3040], sessionId# [0:0:0] 2025-04-07T14:13:03.657576Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:1103:2878]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-07T14:13:03.657726Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1313:3041], Recipient [2:1104:2879]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:03.657757Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:03.657786Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1311:3039], serverId# [2:1313:3041], sessionId# [0:0:0] 2025-04-07T14:13:03.657933Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1036:2820], Recipient [2:1104:2879]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-04-07T14:12:56.249102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:12:56.249407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:56.249637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d8a/r3tmp/tmpucKlzt/pdisk_1.dat 2025-04-07T14:12:56.627322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.687501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.735112Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:12:56.736744Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:12:56.736976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.737119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.748505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.861801Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:12:56.861861Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:12:56.863049Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:12:56.980105Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:12:56.980632Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:12:56.980748Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:12:56.981073Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:12:56.981250Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:12:56.981351Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:12:56.981612Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:12:56.982868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.983732Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:12:56.983801Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:12:57.014165Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:12:57.015182Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:12:57.016681Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:12:57.016959Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:12:57.057071Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:12:57.057648Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:12:57.057736Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:12:57.058986Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:12:57.059076Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:12:57.059132Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:12:57.060014Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:12:57.108418Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:12:57.108624Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:12:57.108789Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:12:57.108828Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:12:57.108861Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:12:57.108897Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:12:57.109244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.109290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:12:57.109475Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:12:57.109593Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:12:57.109994Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.110038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:12:57.110096Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:12:57.110158Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:12:57.110209Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:12:57.110274Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:12:57.110308Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:12:57.110337Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:12:57.110364Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:12:57.110404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:12:57.110579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:12:57.110626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:12:57.110711Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:12:57.110936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:12:57.110983Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:12:57.111067Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:12:57.111137Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:12:57.111202Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:12:57.111240Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:12:57.111270Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.111537Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:12:57.111572Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:12:57.111604Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:12:57.111633Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.111700Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:12:57.111728Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:12:57.111763Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:12:57.111789Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:12:57.111832Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:12:57.113147Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:12:57.113248Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:12:57.123927Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:12:57.124006Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:12:57.124035Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:12:57.124087Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.731505Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-04-07T14:13:03.731531Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-04-07T14:13:03.731577Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-04-07T14:13:03.731615Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-07T14:13:03.731643Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-07T14:13:03.731670Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-04-07T14:13:03.731696Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:13:03.731860Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-04-07T14:13:03.731886Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.731913Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-07T14:13:03.731948Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:13:03.731984Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:13:03.732033Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:13:03.732408Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:960:2768], Recipient [2:849:2678]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.732437Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:13:03.732522Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:960:2768], Recipient [2:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-07T14:13:03.732549Z node 2 :TX_PROXY 
DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-07T14:13:03.732586Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:960:2768] 2025-04-07T14:13:03.732699Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-07T14:13:03.732923Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:13:03.733060Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:960:2768], Recipient [2:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-07T14:13:03.733087Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-07T14:13:03.733112Z node 2 :TX_PROXY TRACE: [ReadTable [2:930:2740] TxId# 281474976715662] Sending TEvStreamDataAck to [2:960:2768] ShardId# 72075186224037890 2025-04-07T14:13:03.733158Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-07T14:13:03.733238Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:960:2768], Recipient [2:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-07T14:13:03.733265Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-07T14:13:03.733512Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:929:2740], Recipient [2:930:2740]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-07T14:13:03.733545Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-07T14:13:03.733572Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:960:2768] 2025-04-07T14:13:03.733615Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-07T14:13:03.733672Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:13:03.733794Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:960:2768], Recipient [2:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-04-07T14:13:03.733821Z node 2 :TX_PROXY DEBUG: [ReadTable [2:930:2740] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-07T14:13:03.733844Z node 2 :TX_PROXY TRACE: [ReadTable [2:930:2740] TxId# 281474976715662] Sending TEvStreamDataAck to [2:960:2768] ShardId# 72075186224037890 2025-04-07T14:13:03.733904Z node 2 :TX_PROXY INFO: [ReadTable [2:930:2740] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.015564s execute time: 0.209301s total time: 0.224865s 2025-04-07T14:13:03.734063Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-07T14:13:03.734114Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-04-07T14:13:03.734286Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T14:13:03.734310Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-04-07T14:13:03.734527Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:930:2740], Recipient [2:845:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-07T14:13:03.734816Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:849:2678], Recipient [2:849:2678]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.734838Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:03.734865Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:13:03.734885Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:13:03.734908Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-04-07T14:13:03.734933Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:13:03.734964Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-04-07T14:13:03.734998Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-07T14:13:03.735030Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-04-07T14:13:03.735064Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-04-07T14:13:03.735090Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-07T14:13:03.735125Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-04-07T14:13:03.735150Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-04-07T14:13:03.735178Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:13:03.735202Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:13:03.735239Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-07T14:13:03.735260Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:13:03.735280Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-04-07T14:13:03.735308Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:03.735333Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-07T14:13:03.735361Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:13:03.735388Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:13:03.735431Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:13:03.735467Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-07T14:13:03.735512Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:13:03.735588Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:13:03.735778Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:930:2740], Recipient [2:849:2678]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-04-07T14:13:03.735815Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-07T14:13:03.735845Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-04-07T14:13:03.735884Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-04-07T14:13:03.736029Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:930:2740], Recipient [2:849:2678]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-04-07T14:13:03.736057Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-07T14:13:03.736142Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:930:2740], Recipient [2:849:2678]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 |95.3%| [TA] 
$(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TSubDomainTest::StartAndStopTenanNode >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> KqpScheme::CreateDropTableMultipleTime [GOOD] >> KqpScheme::CreateDropTableViaApiMultipleTime ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.816782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574055084599492:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.823633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.053623Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574055642297210:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.061188Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.061722Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bd5/r3tmp/tmp4OWYXq/pdisk_1.dat 2025-04-07T14:12:59.117788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.411229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-07T14:12:59.411309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.415958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.454449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.454513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.458556Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25325, node 1 2025-04-07T14:12:59.470172Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.471091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.511921Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.512827Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.665257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bd5/r3tmp/yandexUfDCgz.tmp 2025-04-07T14:12:59.665285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bd5/r3tmp/yandexUfDCgz.tmp 2025-04-07T14:12:59.665440Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bd5/r3tmp/yandexUfDCgz.tmp 2025-04-07T14:12:59.665541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.815474Z INFO: TTestServer started on Port 11087 GrpcPort 25325 TClient is connected to server localhost:11087 PQClient connected to localhost:25325 === TenantModeEnabled() = 1 === Init PQ - start server on port 25325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.255422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.255622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.255842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.256052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.256095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.261578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.261737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.261897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.261960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.262047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.262071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:13:00.265399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.265459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.265475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T14:13:00.269609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.269640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.269664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.271252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.271300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.271326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.271367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.287792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.291662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.291783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.296407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180342, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.296580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574059379567244 RawX2: 4294969643 } } Step: 1744035180342 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.296614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.296903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.296944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.297104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.297164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.298801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.298826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.298965Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.299002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574059379567359:2423], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.299048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.299080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.299162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.299171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.299196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.299214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.299224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.299234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.299285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.2993 ... PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.276891Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574097263022077:2339] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.276914Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.279808Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.279905Z node 3 :PERSQUEUE INFO: new Cookie 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-07T14:13:08.280228Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 2025-04-07T14:13:08.281496Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 grpc read done: success: 0 data: 2025-04-07T14:13:08.281513Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 grpc read failed 2025-04-07T14:13:08.281653Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 2025-04-07T14:13:08.281663Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|72c893e9-ed12f072-d2d76e3e-71617b4e_0 is DEAD 2025-04-07T14:13:08.281871Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-07T14:13:08.348117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976720665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:35364" , at schemeshard: 72057594046644480 2025-04-07T14:13:08.348315Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.348487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-07T14:13:08.348502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-07T14:13:08.348654Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:08.351499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.351624Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720665:0 progress is 1/1 2025-04-07T14:13:08.351638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-04-07T14:13:08.351692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-07T14:13:08.351744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720665, ready parts: 1/1, is published: false 2025-04-07T14:13:08.351763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-07T14:13:08.351775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-04-07T14:13:08.351784Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720665:0 2025-04-07T14:13:08.351798Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720665, publications: 1, subscribers: 0 2025-04-07T14:13:08.351814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-07T14:13:08.361404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720665, response: Status: StatusSuccess TxId: 281474976720665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:08.361684Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-04-07T14:13:08.361846Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:08.361866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-07T14:13:08.362078Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:08.362095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7490574080083152001:2365], at schemeshard: 72057594046644480, txId: 281474976720665, path id: 10 2025-04-07T14:13:08.363203Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-04-07T14:13:08.363281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-04-07T14:13:08.363292Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720665 2025-04-07T14:13:08.363308Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-07T14:13:08.363324Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-07T14:13:08.363397Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 2025-04-07T14:13:08.364941Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2025-04-07T14:13:08.377207Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:13:08.377239Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-07T14:13:08.377576Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-07T14:13:08.377646Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:35350 2025-04-07T14:13:08.377661Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:35350 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-07T14:13:08.377668Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:08.378369Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-07T14:13:08.378526Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:13:08.378532Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:08.378538Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.378561Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574097263022107:2347] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.378574Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.379076Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.379223Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-07T14:13:08.379616Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 2025-04-07T14:13:08.385786Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-07T14:13:08.385998Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-07T14:13:08.386043Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 2025-04-07T14:13:08.386219Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|5eeb4416-aaf90fad-d9634901-91dab897_0 is DEAD 2025-04-07T14:13:08.386508Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.837328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574056071051899:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.837388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:58.888281Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574054487395658:2231];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba2/r3tmp/tmpyf6FAP/pdisk_1.dat 2025-04-07T14:12:59.080431Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.086116Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.125245Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.354394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.354505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.355362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.355416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.358579Z node 1 :HIVE WARN: HIVE#72057594037968897 
Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.359086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.360466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.412631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10284, node 1 2025-04-07T14:12:59.471468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.471500Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.603078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001ba2/r3tmp/yandexoJkK68.tmp 2025-04-07T14:12:59.603115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001ba2/r3tmp/yandexoJkK68.tmp 2025-04-07T14:12:59.603224Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001ba2/r3tmp/yandexoJkK68.tmp 2025-04-07T14:12:59.603296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.823268Z INFO: TTestServer started on Port 32307 GrpcPort 10284 TClient is connected to server localhost:32307 PQClient connected to localhost:10284 === TenantModeEnabled() = 1 === Init PQ - start server on port 10284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.385508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.385787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.386075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.386390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.386494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.389046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.389209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.389423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.389500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.389528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.389559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:13:00.391466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.391540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.391559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-07T14:13:00.396868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.396917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.396936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.396980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.401151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.407202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.407236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.407253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.408381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.408498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.410967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180454, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.411108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574060366019622 RawX2: 4294969652 } } Step: 1744035180454 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.411130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.411450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.411475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.411829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.411880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.413640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.413659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.413774Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.413789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574060366019700:2406], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.413824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.413843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.413914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.413923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.413939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.413955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.413971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.413980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.414015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.4140 ... LAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:13:08.358604Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:13:08.358614Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-07T14:13:08.358624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-04-07T14:13:08.358738Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:13:08.358777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:13:08.358785Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:13:08.358794Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-04-07T14:13:08.358803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-04-07T14:13:08.358843Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2025-04-07T14:13:08.358855Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7490574099432199230:2332] Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration 2025-04-07T14:13:08.362976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2025-04-07T14:13:08.363021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2025-04-07T14:13:08.363042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-04-07T14:13:08.477989Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:13:08.478018Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-07T14:13:08.478406Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-04-07T14:13:08.478476Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:51648 2025-04-07T14:13:08.478490Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51648 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-04-07T14:13:08.478498Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:08.480153Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-07T14:13:08.480269Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:13:08.480277Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:08.480305Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.480337Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574099432199394:2339] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.480355Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.481002Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.481118Z node 3 :PERSQUEUE INFO: new Cookie 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-07T14:13:08.481548Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 2025-04-07T14:13:08.482654Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 grpc read done: success: 0 data: 2025-04-07T14:13:08.482668Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 grpc read failed 2025-04-07T14:13:08.482788Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 2025-04-07T14:13:08.482798Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|3fc8b0a6-93b9c8b2-851c62c6-b604c9f8_0 is DEAD 2025-04-07T14:13:08.482993Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2025-04-07T14:13:08.492889Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:13:08.492916Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-07T14:13:08.493248Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-04-07T14:13:08.493315Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:51648 2025-04-07T14:13:08.493325Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51648 proto=v1 topic=topic1 durationSec=0 2025-04-07T14:13:08.493334Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:08.494104Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-07T14:13:08.494222Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:13:08.494230Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:08.494237Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic 
AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.494265Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574099432199405:2345] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.494280Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.494854Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.495031Z node 3 :PERSQUEUE INFO: new Cookie 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-07T14:13:08.495342Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 2025-04-07T14:13:08.496914Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 grpc read done: success: 0 data: 2025-04-07T14:13:08.496940Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 grpc read failed 2025-04-07T14:13:08.496969Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 grpc closed 2025-04-07T14:13:08.497003Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|c6e47ae7-c68a26dd-f911f57b-dda18527_0 is DEAD 2025-04-07T14:13:08.497846Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:09.069880Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490574103727166721:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:09.071619Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTI4NWY2ZTEtNmEyODE1M2MtYWNkMmRmM2EtMzcxZGY1MmM=, ActorId: [3:7490574103727166719:2349], ActorState: ExecuteState, TraceId: 01jr89j49pcspjmj6bsj39fxs4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:09.072430Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-04-07T14:12:28.844145Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:28.847372Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:28.847635Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:28.848266Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:28.849299Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:28.849352Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:28.850159Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T14:12:28.850210Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:28.850336Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:28.850584Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:28.851293Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:28.851342Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} 
Bootstrap 2025-04-07T14:12:28.851514Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:28.851677Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:19:2063] 2025-04-07T14:12:28.851708Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:19:2063] 2025-04-07T14:12:28.861981Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:28.862031Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:28.864191Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.864430Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.864573Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.864721Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.864884Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.865004Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.865138Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:28.865165Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:28.865230Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:28:2073] 2025-04-07T14:12:28.865258Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:28:2073] 2025-04-07T14:12:28.866198Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:28.866281Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:28.883149Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:28.883199Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:28.883256Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:28.883333Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:28.883365Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:28.883387Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:28.886872Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:28.888708Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:28.888908Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:28.889045Z node 1 :STATESTORAGE DEBUG: 
ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:28.889378Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:28.889662Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:28.889748Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-07T14:12:28.889813Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-07T14:12:28.889935Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:49:2089] 2025-04-07T14:12:28.889961Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:49:2089] 2025-04-07T14:12:28.891550Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:28.891597Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-07T14:12:28.891660Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:28.891726Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:49:2089] 2025-04-07T14:12:28.891852Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:28.894492Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:28.894633Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:28.895738Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:28.895871Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-07T14:12:28.895902Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-07T14:12:28.895925Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:28.896010Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:28.896073Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-07T14:12:28.896123Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:28.901807Z node 1 :PIPE_CLIENT DEBUG: 
TClient[72057594037936129] forward result error, check reconnect [1:19:2063] 2025-04-07T14:12:28.901869Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:19:2063] 2025-04-07T14:12:28.902193Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:28.905406Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:28.905571Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:28.905690Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T14:12:28.905720Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:28.905841Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-07T14:12:28.906026Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:28.906114Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:28.906151Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-07T14:12:28.906268Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:28:2073] 2025-04-07T14:12:28.906301Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:28:2073] 2025-04-07T14:12:28.906347Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T14:12:28.906390Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:45:2088] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:28.906439Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.022816s 2025-04-07T14:12:28.906537Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T14:12:28.906562Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:28.906646Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: ... 
07Z node 47 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-04-07T14:13:09.369148Z node 47 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-04-07T14:13:09.369329Z node 48 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [47:317:2260] CurrentLeaderTablet: [47:332:2269] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:13:09.369398Z node 48 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [47:317:2260] CurrentLeaderTablet: [47:332:2269] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:13:09.369509Z node 48 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [47:317:2260] CurrentLeaderTablet: [47:332:2269] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T14:13:09.369560Z node 48 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-04-07T14:13:09.369629Z node 48 :TABLET_RESOLVER DEBUG: SelectForward node 48 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [47:317:2260] 2025-04-07T14:13:09.369706Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 47 [48:541:2088] 2025-04-07T14:13:09.369814Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [48:541:2088] 2025-04-07T14:13:09.369868Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [48:541:2088] 2025-04-07T14:13:09.370030Z node 47 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [48:541:2088] 2025-04-07T14:13:09.370275Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [48:541:2088] 2025-04-07T14:13:09.370332Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [48:541:2088] 2025-04-07T14:13:09.370424Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [48:541:2088] 2025-04-07T14:13:09.370450Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [48:541:2088] 2025-04-07T14:13:09.370504Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [48:541:2088] 2025-04-07T14:13:09.370628Z node 47 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [48:540:2088] EventType# 271122945 2025-04-07T14:13:09.370776Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-07T14:13:09.370853Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:09.371649Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:09.371741Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:09.373551Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] 
::Bootstrap [48:547:2089] 2025-04-07T14:13:09.373584Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [48:547:2089] 2025-04-07T14:13:09.373632Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [48:548:2090] 2025-04-07T14:13:09.373649Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [48:548:2090] 2025-04-07T14:13:09.373826Z node 48 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:13:09.373868Z node 48 :TABLET_RESOLVER DEBUG: SelectForward node 48 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [47:316:2259] 2025-04-07T14:13:09.373951Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [48:547:2089] 2025-04-07T14:13:09.373983Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [48:548:2090] 2025-04-07T14:13:09.374069Z node 48 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:13:09.374236Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 47 [48:547:2089] 2025-04-07T14:13:09.374442Z node 48 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:13:09.374483Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [48:547:2089] 2025-04-07T14:13:09.374506Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [48:547:2089] 2025-04-07T14:13:09.374766Z node 47 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:13:09.374893Z node 47 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:09.374951Z node 47 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:13:09.375023Z node 47 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [48:547:2089] 2025-04-07T14:13:09.375138Z node 48 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [47:451:2360] CurrentLeaderTablet: [47:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:13:09.375386Z node 48 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [47:451:2360] CurrentLeaderTablet: [47:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:13:09.375472Z node 48 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [47:451:2360] CurrentLeaderTablet: [47:468:2372] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T14:13:09.375500Z node 48 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-07T14:13:09.375539Z node 48 :TABLET_RESOLVER DEBUG: SelectForward node 48 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [47:451:2360] 2025-04-07T14:13:09.375648Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 47 
[48:548:2090] 2025-04-07T14:13:09.375793Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [48:548:2090] 2025-04-07T14:13:09.375824Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [48:548:2090] 2025-04-07T14:13:09.375911Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [48:547:2089] 2025-04-07T14:13:09.375941Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [48:547:2089] 2025-04-07T14:13:09.375968Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [48:547:2089] 2025-04-07T14:13:09.376052Z node 48 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [48:547:2089] 2025-04-07T14:13:09.376422Z node 47 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [48:548:2090] 2025-04-07T14:13:09.376508Z node 47 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [48:544:2089] EventType# 268959744 2025-04-07T14:13:09.376795Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-07T14:13:09.376879Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:09.377070Z node 47 :HIVE WARN: HIVE#72057594037927937 Node(48, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:09.377166Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-07T14:13:09.377234Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:09.377428Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [48:548:2090] 2025-04-07T14:13:09.377451Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [48:548:2090] 2025-04-07T14:13:09.377474Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [48:548:2090] 2025-04-07T14:13:09.377517Z node 48 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [48:548:2090] 2025-04-07T14:13:09.377600Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-07T14:13:09.377659Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:09.377736Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:09.377801Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:09.377940Z node 47 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [48:545:2090] EventType# 268959744 2025-04-07T14:13:09.378039Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-07T14:13:09.378063Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:09.378154Z 
node 47 :HIVE WARN: HIVE#72075186224037888 Node(48, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:09.378217Z node 47 :HIVE WARN: HIVE#72075186224037888 Node(48, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:09.378249Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-07T14:13:09.378274Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:09.378377Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-07T14:13:09.378398Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:09.378431Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:09.378455Z node 47 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:59.032342Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574060341764547:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.034102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001baa/r3tmp/tmp1xoEuv/pdisk_1.dat 2025-04-07T14:12:59.242099Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.240853Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.266774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:59.516095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.516248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.521323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.521400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.523921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.526237Z node 1 :HIVE WARN: HIVE#72057594037968897 
Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.538482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:59.541354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2114, node 1 2025-04-07T14:12:59.667790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001baa/r3tmp/yandexkriObE.tmp 2025-04-07T14:12:59.667818Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001baa/r3tmp/yandexkriObE.tmp 2025-04-07T14:12:59.667979Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001baa/r3tmp/yandexkriObE.tmp 2025-04-07T14:12:59.668081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.815504Z INFO: TTestServer started on Port 10020 GrpcPort 2114 TClient is connected to server localhost:10020 PQClient connected to localhost:2114 === TenantModeEnabled() = 1 === Init PQ - start server on port 2114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.399601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.399765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.400017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.400224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.400320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.405247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.405370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.405479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.405518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.405536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.405557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:13:00.407546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.407572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.407586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:00.409160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.409181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.409200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.409234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 
2025-04-07T14:13:00.413136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.414622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.414733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.417263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180461, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.417361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574060341765004 RawX2: 4294969658 } } Step: 1744035180461 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.417384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.417654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.417685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.417805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.417843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.419633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.419661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.419795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.419818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574060341765045:2400], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.419864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.419880Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.419951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.419961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.419978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.419991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 
ready parts: 1/1 2025-04-07T14:13:00.420004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.420012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.420040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.420050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:13:00.420061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:13:00.422976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:00.423084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:00.423099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in ... partition: 0 MaxSeqNo: 0 sessionId: 12345678|133ea51c-5001687b-ef201a48-be970a0b_0 Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-07T14:13:08.593134Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|133ea51c-5001687b-ef201a48-be970a0b_0 grpc read done: success: 0 data: 2025-04-07T14:13:08.593159Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|133ea51c-5001687b-ef201a48-be970a0b_0 grpc read failed 2025-04-07T14:13:08.593403Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|133ea51c-5001687b-ef201a48-be970a0b_0 2025-04-07T14:13:08.593425Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|133ea51c-5001687b-ef201a48-be970a0b_0 is DEAD 2025-04-07T14:13:08.593746Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-07T14:13:08.623816Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976710665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:49608" , at schemeshard: 72057594046644480 2025-04-07T14:13:08.623955Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.624046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-07T14:13:08.624055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-07T14:13:08.624170Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:08.624184Z node 3 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.624246Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-07T14:13:08.624255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-07T14:13:08.624314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-07T14:13:08.624354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-07T14:13:08.624370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-07T14:13:08.624378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-07T14:13:08.624388Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-07T14:13:08.624396Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-07T14:13:08.624405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-07T14:13:08.627653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:08.627884Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-04-07T14:13:08.628033Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:08.628047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-07T14:13:08.628255Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:08.628272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7490574079490516324:2364], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 10 2025-04-07T14:13:08.629022Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-07T14:13:08.629104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-07T14:13:08.629118Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2025-04-07T14:13:08.629133Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], 
version: 3 2025-04-07T14:13:08.629150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-07T14:13:08.629252Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 2025-04-07T14:13:08.633122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2025-04-07T14:13:08.637654Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:13:08.637677Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-07T14:13:08.638076Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-04-07T14:13:08.638150Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:49582 2025-04-07T14:13:08.638162Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49582 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-07T14:13:08.638170Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:08.638996Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-07T14:13:08.639149Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:13:08.639158Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:08.639164Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.639193Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574096670386414:2348] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.639208Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.639811Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.640006Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-04-07T14:13:08.640373Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 2025-04-07T14:13:08.641431Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-07T14:13:08.641701Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-04-07T14:13:08.641747Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:08.642434Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 describe result for acl check 2025-04-07T14:13:08.642525Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 2025-04-07T14:13:08.642760Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|63d45fa5-bc3c8f84-93ffdd26-3b127142_0 is DEAD 2025-04-07T14:13:08.643004Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:09.159662Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490574100965353732:2357], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:09.159922Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjY0MWVkZTEtZWQxNjkzNGYtYzRkNDM4OWQtNzBmMTA1NzY=, ActorId: [3:7490574100965353725:2353], ActorState: ExecuteState, TraceId: 01jr89j4ck7apgnqt7k2dj7cb0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:09.160329Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.881047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574054936818388:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.881101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:58.992840Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574053303154619:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.992941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.228279Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.257758Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b99/r3tmp/tmpXZbXVj/pdisk_1.dat 2025-04-07T14:12:59.586403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.586493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.589316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.589396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.591912Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.592051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-04-07T14:12:59.596802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.640708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26706, node 1 2025-04-07T14:12:59.678550Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.678811Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.774213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b99/r3tmp/yandexvdmi75.tmp 2025-04-07T14:12:59.774242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b99/r3tmp/yandexvdmi75.tmp 2025-04-07T14:12:59.774399Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b99/r3tmp/yandexvdmi75.tmp 2025-04-07T14:12:59.774499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.817287Z INFO: TTestServer started on Port 29149 GrpcPort 26706 TClient is connected to server localhost:29149 PQClient connected to localhost:26706 === TenantModeEnabled() = 1 === Init PQ - start server on port 26706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.177216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.177495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.177758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.177951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.177994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.181683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.181814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.181954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.181989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.182002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.182025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:00.184540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.184595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.184623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:00.186413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.186439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.186454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.186486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.190457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.192781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.192893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.193051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.193066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.193096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.195748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180237, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.195877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574059231786297 RawX2: 4294969658 } } Step: 1744035180237 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.195899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.196125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.196150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.196309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.196354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.198353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.198373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.198512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.198533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574059231786399:2428], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.198568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.198588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.198662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.198674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.198704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.198725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.198736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.198748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.198803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.1988 ... -07T14:13:09.130483Z :NOTICE: [/Root] [/Root] [f4a437eb-bcec0d77-78cfcb9e-15ede1d6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:09.130778Z :INFO: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Starting read session 2025-04-07T14:13:09.130825Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Starting session to cluster null (localhost:32628) 2025-04-07T14:13:09.131033Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:09.131068Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:09.131106Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] Reconnecting session to cluster null in 0.000000s 2025-04-07T14:13:09.140833Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] Successfully connected. Initializing session 2025-04-07T14:13:09.141530Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-07T14:13:09.141558Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-04-07T14:13:09.142040Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-07T14:13:09.142193Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 read init: from# ipv6:[::1]:52134, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-07T14:13:09.142255Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 auth for : consumer_aba 2025-04-07T14:13:09.142830Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 Handle describe topics response 2025-04-07T14:13:09.142950Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 auth is DEAD 2025-04-07T14:13:09.142971Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 auth ok: topics# 1, initDone# 0 2025-04-07T14:13:09.144139Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 register session: topic# /Root/account1/write_topic 2025-04-07T14:13:09.144787Z :INFO: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] Server session id: consumer_aba_3_2_14496471345885564990_v1 2025-04-07T14:13:09.144883Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7490574101335364204:2365] connected; active server actors: 1 2025-04-07T14:13:09.145227Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:09.145355Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7490574101335364204:2365] session consumer_aba_3_2_14496471345885564990_v1 2025-04-07T14:13:09.145491Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-04-07T14:13:09.145637Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-04-07T14:13:09.145724Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_14496471345885564990_v1" (Sender=[3:7490574101335364201:2365], Pipe=[3:7490574101335364204:2365], Partitions=[], ActiveFamilyCount=0) 2025-04-07T14:13:09.145765Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-04-07T14:13:09.145835Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-07T14:13:09.145914Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_14496471345885564990_v1" (Sender=[3:7490574101335364201:2365], Pipe=[3:7490574101335364204:2365], Partitions=[], ActiveFamilyCount=0) 2025-04-07T14:13:09.146004Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_14496471345885564990_v1" sender [3:7490574101335364201:2365] lock partition 0 for ReadingSession "consumer_aba_3_2_14496471345885564990_v1" (Sender=[3:7490574101335364201:2365], Pipe=[3:7490574101335364204:2365], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-07T14:13:09.146096Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-07T14:13:09.146143Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000267s 2025-04-07T14:13:09.148887Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T14:13:09.149491Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_14496471345885564990_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7490574101335364204 RawX2: 4503612512274749 } Path: "/Root/account1/write_topic" } 2025-04-07T14:13:09.149670Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 got read request: guid# 7e517caf-8aa365f5-29169de2-d7bd7ca9 2025-04-07T14:13:09.149749Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-04-07T14:13:09.157022Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1 2025-04-07T14:13:09.157212Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_14496471345885564990_v1:1 with generation 1 2025-04-07T14:13:09.205806Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 
consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1744035189006 CreateTimestampMS: 1744035189005 SizeLag: 165 WriteTimestampEstimateMS: 1744035189006 } Cookie: 18446744073709551615 } 2025-04-07T14:13:09.205878Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-04-07T14:13:09.205943Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-04-07T14:13:09.217289Z :INFO: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Closing read session. Close timeout: 0.000000s 2025-04-07T14:13:09.217338Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-04-07T14:13:09.217398Z :INFO: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Counters: { Errors: 0 CurrentSessionLifetimeMs: 86 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:09.217514Z :NOTICE: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:13:09.217555Z :DEBUG: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] [null] Abort session to cluster 2025-04-07T14:13:09.218016Z :NOTICE: [/Root] [/Root] [8d18146a-215b46ba-8be5984d-7e88c402] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:09.218651Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 grpc read done: success# 0, data# { } 2025-04-07T14:13:09.218689Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 grpc read failed 2025-04-07T14:13:09.218723Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 grpc closed 2025-04-07T14:13:09.218761Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_14496471345885564990_v1 is DEAD 2025-04-07T14:13:09.219801Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_14496471345885564990_v1 2025-04-07T14:13:09.225076Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7490574101335364204:2365] disconnected; active server actors: 1 2025-04-07T14:13:09.225150Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7490574101335364204:2365] client consumer_aba disconnected session consumer_aba_3_2_14496471345885564990_v1 2025-04-07T14:13:09.332229Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490574101335364219:2373], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:09.333160Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjFlN2JiODEtZjhkYWJjMzctNmU0NTdkNjktNTY0NTMxMWY=, ActorId: [3:7490574101335364212:2369], ActorState: ExecuteState, TraceId: 01jr89j4hm1vtmeske8dmwms7x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:09.333674Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TSubDomainTest::LsAltered [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:10.969202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:10.969317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.969358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:10.969392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:10.970277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:10.970322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:10.970407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.973019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:11.055400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:11.055459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:11.077097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:11.077240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:13:11.077375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:11.092377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:11.092729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:11.097419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.097682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:11.119832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.129957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:11.130228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.130359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:11.130532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.149712Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:11.305350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.305565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.305775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:11.306000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:11.306055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.308006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.308159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:11.308342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.308392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:11.308442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:11.308473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:11.310126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.310172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:11.310213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:11.311551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.311607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.311640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.311724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.315085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:11.316635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:11.316828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:11.317796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.317911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.317962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.318177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:11.318220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.318371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.318444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:11.320072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:13:11.320121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.320251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.320283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:11.320447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.320519Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:11.320614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:11.320647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.320706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:11.320738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.320768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:11.320793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:11.320839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:11.320878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:11.320906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:11.322553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.322650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.322681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:11.322711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:11.322754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.322831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
HARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.430540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:11.430577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-07T14:13:11.430621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:11.433034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.433127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.433159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:11.433188Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:13:11.433216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:11.433281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:13:11.441575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:13:11.441765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:13:11.442747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.442883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.442952Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-04-07T14:13:11.443071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:13:11.443268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.443342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:11.445296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:11.445820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:11.447265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.447306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.447452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:13:11.447516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:13:11.447593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.447621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:13:11.447655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:13:11.447679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:13:11.447911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.447958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:13:11.448054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:13:11.448087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:13:11.448142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:13:11.448179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:13:11.448211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:13:11.448239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:13:11.448323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:11.448357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:13:11.448387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:13:11.448413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-07T14:13:11.449424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.449501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.449527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:11.449565Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:13:11.449611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:11.452060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.452140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:11.452168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:11.452210Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-07T14:13:11.452241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:11.452305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:13:11.456413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:11.456689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:13:11.456863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:13:11.456898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:13:11.457335Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:13:11.457413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:13:11.457445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 102 2025-04-07T14:13:11.457876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:11.458057Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 224us result status StatusSuccess 2025-04-07T14:13:11.458347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:10.969200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:10.969365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.969413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:10.969451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:10.970271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:10.970316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:10.970399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.973163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:11.069293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:11.069368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:11.094133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:11.094313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:11.094465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-07T14:13:11.113000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:11.113220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:11.113888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.114115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:11.116090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.129949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:11.130265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.130318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:11.130473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.141484Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:11.227444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.228615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.229580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:11.231953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:11.232032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.241843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.242693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:11.243007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.243075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:11.243161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:11.243201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:11.250259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.250343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:11.250392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:11.254674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.254728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.254763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.254838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.267035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:11.269517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:11.272802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:11.274103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.274301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.274359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.274702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:11.274765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.275013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.275103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:11.277843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.277924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.278101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.278142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:11.278371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.278422Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:11.278541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:11.278581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.278630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:11.278674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.278725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:11.278762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:11.278840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:11.278892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:11.278931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:11.281150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.281275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.281314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:11.281352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:11.281401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.281503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.367366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) ydb/core/external_sources/external_source_factory.cpp:27: External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-04-07T14:13:11.370543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.370831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-04-07T14:13:11.370914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-07T14:13:11.371081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-04-07T14:13:11.373468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.373679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-07T14:13:11.377094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.377471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-07T14:13:11.377578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-07T14:13:11.377739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-07T14:13:11.379857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.380066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-07T14:13:11.383202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.383581Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-07T14:13:11.383672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-07T14:13:11.384004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-07T14:13:11.386422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.386664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-07T14:13:11.389224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.389433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-04-07T14:13:11.389532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-04-07T14:13:11.389642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-07T14:13:11.391786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.391969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.841232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574055158024701:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.845338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:58.894458Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574054843618610:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.894905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001bb3/r3tmp/tmp9YlKkH/pdisk_1.dat 2025-04-07T14:12:59.082641Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.098617Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.364963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.365072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.366649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.366738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.376833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.390933Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.391723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.428076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9388, node 1 2025-04-07T14:12:59.474444Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.475135Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.605269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bb3/r3tmp/yandexqbwzT0.tmp 2025-04-07T14:12:59.605299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bb3/r3tmp/yandexqbwzT0.tmp 2025-04-07T14:12:59.605457Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bb3/r3tmp/yandexqbwzT0.tmp 2025-04-07T14:12:59.605553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.814261Z INFO: TTestServer started on Port 19604 GrpcPort 9388 TClient is connected to server localhost:19604 PQClient connected to localhost:9388 === TenantModeEnabled() = 1 === Init PQ - start server on port 9388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.306008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.306187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.306423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.306673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.306736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:00.309665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.309783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.309919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.309954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.309969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.310002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:13:00.312161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.312191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.312207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:00.313451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.313461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.313477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.317679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.317717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.317740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 
2025-04-07T14:13:00.317804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.321674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.323323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.323472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.332741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180370, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.332892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574059452992609 RawX2: 4294969663 } } Step: 1744035180370 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.332915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.333158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.333186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.333335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.333395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.335600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.335623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.335772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.335789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574059452992672:2413], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.335830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.335857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.335941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.335968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.335993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, 
is published: false 2025-04-07T14:13:00.336013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.336024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.336033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.336077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.336092Z ... rceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:08.919091Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574096626360411:2348] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:08.919109Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:08.919504Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-07T14:13:08.919614Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-07T14:13:08.919881Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 ===Assert streaming op1 ===Assert streaming op2 2025-04-07T14:13:08.920981Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T14:13:08.921259Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T14:13:08.921925Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:13:08.925975Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-04-07T14:13:08.934187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976710666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:60832" , at schemeshard: 72057594046644480 2025-04-07T14:13:08.934357Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.934523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-07T14:13:08.934558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, 
LocalPathId: 10] 2025-04-07T14:13:08.934664Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:08.934687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:08.934747Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710666:0 progress is 1/1 2025-04-07T14:13:08.934763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2025-04-07T14:13:08.934800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-07T14:13:08.934834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710666, ready parts: 1/1, is published: false 2025-04-07T14:13:08.934853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-07T14:13:08.934867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2025-04-07T14:13:08.934891Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710666:0 2025-04-07T14:13:08.934912Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710666, publications: 1, subscribers: 0 2025-04-07T14:13:08.934922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-04-07T14:13:08.936436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710666, response: Status: StatusSuccess TxId: 281474976710666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:08.936599Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-04-07T14:13:08.936833Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:08.936864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-07T14:13:08.937137Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:08.937166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7490574079446490335:2381], at schemeshard: 72057594046644480, txId: 281474976710666, path id: 10 2025-04-07T14:13:08.938195Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2025-04-07T14:13:08.938252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2025-04-07T14:13:08.938261Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 
281474976710666 2025-04-07T14:13:08.938275Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-04-07T14:13:08.938288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-07T14:13:08.938343Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710666, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-04-07T14:13:08.940742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 2025-04-07T14:13:09.416055Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490574100921327742:2356], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:09.416260Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzE0NGY0MTMtZDVmZTBmNDEtN2VhZmRlZDEtMmQzZDY4NTI=, ActorId: [3:7490574100921327735:2352], ActorState: ExecuteState, TraceId: 01jr89j4mkf7hv9hh8jh18cs1d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:09.416650Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:13:09.602489Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490574079446489703:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:09.602570Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:09.623526Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574079158670702:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:09.623620Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:09.920458Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:09.921323Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 describe result for acl check 2025-04-07T14:13:09.921453Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 2025-04-07T14:13:09.921782Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|ff8bfcb8-d3faf86-8ade667d-1044a323_0 is DEAD 2025-04-07T14:13:09.922036Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-04-07T14:13:10.440854Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7490574105216295067:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:10.442492Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzQ0ZWFmZS02OTNhZGQ5NC03ZDRjOGU4MC04ZTUxYWJmYQ==, ActorId: [3:7490574105216295065:2362], ActorState: ExecuteState, TraceId: 01jr89j5mmbh0veqy0wyd68tae, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:10.442813Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalDataSourceTest::CreateExternalDataSource >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> TExternalDataSourceTest::DropTableTwice >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.825985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574053387120086:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.826554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001b97/r3tmp/tmpcRhk3l/pdisk_1.dat 2025-04-07T14:12:59.091060Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.093369Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.154926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:59.427890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.427988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.429139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.429185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.435956Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.436105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.438915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:59.440223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21440, node 1 2025-04-07T14:12:59.600993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001b97/r3tmp/yandexj8RpT5.tmp 2025-04-07T14:12:59.601024Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001b97/r3tmp/yandexj8RpT5.tmp 2025-04-07T14:12:59.601234Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001b97/r3tmp/yandexj8RpT5.tmp 2025-04-07T14:12:59.601344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.814207Z INFO: TTestServer started on Port 16167 GrpcPort 21440 TClient is connected to server localhost:16167 PQClient connected to localhost:21440 === TenantModeEnabled() = 1 === Init PQ - start server on port 21440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.299200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.299379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.299597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.299805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.299857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.317717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.317933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.318165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.318234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.318264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.318296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:00.320719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.320772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.320794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:00.322119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.322150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.322178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.322430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.322457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.322509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.322572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.328214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.331168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.331328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.334042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180377, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.334243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574057682087909 RawX2: 4294969651 } } Step: 1744035180377 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.334276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.334545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.334581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.334739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.334804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.336650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.336696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.336838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.336871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574057682087949:2392], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.336904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.336921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.337013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.337027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.337054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.337076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.337098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.337108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.337182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.337201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:00.337216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:13:00.340075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7 ... vent: NActors::TEvents::TEvPoison 2025-04-07T14:13:10.322536Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: try to update token 2025-04-07T14:13:10.322845Z :INFO: [] MessageGroupId [123] SessionId [] Write session: Do CDS request 2025-04-07T14:13:10.322953Z :INFO: [] MessageGroupId [123] SessionId [] Start write session. Will connect to endpoint: localhost:22044 2025-04-07T14:13:10.319004Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:10.319046Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7490574108072156939:2375] destroyed 2025-04-07T14:13:10.319088Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:13:10.332721Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: send init request: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2025-04-07T14:13:10.330287Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:13:10.330311Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 4 2025-04-07T14:13:10.333173Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2025-04-07T14:13:10.333246Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 4 topic: "/Root/PQ/account/topic" message_group_id: "123" from ipv6:[::1]:52610 2025-04-07T14:13:10.336778Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744035190336 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:10.333261Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=4 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:52610 proto=v1 topic=/Root/PQ/account/topic durationSec=0 2025-04-07T14:13:10.333269Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:13:10.333971Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: describe result for acl check 2025-04-07T14:13:10.334091Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-07T14:13:10.336858Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0" topic: "PQ/account/topic" 2025-04-07T14:13:10.334099Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:10.334106Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:10.334134Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7490574108072156969:2388] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-07T14:13:10.334150Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:10.334613Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-04-07T14:13:10.334652Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:10.334675Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7490574108072156972:2388], now have 1 active actors on pipe 2025-04-07T14:13:10.334759Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:10.334776Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-07T14:13:10.334853Z node 3 :PERSQUEUE INFO: new Cookie 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-04-07T14:13:10.334926Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:13:10.334975Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:13:10.335090Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:10.335101Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-07T14:13:10.335147Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:13:10.335194Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 2025-04-07T14:13:10.337080Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write 1 messages with Id from 1 to 1 2025-04-07T14:13:10.337163Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: try to update token 2025-04-07T14:13:10.337196Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Send 1 message(s) (0 left), first sequence number is 3 2025-04-07T14:13:10.337353Z :INFO: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: close. 
Timeout = 10000 ms 2025-04-07T14:13:10.337594Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T14:13:10.337776Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T14:13:10.337968Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:10.337989Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-07T14:13:10.338041Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T14:13:10.338074Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:13:10.338259Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:10.338271Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-07T14:13:10.338302Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-04-07T14:13:10.338356Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-04-07T14:13:10.338395Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2025-04-07T14:13:10.338486Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-04-07T14:13:10.346252Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-04-07T14:13:10.346702Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1744035190346 2025-04-07T14:13:10.348597Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:13:10.360331Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:13:10.360380Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T14:13:10.360424Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T14:13:10.360576Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T14:13:10.360627Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:13:10.361537Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 14 queued_in_partition_duration_ms: 7 } 2025-04-07T14:13:10.361584Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: acknoledged message 1 2025-04-07T14:13:10.440746Z :INFO: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session will now close 2025-04-07T14:13:10.440808Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: aborting 2025-04-07T14:13:10.441270Z :INFO: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:13:10.441305Z :DEBUG: [] MessageGroupId [123] SessionId [123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0] Write session: destroy 2025-04-07T14:13:10.447269Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 grpc read done: success: 0 data: 2025-04-07T14:13:10.447299Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 grpc read failed 2025-04-07T14:13:10.447320Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 grpc closed 2025-04-07T14:13:10.447331Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|21fb2c99-44bddc3e-f00321e4-49ed0e08_0 is DEAD 2025-04-07T14:13:10.447885Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:10.448847Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:10.448894Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7490574108072156972:2388] destroyed 2025-04-07T14:13:10.448937Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 
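For readability, the three partition-chooser statements that the TTableHelper prints inline in the TPersQueueCommonTest log above (SelectQuery, UpdateQuery, UpdateAccessTimeQuery) are restated below as formatted YQL. This is only a reformatted copy of the queries already present in the log, which key the `//Root/.metadata/TopicPartitionsMapping` table by (Hash, Topic, ProducerId); no columns or statements beyond the logged ones are assumed.

    --!syntax_v1
    -- SelectQuery: look up an existing partition binding for a producer
    DECLARE $Hash AS Uint64;
    DECLARE $Topic AS Utf8;
    DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    --!syntax_v1
    -- UpdateQuery: record the chosen partition and sequence number for the producer
    DECLARE $SourceId AS Utf8;
    DECLARE $Topic AS Utf8;
    DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32;
    DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    DECLARE $SeqNo AS Uint64;
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    -- UpdateAccessTimeQuery: refresh the binding's access time when it is reused
    DECLARE $SourceId AS Utf8;
    DECLARE $Topic AS Utf8;
    DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32;
    DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;

In the run above, the chooser replied Partition=0, SeqNo=0 for SourceId=123, after which the write proxy proceeded to partition 0 on tablet 72075186224037893 and the message was acknowledged at offset 2 with sequence number 3.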
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-04-07T14:13:05.040057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574084774032633:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.040236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001808/r3tmp/tmpN1kGfv/pdisk_1.dat 2025-04-07T14:13:05.518509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:05.557695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.557785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.559141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24439 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T14:13:05.794781Z node 1 :TX_PROXY DEBUG: actor# [1:7490574084774032647:2100] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:05.794831Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574084774032922:2252] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:05.794975Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574084774032670:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:05.795010Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574084774032670:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:05.795186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:05.797557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065071:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574084774032927:2253] 2025-04-07T14:13:05.797628Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574080479065071:2049] Subscribe: subscriber# [1:7490574084774032927:2253], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.797677Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065074:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574084774032928:2253] 2025-04-07T14:13:05.797696Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574080479065074:2052] Subscribe: subscriber# [1:7490574084774032928:2253], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.797735Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065077:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574084774032929:2253] 2025-04-07T14:13:05.797796Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574080479065077:2055] Subscribe: subscriber# [1:7490574084774032929:2253], path# /dc-1, 
domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.797874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032927:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574080479065071:2049] 2025-04-07T14:13:05.797901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032928:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574080479065074:2052] 2025-04-07T14:13:05.797916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032929:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574080479065077:2055] 2025-04-07T14:13:05.797971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084774032924:2253] 2025-04-07T14:13:05.798022Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084774032925:2253] 2025-04-07T14:13:05.798068Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574084774032923:2253][/dc-1] Set up state: owner# [1:7490574084774032670:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.798159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084774032926:2253] 2025-04-07T14:13:05.798194Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574084774032923:2253][/dc-1] Path was already updated: owner# [1:7490574084774032670:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.798270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032927:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032924:2253], cookie# 1 2025-04-07T14:13:05.798294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032928:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032925:2253], cookie# 1 2025-04-07T14:13:05.798334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032929:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032926:2253], cookie# 1 2025-04-07T14:13:05.798368Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065071:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574084774032927:2253] 2025-04-07T14:13:05.798393Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7490574080479065071:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032927:2253], cookie# 1 2025-04-07T14:13:05.798414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065074:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574084774032928:2253] 2025-04-07T14:13:05.798427Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065074:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032928:2253], cookie# 1 2025-04-07T14:13:05.798473Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065077:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574084774032929:2253] 2025-04-07T14:13:05.798500Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574080479065077:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574084774032929:2253], cookie# 1 2025-04-07T14:13:05.799787Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032927:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574080479065071:2049], cookie# 1 2025-04-07T14:13:05.799810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032928:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574080479065074:2052], cookie# 1 2025-04-07T14:13:05.799825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574084774032929:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574080479065077:2055], cookie# 1 2025-04-07T14:13:05.799855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084774032924:2253], cookie# 1 2025-04-07T14:13:05.799877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:05.799926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084774032925:2253], cookie# 1 2025-04-07T14:13:05.800017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:05.800047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084774032926:2253], cookie# 1 2025-04-07T14:13:05.800059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574084774032923:2253][/dc-1] Unexpected sync response: sender# [1:7490574084774032926:2253], cookie# 1 2025-04-07T14:13:05.889773Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574084774032670:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:05.890109Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574084774032670:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... ify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744035188945 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [2:7490574095648367165:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744035188945 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7490574095648367165:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744035188945 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 
72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-07T14:13:09.059929Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-07T14:13:09.059996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 TClient::Ls request: /dc-1/USER_0 2025-04-07T14:13:09.060551Z node 2 :TX_PROXY DEBUG: actor# [2:7490574095648366622:2095] Handle TEvNavigate describe path /dc-1/USER_0 2025-04-07T14:13:09.060588Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574099943334530:2350] HANDLE EvNavigateScheme /dc-1/USER_0 2025-04-07T14:13:09.060708Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490574095648366851:2112], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.060775Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7490574095648366851:2112], cookie# 10 2025-04-07T14:13:09.060835Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367169:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367166:2297], cookie# 10 2025-04-07T14:13:09.060858Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367170:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367167:2297], cookie# 10 2025-04-07T14:13:09.060871Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367171:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367168:2297], cookie# 10 2025-04-07T14:13:09.060913Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574095648366558:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367169:2297], cookie# 10 2025-04-07T14:13:09.060933Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574095648366561:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367170:2297], cookie# 10 2025-04-07T14:13:09.060947Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574095648366564:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7490574095648367171:2297], cookie# 10 2025-04-07T14:13:09.060964Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367169:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648366558:2049], cookie# 10 2025-04-07T14:13:09.060971Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367170:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648366561:2052], cookie# 10 2025-04-07T14:13:09.060979Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574095648367171:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648366564:2055], cookie# 10 2025-04-07T14:13:09.060997Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][2:7490574095648367165:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648367166:2297], cookie# 10 2025-04-07T14:13:09.061018Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:09.061029Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648367167:2297], cookie# 10 2025-04-07T14:13:09.061059Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:09.061074Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7490574095648367168:2297], cookie# 10 2025-04-07T14:13:09.061082Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574095648367165:2297][/dc-1/USER_0] Unexpected sync response: sender# [2:7490574095648367168:2297], cookie# 10 2025-04-07T14:13:09.061107Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7490574095648366851:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-07T14:13:09.061156Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7490574095648366851:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7490574095648367165:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744035188945 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:09.061199Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490574095648366851:2112], cacheItem# { Subscriber: { Subscriber: [2:7490574095648367165:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744035188945 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-07T14:13:09.061275Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490574099943334531:2351], recipient# [2:7490574099943334530:2350], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:09.061297Z 
node 2 :TX_PROXY DEBUG: Actor# [2:7490574099943334530:2350] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:13:09.061350Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574099943334530:2350] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-07T14:13:09.061655Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574099943334530:2350] Handle TEvDescribeSchemeResult Forward to# [2:7490574099943334529:2349] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744035188945 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744035188945 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... 
(TRUNCATED) >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:10.974135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:10.974245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.974293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:10.974327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:10.974370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:10.974399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:10.974462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:10.974794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:11.048017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:11.048073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:11.079013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:11.079163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:11.079301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:11.093546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:11.093743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:11.095042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.095341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:11.107606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.129947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.130197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:11.130251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.130302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:11.130499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.139998Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:11.300912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:11.301178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.301398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:11.301627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:11.301683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.304734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.304928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:11.305124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.305189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:11.305231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:11.305282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:11.309539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.309627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:11.309663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:11.313978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.314036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.314094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.314204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.318140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:11.319896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:11.320114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:11.321237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:11.321369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:11.321419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.321653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:11.321725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:11.321908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.321987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:11.324049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:11.324092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:11.324216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:11.324268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:11.324483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:11.324523Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:11.324617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:11.324736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.324787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:11.324827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:11.324859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 1:0 2025-04-07T14:13:11.324890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:11.324960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:11.325001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:11.325034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:11.326973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.327149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:11.327196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:11.327237Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:11.327289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:11.327391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:13:12.316980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:13:12.317018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:13:12.317058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:13:12.317128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:12.317171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:12.317209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-07T14:13:12.317243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:13:12.317278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T14:13:12.317310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:13:12.318579Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.318663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.318699Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, 
count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:13:12.318734Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:13:12.318775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:13:12.325969Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.326102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.326158Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:13:12.326193Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:13:12.326233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:13:12.327254Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.327340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:12.327373Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:13:12.327404Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T14:13:12.327432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:12.327501Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:13:12.329926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:12.333849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:12.333977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:13:12.334236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:13:12.334314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:13:12.334730Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:13:12.334808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests 
-- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:12.334836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:328:2320] TestWaitNotification: OK eventTxId 101 2025-04-07T14:13:12.335187Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:12.335333Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 185us result status StatusSuccess 2025-04-07T14:13:12.335582Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-07T14:13:12.338184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:12.338351Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-04-07T14:13:12.338443Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-04-07T14:13:12.343412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:12.343608Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, 
please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-07T14:13:12.343844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-07T14:13:12.343875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-07T14:13:12.344175Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-07T14:13:12.344247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:13:12.344282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:336:2328] TestWaitNotification: OK eventTxId 103 2025-04-07T14:13:12.344683Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:12.344825Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 197us result status StatusSuccess 2025-04-07T14:13:12.345036Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-04-07T14:13:05.037162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574086562621532:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.037215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00184c/r3tmp/tmpn9Tsqe/pdisk_1.dat 2025-04-07T14:13:05.517808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:05.536090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.536201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.538241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29958 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T14:13:05.856379Z node 1 :TX_PROXY DEBUG: actor# [1:7490574086562621553:2100] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:05.856434Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574086562621830:2253] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:05.856607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574086562621576:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:05.856680Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574086562621576:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:05.856893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:05.863268Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653978:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574086562621836:2254] 2025-04-07T14:13:05.863341Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574082267653978:2052] Subscribe: subscriber# [1:7490574086562621836:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.863429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621836:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574082267653978:2052] 2025-04-07T14:13:05.863466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086562621833:2254] 2025-04-07T14:13:05.863508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653978:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574086562621836:2254] 2025-04-07T14:13:05.863533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653975:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574086562621835:2254] 2025-04-07T14:13:05.863565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574082267653975:2049] Subscribe: subscriber# [1:7490574086562621835:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.863590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621835:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7490574082267653975:2049] 2025-04-07T14:13:05.863613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086562621832:2254] 2025-04-07T14:13:05.863680Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574086562621831:2254][/dc-1] Set up state: owner# [1:7490574086562621576:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.863796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621835:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621832:2254], cookie# 1 2025-04-07T14:13:05.863813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621836:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621833:2254], cookie# 1 2025-04-07T14:13:05.863826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621837:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621834:2254], cookie# 1 2025-04-07T14:13:05.863841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653975:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574086562621835:2254] 2025-04-07T14:13:05.863867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653975:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621835:2254], cookie# 1 2025-04-07T14:13:05.864881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653981:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574086562621837:2254] 2025-04-07T14:13:05.864916Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574082267653981:2055] Subscribe: subscriber# [1:7490574086562621837:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.864970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653981:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621837:2254], cookie# 1 2025-04-07T14:13:05.865008Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653978:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574086562621836:2254], cookie# 1 2025-04-07T14:13:05.865061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621835:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574082267653975:2049], cookie# 1 2025-04-07T14:13:05.865086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621837:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574082267653981:2055] 2025-04-07T14:13:05.865101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621837:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574082267653981:2055], cookie# 1 2025-04-07T14:13:05.865114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574086562621836:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574082267653978:2052], 
cookie# 1 2025-04-07T14:13:05.865159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086562621832:2254], cookie# 1 2025-04-07T14:13:05.865178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:05.865217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086562621834:2254] 2025-04-07T14:13:05.865283Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574086562621831:2254][/dc-1] Path was already updated: owner# [1:7490574086562621576:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.865304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086562621834:2254], cookie# 1 2025-04-07T14:13:05.865321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:05.865370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086562621833:2254], cookie# 1 2025-04-07T14:13:05.865403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574086562621831:2254][/dc-1] Unexpected sync response: sender# [1:7490574086562621833:2254], cookie# 1 2025-04-07T14:13:05.865428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574082267653981:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574086562621837:2254] 2025-04-07T14:13:05.940885Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574086562621576:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:05.941238Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574086562621576:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 22] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-07T14:13:09.726586Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574102857571247:2322] Handle TEvDescribeSchemeResult Forward to# [2:7490574102857571246:2321] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744035189246 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744035189246 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) TClient::Ls request: /dc-1 2025-04-07T14:13:09.730308Z node 2 :TX_PROXY DEBUG: actor# [2:7490574098562603534:2098] Handle TEvNavigate describe path /dc-1 2025-04-07T14:13:09.730341Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574102857571250:2325] HANDLE EvNavigateScheme /dc-1 2025-04-07T14:13:09.730408Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490574098562603578:2112], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.730490Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7490574098562603578:2112], cookie# 4 2025-04-07T14:13:09.730535Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571132:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571129:2253], cookie# 4 2025-04-07T14:13:09.730551Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571133:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571130:2253], cookie# 4 2025-04-07T14:13:09.730567Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571134:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571131:2253], cookie# 4 2025-04-07T14:13:09.730590Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574098562603279:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571132:2253], cookie# 4 2025-04-07T14:13:09.730612Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574098562603282:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571133:2253], cookie# 4 2025-04-07T14:13:09.730628Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7490574098562603285:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7490574102857571134:2253], cookie# 4 2025-04-07T14:13:09.730651Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571132:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7490574098562603279:2049], cookie# 4 2025-04-07T14:13:09.730663Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571133:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7490574098562603282:2052], cookie# 4 2025-04-07T14:13:09.730686Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7490574102857571134:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7490574098562603285:2055], cookie# 4 2025-04-07T14:13:09.730710Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7490574102857571129:2253], cookie# 4 2025-04-07T14:13:09.730720Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:09.730728Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [2:7490574102857571130:2253], cookie# 4 2025-04-07T14:13:09.730738Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:09.730751Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7490574102857571131:2253], cookie# 4 2025-04-07T14:13:09.730758Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7490574102857571128:2253][/dc-1] Unexpected sync response: sender# [2:7490574102857571131:2253], cookie# 4 2025-04-07T14:13:09.730784Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7490574098562603578:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T14:13:09.730853Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7490574098562603578:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7490574102857571128:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035189204 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:09.730903Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490574098562603578:2112], cacheItem# { Subscriber: { Subscriber: [2:7490574102857571128:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035189204 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-07T14:13:09.730983Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490574102857571251:2326], recipient# [2:7490574102857571250:2325], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:09.731000Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574102857571250:2325] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:13:09.731035Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574102857571250:2325] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-07T14:13:09.731386Z node 2 :TX_PROXY DEBUG: Actor# [2:7490574102857571250:2325] Handle TEvDescribeSchemeResult Forward to# [2:7490574102857571249:2324] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744035189204 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744035189204 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744035189246 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594... 
(TRUNCATED) >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> TExternalDataSourceTest::ReadOnlyMode >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TSubDomainTest::ConsistentCopyTable >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:13.253802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:13.253907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.253947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:13.253977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:13.254028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:13.254056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:13.254107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.254412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:13.318676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:13.318722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:13.329993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:13.330128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:13.330288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:13.346228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:13.346438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-07T14:13:13.347030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.347224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:13.349132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.350329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.350390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.350510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:13.350547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.350578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:13.350709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.357582Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:13.454883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:13.455107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.455333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:13.455534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:13.455591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.457939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.458078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:13.458224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.458266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:13.458320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:13.458351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:13.460219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.460282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:13.460312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:13.461892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.461931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.461976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.462025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.465274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:13.467189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:13.467396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:13.468463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.468604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:13.468680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.468948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:13.469000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.469167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.469253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:13.471390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.471448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.471625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:13:13.471663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:13.471939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.471987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:13.472071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:13.472098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.472139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:13.472173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.472217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:13.472244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:13.472303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:13.472336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:13.472363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:13.474197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.474285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.474318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:13.474364Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:13.474398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.474511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:13:13.479509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:13:13.480003Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:13:13.481068Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:13:13.495335Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:13:13.497414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:13.497705Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-07T14:13:13.497779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-07T14:13:13.497820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-07T14:13:13.499560Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:13:13.501616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:13.501791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-04-07T14:13:13.502799Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:13:13.502994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:13:13.503049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:13:13.503437Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:13:13.503530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:13.503589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:279:2271] TestWaitNotification: OK eventTxId 101 2025-04-07T14:13:13.503953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:13.504120Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 193us result status StatusPathDoesNotExist 2025-04-07T14:13:13.504266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TExternalDataSourceTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:13.274662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:13.274797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.274837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:13.274874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:13.274938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:13.274967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:13.275024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.275408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:13.345628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:13.345691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:13.359131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:13.359249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:13.359379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:13.368448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-07T14:13:13.368635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:13.369236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.369415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:13.371135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.372304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.372356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.372477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:13.372522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.372556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:13.372734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.379226Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:13.519404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:13.519658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.519896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:13.520166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:13.520220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.526053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.526159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:13.526276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.526315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:13.526337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:13.526369Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:13.529498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.529560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:13.529611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:13.532117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.532169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.532206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.532285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.544752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:13.547102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:13.547292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:13.548263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.548430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:13.548476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.548739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:13.548812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.548975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.549043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:13.554148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.554201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:13:13.554376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.554417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:13.554646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.554696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:13.554803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:13.554837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.554878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:13.554923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.554966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:13.554995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:13.555061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:13.555104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:13.555135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:13.557107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.557217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.557252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:13.557288Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:13.557339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.557448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
046678944, txId: 101, path id: 2 2025-04-07T14:13:14.235953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.235990Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:13:14.236082Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-07T14:13:14.236112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:13:14.236150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-07T14:13:14.236191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-07T14:13:14.236224Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:13:14.236254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:13:14.236312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:14.236346Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-07T14:13:14.236373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-07T14:13:14.236407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-07T14:13:14.237121Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:14.237188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:14.237217Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:13:14.237249Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-07T14:13:14.237284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:14.237784Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:14.237847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-07T14:13:14.237868Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-07T14:13:14.237891Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T14:13:14.237913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2025-04-07T14:13:14.237971Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-07T14:13:14.240513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:14.241663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-07T14:13:14.241878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-07T14:13:14.241940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-07T14:13:14.242270Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-07T14:13:14.242356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.242402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:300:2292] TestWaitNotification: OK eventTxId 101 2025-04-07T14:13:14.242764Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.242929Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 201us result status StatusSuccess 2025-04-07T14:13:14.243202Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-07T14:13:14.245848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { 
Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:14.246106Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-04-07T14:13:14.246171Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-04-07T14:13:14.246293Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-07T14:13:14.253475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-07T14:13:14.253657Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:13:14.253952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:13:14.253990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:13:14.254353Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:13:14.254451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.254483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:308:2300] TestWaitNotification: OK eventTxId 102 2025-04-07T14:13:14.254953Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.255113Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 202us result status StatusSuccess 2025-04-07T14:13:14.255403Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { 
Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:13.280991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:13.281113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.281166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:13.281201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:13.281246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:13.281275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:13.281330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.281681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:13.351382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:13.351424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:13.362400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:13.362518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-07T14:13:13.362628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:13.372937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:13.373146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:13.373815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.374026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:13.376053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.377413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.377473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.377603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:13.377646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.377703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:13.377865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.385116Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:13.507322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:13.507564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.507821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:13.508037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:13.508106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.511507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.511666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:13.512108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.512167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:13.512202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:13.512253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:13.514237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.514290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:13.514318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:13.515463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.515492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.515530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.515616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.518110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:13.519948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:13.520098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:13.520843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.520941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:13.520989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.521183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:13.521221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.521368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.521422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:13.522927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T14:13:13.522962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.523077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.523112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:13.523280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.523318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:13.523412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:13.523437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.523496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:13.523534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.523559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:13.523590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:13.523638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:13.523669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:13.523694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:13.525089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.525173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.525198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:13.525224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:13.525260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.525332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
tWaitNotification wait txId: 126 2025-04-07T14:13:14.349817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-04-07T14:13:14.349835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-04-07T14:13:14.350330Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-07T14:13:14.350478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.350530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:336:2328] 2025-04-07T14:13:14.350699Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-04-07T14:13:14.350766Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-07T14:13:14.350805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.350826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:336:2328] 2025-04-07T14:13:14.350904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.350926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:336:2328] TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 2025-04-07T14:13:14.351375Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.351588Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 235us result status StatusSuccess 2025-04-07T14:13:14.351983Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: 
"ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.352686Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.352871Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 198us result status StatusSuccess 2025-04-07T14:13:14.353113Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.353722Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.353845Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 139us result status StatusSuccess 2025-04-07T14:13:14.355029Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 
72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.355547Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.355709Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 180us result status StatusSuccess 2025-04-07T14:13:14.355943Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.356391Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.356534Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 153us result status StatusSuccess 2025-04-07T14:13:14.356776Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription 
{ Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-07T14:12:58.825488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574055642396343:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.825694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb2/r3tmp/tmpws6pla/pdisk_1.dat 2025-04-07T14:12:59.101156Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.125918Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.207779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:59.371094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.371188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.372191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.372251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.377501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:59.383031Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.383198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.389244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11282, node 1 
2025-04-07T14:12:59.601254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bb2/r3tmp/yandex5sng1Z.tmp 2025-04-07T14:12:59.601280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bb2/r3tmp/yandex5sng1Z.tmp 2025-04-07T14:12:59.601400Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bb2/r3tmp/yandex5sng1Z.tmp 2025-04-07T14:12:59.601627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.814290Z INFO: TTestServer started on Port 28406 GrpcPort 11282 TClient is connected to server localhost:28406 PQClient connected to localhost:11282 === TenantModeEnabled() = 1 === Init PQ - start server on port 11282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.407736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:13:00.408016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.408332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:13:00.408714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:00.408918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.411590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.411930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:00.412106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.412173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:00.412195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:13:00.412216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:13:00.414192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.414275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:00.414295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:00.416101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.416132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.416160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.416199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 
2025-04-07T14:13:00.420764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:00.421289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.421308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:13:00.421326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:00.422644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:13:00.422782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:00.425433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035180468, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:00.425623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574059937364103 RawX2: 4294969651 } } Step: 1744035180468 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:13:00.425653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.425941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:00.425977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:00.426172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:13:00.426216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:00.427790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:00.427821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:00.427977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:00.427996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574059937364145:2394], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:13:00.428027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:00.428048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:00.428139Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:00.428154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.428176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:13:00.428195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:13:00.428212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:00.428228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:13:00.428266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:13:00.428283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:00.428302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:13:00.439208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7 ... axSeqNo: 0 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 2025-04-07T14:13:12.072413Z :INFO: [] MessageGroupId [1236] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744035192072 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:12.072472Z :INFO: [] MessageGroupId [1236] SessionId [] Write session established. Init response: session_id: "1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0" topic: "PQ/account3/folder1/folder2/topic" 2025-04-07T14:13:12.074128Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write 1 messages with Id from 1 to 1 2025-04-07T14:13:12.075084Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: try to update token 2025-04-07T14:13:12.075113Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-07T14:13:12.077352Z :INFO: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: close. 
Timeout = 10000 ms 2025-04-07T14:13:12.081443Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T14:13:12.082725Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T14:13:12.082959Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:12.082986Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-07T14:13:12.083052Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-07T14:13:12.083101Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:13:12.083364Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-07T14:13:12.083386Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-07T14:13:12.083884Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-04-07T14:13:12.084344Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-04-07T14:13:12.084503Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-04-07T14:13:12.084521Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-04-07T14:13:12.084880Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-04-07T14:13:12.708818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 8] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:13:12.749103Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". 
Partition: 0: Cookie: 7 2025-04-07T14:13:12.749319Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-04-07T14:13:12.749366Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-04-07T14:13:12.749397Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-04-07T14:13:12.763619Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-04-07T14:13:12.764649Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1744035192764 2025-04-07T14:13:12.765948Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:13:12.788300Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:13:12.788351Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:13:12.788393Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-04-07T14:13:12.788425Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:13:12.788447Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-04-07T14:13:12.788483Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T14:13:12.788509Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-04-07T14:13:12.793162Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T14:13:12.793288Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:13:12.800511Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 35 queued_in_partition_duration_ms: 664 throttled_on_partition_duration_ms: 664 } 2025-04-07T14:13:12.800565Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: acknoledged message 1 2025-04-07T14:13:12.808815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T14:13:12.808944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 8 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-07T14:13:12.809013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], pathId map=user, is column=0, is olap=0 2025-04-07T14:13:12.809037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 8: RowCount 0, DataSize 0 2025-04-07T14:13:12.809344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T14:13:12.851523Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574115771941452:2579], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:12.853531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgxMmE1Mi0zMDIxOWQ1YS05YjdlMTAyOC04OGJhMTJjOA==, ActorId: [1:7490574115771941450:2578], ActorState: ExecuteState, TraceId: 01jr89j7zv4d98at5c0s8a2vrz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:12.857283Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:13:12.882435Z :INFO: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session will now close 2025-04-07T14:13:12.882503Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: aborting 2025-04-07T14:13:12.882958Z :INFO: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:13:12.882995Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0] Write session: destroy DURATION 2.930595s 2025-04-07T14:13:12.893343Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 grpc read done: success: 0 data: 2025-04-07T14:13:12.893372Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 grpc read failed 2025-04-07T14:13:12.893396Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 grpc closed 2025-04-07T14:13:12.893409Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|bab039da-a44d8ca9-7397f9ab-4a2b09de_0 is DEAD 2025-04-07T14:13:12.894275Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:12.894795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:12.894833Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7490574115771941433:2575] destroyed 2025-04-07T14:13:12.894886Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:13:12.933672Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:12.999543Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037891] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:13.298534Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvPQ::TEvPartitionCounters PartitionId 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:13.934335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:13.934434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.934468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:13.934495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:13.934536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:13.934562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:13.934607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.934889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:14.000727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:14.000797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:14.011851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:14.011981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:14.012131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:14.017415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:14.017581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:14.018102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:14.018273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:14.019941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:14.020963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:14.021020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-07T14:13:14.021172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:14.021210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:14.021244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:14.021364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.027159Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:14.142969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:14.143175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.143402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:14.143630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:14.143679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.145797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:14.145924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:14.146079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.146125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:14.146169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:14.146202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:14.147988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.148036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:14.148067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:14.149580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.149620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.149653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:14.149722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:14.153063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:14.154753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:14.154910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:14.155912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:14.156044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.156093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:14.156321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:14.156368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:14.156539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:14.156611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:14.158606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:14.158661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:14.158804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:14.158838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:14.159050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.159090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:14.159191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:14.159240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:14.159285Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:14.159321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:14.159359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:14.159389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:14.159447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:14.159491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:14.159524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:14.161295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:14.161391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:14.161450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:14.161482Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:14.161529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:14.161620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
46678944 2025-04-07T14:13:14.604469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2025-04-07T14:13:14.604548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:14.604907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:13:14.604975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:13:14.610041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-04-07T14:13:14.610198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-07T14:13:14.610431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:14.610473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:14.610631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:13:14.610720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:14.610755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2401], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-07T14:13:14.610793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2401], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-07T14:13:14.610953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.611032Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:14.611083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-04-07T14:13:14.611233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:14.611998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.612081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.612114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-07T14:13:14.612149Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-07T14:13:14.612181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:13:14.613871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.613975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.614010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-07T14:13:14.614041Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-04-07T14:13:14.614067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:13:14.614142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-04-07T14:13:14.615870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-04-07T14:13:14.616057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:14.616582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-04-07T14:13:14.619306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:14.619435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.619488Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-07T14:13:14.619699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-04-07T14:13:14.619853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:13:14.619910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:13:14.620269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Erasing txId 128 2025-04-07T14:13:14.621987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-07T14:13:14.622021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:14.622151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-07T14:13:14.622214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:14.622242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2401], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-07T14:13:14.622272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:440:2401], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-07T14:13:14.622321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.622354Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-04-07T14:13:14.622486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-07T14:13:14.622529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-07T14:13:14.622578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-04-07T14:13:14.622619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-07T14:13:14.622655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-04-07T14:13:14.622704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-04-07T14:13:14.622765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:13:14.622798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-04-07T14:13:14.622852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-07T14:13:14.622888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-04-07T14:13:14.623707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.623780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.623807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-04-07T14:13:14.623840Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-07T14:13:14.623879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:13:14.625609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.625685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-07T14:13:14.625709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-04-07T14:13:14.625732Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-07T14:13:14.625774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:13:14.625834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-04-07T14:13:14.629048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-07T14:13:14.629311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:13.602200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:13.602326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.602370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:13.602408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:13.602452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:13.602481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:13.602531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:13.602853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:13.671581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:13.671644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:13.683610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:13.683765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:13.683915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:13.690393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:13.690614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:13.691246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.691448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:13.694035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.695337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.695417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.695556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:13.695624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.695662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:13.695838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.702423Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:13.817200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:13.817454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.817685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:13.817900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:13.818033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.820239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.820386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:13.820547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.820597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:13.820627Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:13.820691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:13.822513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.822561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:13.822603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:13.824051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.824089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.824143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.824213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.833711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:13.835870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:13.836048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:13.837135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:13.837347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:13.837401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.837754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:13.837807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:13.837988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.838069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:13.840208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:13.840247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:13.840410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:13.840445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:13.840685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:13.840729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:13.840822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:13.840862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.840922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:13.840962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:13.840993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:13.841021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:13.841095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:13.841143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:13.841177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:13.843051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.843143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:13.843176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:13.843211Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:13.843261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:13.843360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
d: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.544276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.544310Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:14.544345Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-07T14:13:14.544384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:14.545476Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.545555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.545578Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:14.545601Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T14:13:14.545626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:14.545724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:13:14.547591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:13:14.547701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:13:14.548537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:14.548677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:14.548729Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-07T14:13:14.548810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:14.548873Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:13:14.549013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:14.549080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:13:14.549435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:14.549775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:13:14.550968Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:14.550999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:14.551099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:13:14.551194Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:14.551228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-07T14:13:14.551301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:13:14.551519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:13:14.551554Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:13:14.551648Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:13:14.551676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:13:14.551726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-07T14:13:14.551764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:13:14.551794Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:13:14.551821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:13:14.551883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:14.551916Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-07T14:13:14.551942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-07T14:13:14.551976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:13:14.552320Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.552379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.552405Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:14.552435Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:13:14.552471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:13:14.552635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:13:14.552718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:13:14.552779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:14.553101Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.553156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:14.553179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:14.553199Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:13:14.553219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:14.553273Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-07T14:13:14.556345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:14.556435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:13:14.556502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:13:14.556758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:13:14.556804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:13:14.557168Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:13:14.557245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:13:14.557276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter 
[2:328:2320] TestWaitNotification: OK eventTxId 102 2025-04-07T14:13:14.557702Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:14.557855Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 184us result status StatusPathDoesNotExist 2025-04-07T14:13:14.558002Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:15.282941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:15.283061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:15.283105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:15.283137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:15.283183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:15.283212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:15.283265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:15.283606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:15.357651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:15.357708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:15.371768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:15.371904Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:15.372045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:15.380114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:15.380328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:15.380921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:15.381128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:15.382952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:15.384112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:15.384162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:15.384289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:15.384331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:15.384363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:15.384506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.390646Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:15.535848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:15.536090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.536531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:15.536787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:15.536848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.539231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:15.539387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:15.539593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.539649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:15.539682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:15.539718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:15.541717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.541783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:15.541820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:15.544394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.544436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.544477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:15.544563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:15.558842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:15.561107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:15.561299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:15.562314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:15.562472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:15.562561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:15.562815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:15.562871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:15.563048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:15.563124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:15.565320Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:15.565377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:15.565553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:15.565594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:15.565812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.565856Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:15.565963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:15.566000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:15.566058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:15.566100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:15.566134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:15.566162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:15.566223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:15.566271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:15.566304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:15.568314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:15.568425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:15.568458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:15.568494Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:15.568535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:15.568637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
Id: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.662994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.663025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:13:15.663056Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-07T14:13:15.663104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:15.664094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.664184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.664219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:13:15.664258Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-07T14:13:15.664294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:15.664373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:13:15.666118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-07T14:13:15.666247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-07T14:13:15.666848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:15.666949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:15.666996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-04-07T14:13:15.667077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:15.667137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:13:15.667293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:15.667357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:13:15.668182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:13:15.668498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-07T14:13:15.669882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:15.669913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:15.670016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:13:15.670129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:15.670159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-07T14:13:15.670209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:13:15.670385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:13:15.670438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:13:15.670540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:13:15.670584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:13:15.670623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:13:15.670657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:13:15.670688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:13:15.670716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:13:15.670777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:13:15.670808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-07T14:13:15.670836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-07T14:13:15.670862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-07T14:13:15.671283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.671345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.671379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:13:15.671416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:13:15.671504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:13:15.671755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:13:15.671791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:13:15.671840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:15.672052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.672105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:13:15.672132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:13:15.672156Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-07T14:13:15.672180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:15.672238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-07T14:13:15.675435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:13:15.675547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-07T14:13:15.675619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:13:15.675855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:13:15.675893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:13:15.676262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:13:15.676335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:13:15.676384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter 
[1:384:2376] TestWaitNotification: OK eventTxId 104 2025-04-07T14:13:15.676932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:13:15.677115Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 203us result status StatusPathDoesNotExist 2025-04-07T14:13:15.677269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |95.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-07T14:13:05.281605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574085312012113:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.281656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00185e/r3tmp/tmptwd59W/pdisk_1.dat 2025-04-07T14:13:05.899189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.899261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.918101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:05.950586Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8082 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T14:13:06.273894Z node 1 :TX_PROXY DEBUG: actor# [1:7490574085312012285:2110] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:06.273948Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574089606980111:2438] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:06.274065Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574085312012389:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:06.274107Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574085312012389:2137], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:06.274327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:06.276068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012047:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089606980116:2439] 2025-04-07T14:13:06.276127Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574085312012047:2050] Subscribe: subscriber# [1:7490574089606980116:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.276173Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012053:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089606980118:2439] 2025-04-07T14:13:06.276188Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574085312012053:2056] Subscribe: subscriber# [1:7490574089606980118:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.276229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980116:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574085312012047:2050] 2025-04-07T14:13:06.276249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980118:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574085312012053:2056] 2025-04-07T14:13:06.276281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089606980113:2439] 2025-04-07T14:13:06.276306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089606980115:2439] 2025-04-07T14:13:06.276355Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574089606980112:2439][/dc-1] Set up state: owner# [1:7490574085312012389:2137], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.276459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980116:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7490574089606980113:2439], cookie# 1 2025-04-07T14:13:06.276473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980117:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089606980114:2439], cookie# 1 2025-04-07T14:13:06.276485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980118:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089606980115:2439], cookie# 1 2025-04-07T14:13:06.276513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012047:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089606980116:2439] 2025-04-07T14:13:06.276536Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012047:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089606980116:2439], cookie# 1 2025-04-07T14:13:06.276556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012053:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089606980118:2439] 2025-04-07T14:13:06.276567Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012053:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089606980118:2439], cookie# 1 2025-04-07T14:13:06.279335Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012050:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089606980117:2439] 2025-04-07T14:13:06.279431Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574085312012050:2053] Subscribe: subscriber# [1:7490574089606980117:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.279502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012050:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089606980117:2439], cookie# 1 2025-04-07T14:13:06.279563Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980116:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574085312012047:2050], cookie# 1 2025-04-07T14:13:06.279584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980118:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574085312012053:2056], cookie# 1 2025-04-07T14:13:06.279610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980117:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574085312012050:2053] 2025-04-07T14:13:06.279628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089606980117:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574085312012050:2053], cookie# 1 2025-04-07T14:13:06.279659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089606980113:2439], cookie# 1 2025-04-07T14:13:06.279679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:06.279696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089606980115:2439], cookie# 1 
2025-04-07T14:13:06.279714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:06.279755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089606980114:2439] 2025-04-07T14:13:06.279821Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574089606980112:2439][/dc-1] Path was already updated: owner# [1:7490574085312012389:2137], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.279845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089606980114:2439], cookie# 1 2025-04-07T14:13:06.279881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089606980112:2439][/dc-1] Unexpected sync response: sender# [1:7490574089606980114:2439], cookie# 1 2025-04-07T14:13:06.279923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574085312012050:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089606980117:2439] 2025-04-07T14:13:06.328526Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574085312012389:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:06.328980Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574085312012389:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 4-07T14:13:14.766009Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862728:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765776:2738] 2025-04-07T14:13:14.766018Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490574112405863037:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T14:13:14.766021Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862731:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765777:2738] 2025-04-07T14:13:14.766043Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574125290765769:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574112405862725:2050] 2025-04-07T14:13:14.766071Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490574112405863037:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490574125290765756:2738] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:14.766073Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574125290765771:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574112405862731:2056] 2025-04-07T14:13:14.766106Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574125290765770:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574112405862728:2053] 2025-04-07T14:13:14.766120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574112405863037:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574125290765756:2738] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:14.766140Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574125290765765:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574125290765766:2740] 2025-04-07T14:13:14.766168Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574125290765765:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574125290765768:2740] 2025-04-07T14:13:14.766187Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490574125290765765:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7490574112405863037:2125], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:14.766199Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574125290765778:2741], recipient# [3:7490574125290765753:2282], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:14.766223Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574125290765765:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574125290765767:2740] 2025-04-07T14:13:14.766237Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862725:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7490574125290765775:2738] 2025-04-07T14:13:14.766245Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574112405862725:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T14:13:14.766247Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490574125290765765:2740][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7490574112405863037:2125], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:14.766265Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862728:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765770:2740] 2025-04-07T14:13:14.766272Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574112405862725:2050] Subscribe: subscriber# [3:7490574125290765775:2738], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:14.766304Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862725:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765769:2740] 2025-04-07T14:13:14.766304Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490574112405863037:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-07T14:13:14.766321Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862731:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765771:2740] 2025-04-07T14:13:14.766350Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574125290765775:2738][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490574112405862725:2050] 2025-04-07T14:13:14.766362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490574112405863037:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490574125290765765:2740] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:14.766399Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574125290765756:2738][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7490574125290765772:2738] 2025-04-07T14:13:14.766431Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574112405863037:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574125290765765:2740] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:14.766435Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490574125290765756:2738][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7490574112405863037:2125], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:14.766457Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574112405862725:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574125290765775:2738] 2025-04-07T14:13:14.766488Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574125290765779:2742], recipient# [3:7490574125290765757:2284], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:14.833601Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574112405863037:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:14.833751Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574112405863037:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574116700831123:2725] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:14.833870Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574125290765781:2743], recipient# [3:7490574125290765780:2285], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> KeyValueReadStorage::ReadOk >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> TKeyValueTest::TestRenameWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2025-04-07T14:13:05.149958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574086872617878:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.150023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017f8/r3tmp/tmpsOAwij/pdisk_1.dat 2025-04-07T14:13:05.695080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.695166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.705694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:05.737180Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25431 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T14:13:06.087454Z node 1 :TX_PROXY DEBUG: actor# [1:7490574086872617892:2113] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:06.087500Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574091167585660:2438] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:06.087643Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574086872617917:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:06.087702Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574086872617917:2127], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:06.087892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:06.089917Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617597:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574091167585665:2439] 2025-04-07T14:13:06.089966Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086872617597:2050] Subscribe: subscriber# [1:7490574091167585665:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.090018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617600:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574091167585666:2439] 2025-04-07T14:13:06.090027Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086872617600:2053] Subscribe: subscriber# [1:7490574091167585666:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.090040Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617603:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574091167585667:2439] 2025-04-07T14:13:06.090048Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086872617603:2056] Subscribe: subscriber# [1:7490574091167585667:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.090160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585665:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086872617597:2050] 2025-04-07T14:13:06.090174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585666:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086872617600:2053] 2025-04-07T14:13:06.090188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585667:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086872617603:2056] 2025-04-07T14:13:06.090214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574091167585662:2439] 2025-04-07T14:13:06.090228Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574091167585663:2439] 2025-04-07T14:13:06.090281Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574091167585661:2439][/dc-1] Set up state: owner# [1:7490574086872617917:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.090355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574091167585664:2439] 2025-04-07T14:13:06.090391Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574091167585661:2439][/dc-1] Path was already updated: owner# [1:7490574086872617917:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.090421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585665:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585662:2439], cookie# 1 2025-04-07T14:13:06.090430Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585666:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585663:2439], cookie# 1 2025-04-07T14:13:06.090437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585667:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585664:2439], cookie# 1 2025-04-07T14:13:06.090456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617597:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574091167585665:2439] 2025-04-07T14:13:06.090472Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617597:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585665:2439], cookie# 1 2025-04-07T14:13:06.090484Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617600:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574091167585666:2439] 2025-04-07T14:13:06.090491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617600:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585666:2439], cookie# 1 2025-04-07T14:13:06.090498Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617603:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574091167585667:2439] 2025-04-07T14:13:06.090505Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086872617603:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574091167585667:2439], cookie# 1 2025-04-07T14:13:06.102764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585665:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086872617597:2050], cookie# 1 2025-04-07T14:13:06.102855Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585666:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086872617600:2053], cookie# 1 2025-04-07T14:13:06.102868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574091167585667:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086872617603:2056], cookie# 1 2025-04-07T14:13:06.102898Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574091167585662:2439], cookie# 1 2025-04-07T14:13:06.102918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:06.102931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574091167585663:2439], cookie# 1 2025-04-07T14:13:06.102953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:06.102972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574091167585664:2439], cookie# 1 2025-04-07T14:13:06.102985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574091167585661:2439][/dc-1] Unexpected sync response: sender# [1:7490574091167585664:2439], cookie# 1 2025-04-07T14:13:06.138434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574086872617917:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:06.138815Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574086872617917:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... R_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.025830Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.026015Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479739:2302], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.026054Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479740:2303], recipient# [4:7490574131261479738:2304], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.026698Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:16.026846Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:13:16.082068Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.082192Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479741:2304], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.088879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:16.175162Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.175310Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479742:2305], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.175512Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:16.264505Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.264682Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479743:2306], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.264864Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:16.337630Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.337788Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479744:2307], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.337951Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:16.424032Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574114081610203:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.424213Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574131261479745:2308], recipient# [4:7490574131261479737:2303], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.424394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7490574131261479737:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TKeyValueTest::TestCopyRangeWorks >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TSubDomainTest::GenericCases [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> THiveTest::TestLockTabletExecutionDeleteReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2025-04-07T14:13:05.449250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574086075280964:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.458411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:13:05.634308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574084612310267:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.634353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017e6/r3tmp/tmp8wocdB/pdisk_1.dat 2025-04-07T14:13:06.316005Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:06.504737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:06.685315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:06.703258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:06.703370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:06.709381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:06.709487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:06.709673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:06.711020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:06.711070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:06.750456Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:13:06.750627Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:13:06.897499Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:06.913136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:06.913508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62305 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T14:13:07.045423Z node 1 :TX_PROXY DEBUG: actor# [1:7490574086075280977:2141] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:07.045480Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574094665216035:2458] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:07.045616Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574090370248297:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:07.045740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7490574090370248297:2155], cookie# 1 2025-04-07T14:13:07.047384Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248431:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248428:2211], cookie# 1 2025-04-07T14:13:07.047426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248432:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248429:2211], cookie# 1 2025-04-07T14:13:07.047447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248433:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248430:2211], cookie# 1 2025-04-07T14:13:07.047484Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086075280639:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248432:2211], cookie# 1 2025-04-07T14:13:07.047496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086075280636:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248431:2211], cookie# 1 2025-04-07T14:13:07.047539Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086075280642:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090370248433:2211], cookie# 1 2025-04-07T14:13:07.047565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248432:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086075280639:2056], cookie# 1 2025-04-07T14:13:07.047582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248431:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086075280636:2053], cookie# 1 2025-04-07T14:13:07.047594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090370248433:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086075280642:2059], cookie# 1 2025-04-07T14:13:07.047677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7490574090370248429:2211], cookie# 1 2025-04-07T14:13:07.047698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:07.047724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574090370248428:2211], cookie# 1 2025-04-07T14:13:07.047774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:07.047819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574090370248430:2211], cookie# 1 2025-04-07T14:13:07.047833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090370248410:2211][/dc-1] Unexpected sync response: sender# [1:7490574090370248430:2211], cookie# 1 2025-04-07T14:13:07.047895Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574090370248297:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-07T14:13:07.073390Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574090370248297:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490574090370248410:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:07.075735Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490574090370248297:2155], cacheItem# { Subscriber: { Subscriber: [1:7490574090370248410:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-07T14:13:07.078602Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490574094665216036:2459], recipient# [1:7490574094665216035:2458], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:07.078702Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574094665216035:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:13:07.149663Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7490574094665216035:2458] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-07T14:13:07.152312Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574094665216035:2458] Handle TEvDescribeSchemeResult Forward to# [1:7490574094665216034:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVers ... 
14975127:2855][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [6:7490574125914975141:2855] 2025-04-07T14:13:15.597152Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7490574125914975126:2854][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7490574125914975135:2854] 2025-04-07T14:13:15.597173Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7490574125914975127:2855][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [6:7490574113030072313:2148], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:15.597188Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7490574125914975126:2854][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [6:7490574113030072313:2148], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:15.597195Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7490574125914975127:2855][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [6:7490574125914975142:2855] 2025-04-07T14:13:15.597210Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7490574125914975126:2854][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7490574125914975136:2854] 2025-04-07T14:13:15.597219Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7490574125914975127:2855][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [6:7490574113030072313:2148], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:15.597237Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7490574125914975126:2854][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7490574113030072313:2148], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:15.597239Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071964:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7490574125914975143:2855] 2025-04-07T14:13:15.597255Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071964:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7490574125914975137:2854] 2025-04-07T14:13:15.597268Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071967:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7490574125914975144:2855] 2025-04-07T14:13:15.597273Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071970:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7490574125914975139:2854] 2025-04-07T14:13:15.597285Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071970:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[6:7490574125914975145:2855] 2025-04-07T14:13:15.597286Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7490574113030071967:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7490574125914975138:2854] 2025-04-07T14:13:15.597355Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7490574113030072313:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-07T14:13:15.597444Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7490574113030072313:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7490574125914975127:2855] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:15.597518Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574113030072313:2148], cacheItem# { Subscriber: { Subscriber: [6:7490574125914975127:2855] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:15.597603Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7490574113030072313:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-07T14:13:15.597655Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7490574113030072313:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7490574125914975126:2854] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:15.597705Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574113030072313:2148], cacheItem# { Subscriber: { Subscriber: [6:7490574125914975126:2854] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:15.597806Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574125914975146:2856], recipient# [6:7490574125914975124:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:15.597872Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574125914975147:2857], recipient# [6:7490574125914975122:2284], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.533841Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7490574113030072313:2148], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.533985Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574113030072313:2148], cacheItem# { Subscriber: { Subscriber: [6:7490574117325040487:2837] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.534090Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574130209942454:2861], recipient# [6:7490574130209942453:2287], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.598728Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7490574113030072313:2148], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.598856Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574113030072313:2148], cacheItem# { Subscriber: { Subscriber: [6:7490574125914975127:2855] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } 
Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.598940Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574130209942456:2862], recipient# [6:7490574130209942455:2288], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-04-07T14:13:18.087277Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.090181Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-07T14:13:18.100128Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.100217Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-07T14:13:18.106826Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-04-07T14:13:18.106892Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-04-07T14:13:18.106949Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-04-07T14:13:18.037550Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.039972Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-07T14:13:18.049003Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.049059Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-07T14:13:18.056046Z 1 00h00m00.000000s :KEYVALUE INFO: 
{KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.056172Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-04-07T14:13:18.055878Z ErrorReason# >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-04-07T14:13:08.269840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574095654693505:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:08.269929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017d1/r3tmp/tmpTvlbpR/pdisk_1.dat 2025-04-07T14:13:08.784202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:08.821685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:08.821775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:08.826092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1979 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T14:13:09.037638Z node 1 :TX_PROXY DEBUG: actor# [1:7490574095654693530:2120] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:09.037674Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574099949661278:2430] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:09.037798Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574095654693571:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.037827Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574095654693571:2142], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:09.037997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:09.039646Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693224:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574099949661283:2431] 2025-04-07T14:13:09.039658Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693227:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574099949661284:2431] 2025-04-07T14:13:09.039693Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574095654693224:2050] Subscribe: subscriber# [1:7490574099949661283:2431], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:09.039728Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574095654693227:2053] Subscribe: subscriber# [1:7490574099949661284:2431], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:09.039773Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693230:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574099949661285:2431] 2025-04-07T14:13:09.039790Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574095654693230:2056] Subscribe: subscriber# [1:7490574099949661285:2431], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:09.039802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661283:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574095654693224:2050] 2025-04-07T14:13:09.039829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693224:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574099949661283:2431] 2025-04-07T14:13:09.039842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661284:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574095654693227:2053] 2025-04-07T14:13:09.039863Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693227:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574099949661284:2431] 2025-04-07T14:13:09.039864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661285:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7490574095654693230:2056] 2025-04-07T14:13:09.039894Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693230:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574099949661285:2431] 2025-04-07T14:13:09.039913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574099949661280:2431] 2025-04-07T14:13:09.039940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574099949661281:2431] 2025-04-07T14:13:09.040018Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574099949661279:2431][/dc-1] Set up state: owner# [1:7490574095654693571:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:09.040110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574099949661282:2431] 2025-04-07T14:13:09.040150Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574099949661279:2431][/dc-1] Path was already updated: owner# [1:7490574095654693571:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:09.040212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661283:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661280:2431], cookie# 1 2025-04-07T14:13:09.040227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661284:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661281:2431], cookie# 1 2025-04-07T14:13:09.040239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661285:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661282:2431], cookie# 1 2025-04-07T14:13:09.040499Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693224:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661283:2431], cookie# 1 2025-04-07T14:13:09.040595Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693227:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661284:2431], cookie# 1 2025-04-07T14:13:09.040609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574095654693230:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574099949661285:2431], cookie# 1 2025-04-07T14:13:09.040632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661283:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574095654693224:2050], cookie# 1 2025-04-07T14:13:09.040645Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7490574099949661284:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574095654693227:2053], cookie# 1 2025-04-07T14:13:09.040657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574099949661285:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574095654693230:2056], cookie# 1 2025-04-07T14:13:09.040701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574099949661280:2431], cookie# 1 2025-04-07T14:13:09.040716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:09.040728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574099949661281:2431], cookie# 1 2025-04-07T14:13:09.040744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:09.040764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574099949661282:2431], cookie# 1 2025-04-07T14:13:09.040779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574099949661279:2431][/dc-1] Unexpected sync response: sender# [1:7490574099949661282:2431], cookie# 1 2025-04-07T14:13:09.090892Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574095654693571:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:09.091271Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574095654693571:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 5-04-07T14:13:16.401565Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490574119515801057:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T14:13:16.401613Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490574119515801057:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490574132400703543:2541] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:16.401661Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574119515801057:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574132400703543:2541] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.401768Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574132400703563:2544], recipient# [3:7490574132400703541:2280], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.401823Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800742:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7490574132400703562:2543] 2025-04-07T14:13:16.401832Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800742:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-07T14:13:16.401859Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800742:2050] Subscribe: subscriber# [3:7490574132400703562:2543], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.401887Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800745:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: 
/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7490574132400703564:2543] 2025-04-07T14:13:16.401896Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800745:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-07T14:13:16.401916Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800745:2053] Subscribe: subscriber# [3:7490574132400703564:2543], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.401936Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574132400703562:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574119515800742:2050] 2025-04-07T14:13:16.401939Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800748:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7490574132400703565:2543] 2025-04-07T14:13:16.401946Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800748:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-07T14:13:16.401953Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7490574132400703564:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574119515800745:2053] 2025-04-07T14:13:16.401997Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7490574119515800748:2056] Subscribe: subscriber# [3:7490574132400703565:2543], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.402001Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574132400703558:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574132400703559:2543] 2025-04-07T14:13:16.402020Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800742:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574132400703562:2543] 2025-04-07T14:13:16.402030Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574132400703558:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574132400703560:2543] 2025-04-07T14:13:16.402032Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800745:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574132400703564:2543] 2025-04-07T14:13:16.402048Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7490574132400703558:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7490574119515801057:2125], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:16.402073Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][3:7490574132400703565:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574119515800748:2056] 2025-04-07T14:13:16.402102Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7490574132400703558:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7490574132400703561:2543] 2025-04-07T14:13:16.402107Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7490574119515801057:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-07T14:13:16.402124Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7490574132400703558:2543][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7490574119515801057:2125], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:16.402139Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7490574119515800748:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7490574132400703565:2543] 2025-04-07T14:13:16.402151Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7490574119515801057:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7490574132400703558:2543] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:16.402196Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574119515801057:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574132400703558:2543] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.402239Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574132400703566:2545], recipient# [3:7490574132400703551:2282], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.656212Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574119515801057:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.656342Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574119515801057:2125], cacheItem# { Subscriber: { Subscriber: [3:7490574123810768915:2530] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.656538Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574132400703571:2546], recipient# [3:7490574132400703570:2283], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-04-07T14:13:18.674661Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-07T14:13:18.677044Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-07T14:13:05.373558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574084906037925:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.373843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001817/r3tmp/tmp6AalEF/pdisk_1.dat 2025-04-07T14:13:06.031640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:06.031711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:06.038330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:06.073709Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3580 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T14:13:06.347507Z node 1 :TX_PROXY DEBUG: actor# [1:7490574084906038003:2138] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:06.347560Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574089201005738:2439] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:06.347660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574084906038027:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:06.347690Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574084906038027:2151], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:06.347854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:06.358383Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037672:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089201005744:2440] 2025-04-07T14:13:06.358461Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574084906037672:2054] Subscribe: subscriber# [1:7490574089201005744:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.358551Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037675:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089201005745:2440] 2025-04-07T14:13:06.358575Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574084906037675:2057] Subscribe: subscriber# [1:7490574089201005745:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.358627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005744:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084906037672:2054] 2025-04-07T14:13:06.358653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005745:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084906037675:2057] 2025-04-07T14:13:06.358701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089201005741:2440] 2025-04-07T14:13:06.358738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089201005742:2440] 2025-04-07T14:13:06.358784Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574089201005739:2440][/dc-1] Set up state: owner# [1:7490574084906038027:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.358893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005743:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7490574089201005740:2440], cookie# 1 2025-04-07T14:13:06.358910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005744:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089201005741:2440], cookie# 1 2025-04-07T14:13:06.358925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005745:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089201005742:2440], cookie# 1 2025-04-07T14:13:06.358984Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037672:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089201005744:2440] 2025-04-07T14:13:06.359018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037672:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089201005744:2440], cookie# 1 2025-04-07T14:13:06.359040Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037675:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089201005745:2440] 2025-04-07T14:13:06.359070Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037675:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089201005745:2440], cookie# 1 2025-04-07T14:13:06.362010Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037669:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574089201005743:2440] 2025-04-07T14:13:06.362060Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574084906037669:2051] Subscribe: subscriber# [1:7490574089201005743:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.362100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037669:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574089201005743:2440], cookie# 1 2025-04-07T14:13:06.362154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005744:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084906037672:2054], cookie# 1 2025-04-07T14:13:06.362173Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005745:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084906037675:2057], cookie# 1 2025-04-07T14:13:06.362228Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005743:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574084906037669:2051] 2025-04-07T14:13:06.362254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574089201005743:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574084906037669:2051], cookie# 1 2025-04-07T14:13:06.362291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089201005741:2440], cookie# 1 2025-04-07T14:13:06.362309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:06.362323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089201005742:2440], cookie# 1 
2025-04-07T14:13:06.362346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:06.362379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574089201005740:2440] 2025-04-07T14:13:06.362437Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574089201005739:2440][/dc-1] Path was already updated: owner# [1:7490574084906038027:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.362460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574089201005740:2440], cookie# 1 2025-04-07T14:13:06.362473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574089201005739:2440][/dc-1] Unexpected sync response: sender# [1:7490574089201005740:2440], cookie# 1 2025-04-07T14:13:06.362565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574084906037669:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574089201005743:2440] 2025-04-07T14:13:06.411932Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574084906038027:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:06.412453Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574084906038027:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.250592Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574133387850730:3017], recipient# [4:7490574133387850714:2304], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:16.250640Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:16.250990Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980014:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [4:7490574133387850734:3014] 2025-04-07T14:13:16.251001Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980014:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T14:13:16.251032Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980014:2050] Subscribe: subscriber# [4:7490574133387850734:3014], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.251059Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980017:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [4:7490574133387850735:3014] 2025-04-07T14:13:16.251066Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980017:2053] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T14:13:16.251096Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980017:2053] Subscribe: subscriber# [4:7490574133387850735:3014], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.251126Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980020:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [4:7490574133387850736:3014] 2025-04-07T14:13:16.251135Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980020:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-07T14:13:16.251153Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7490574116207980020:2056] Subscribe: subscriber# [4:7490574133387850736:3014], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:16.251182Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7490574133387850734:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574116207980014:2050] 2025-04-07T14:13:16.251204Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][4:7490574133387850735:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574116207980017:2053] 2025-04-07T14:13:16.251231Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7490574133387850736:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574116207980020:2056] 2025-04-07T14:13:16.251267Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574133387850731:3014] 2025-04-07T14:13:16.251296Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574133387850732:3014] 2025-04-07T14:13:16.251340Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [4:7490574120502947787:2216], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:16.251359Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7490574133387850733:3014] 2025-04-07T14:13:16.251380Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7490574133387850715:3014][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7490574120502947787:2216], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:16.251400Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980014:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7490574133387850734:3014] 2025-04-07T14:13:16.251415Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980017:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7490574133387850735:3014] 2025-04-07T14:13:16.251439Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7490574116207980020:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7490574133387850736:3014] 2025-04-07T14:13:16.251482Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7490574120502947787:2216], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-07T14:13:16.251541Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7490574120502947787:2216], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7490574133387850715:3014] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:16.251634Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for 
TNavigate: self# [4:7490574120502947787:2216], cacheItem# { Subscriber: { Subscriber: [4:7490574133387850715:3014] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:16.251715Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574133387850737:3018], recipient# [4:7490574133387850713:2303], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:17.079564Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574120502947787:2216], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:17.079710Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7490574120502947787:2216], cacheItem# { Subscriber: { Subscriber: [4:7490574124797915515:2539] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:17.079798Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574137682818038:3022], recipient# [4:7490574137682818037:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:17.257821Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574120502947787:2216], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:17.257963Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7490574120502947787:2216], cacheItem# { Subscriber: { Subscriber: [4:7490574133387850717:3016] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:17.258052Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574137682818046:3023], recipient# [4:7490574137682818045:2306], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestExternalBoot >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> TKeyValueTest::TestWriteReadPatchRead >> TSubDomainTest::ConsistentCopyTable [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet >> TKeyValueTest::TestConcatWorks >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-04-07T14:13:05.063390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574083682108446:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.063445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00183c/r3tmp/tmp91zf1x/pdisk_1.dat 2025-04-07T14:13:05.511127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:05.517381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.517448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.527685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9244 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-07T14:13:05.755591Z node 1 :TX_PROXY DEBUG: actor# [1:7490574083682108457:2100] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:05.755645Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574083682108730:2252] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:05.755817Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574083682108480:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:05.755888Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574083682108480:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:05.756115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:05.758305Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140881:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574083682108735:2253] 2025-04-07T14:13:05.758382Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574079387140881:2049] Subscribe: subscriber# [1:7490574083682108735:2253], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.758468Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140887:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574083682108737:2253] 2025-04-07T14:13:05.758491Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574079387140887:2055] Subscribe: subscriber# [1:7490574083682108737:2253], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.758561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108735:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574079387140881:2049] 2025-04-07T14:13:05.758593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108737:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574079387140887:2055] 2025-04-07T14:13:05.758655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574083682108732:2253] 2025-04-07T14:13:05.758702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574083682108734:2253] 2025-04-07T14:13:05.758775Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574083682108731:2253][/dc-1] Set up state: owner# [1:7490574083682108480:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.758867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140884:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: 
sender# [1:7490574083682108736:2253] 2025-04-07T14:13:05.758938Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574079387140884:2052] Subscribe: subscriber# [1:7490574083682108736:2253], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:05.759065Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140881:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574083682108735:2253] 2025-04-07T14:13:05.759095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140887:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574083682108737:2253] 2025-04-07T14:13:05.760116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108736:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574079387140884:2052] 2025-04-07T14:13:05.760172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108735:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108732:2253], cookie# 1 2025-04-07T14:13:05.760194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108736:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108733:2253], cookie# 1 2025-04-07T14:13:05.760224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108737:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108734:2253], cookie# 1 2025-04-07T14:13:05.760267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574083682108733:2253] 2025-04-07T14:13:05.760325Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574083682108731:2253][/dc-1] Path was already updated: owner# [1:7490574083682108480:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:05.762423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140884:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574083682108736:2253] 2025-04-07T14:13:05.762467Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140884:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108736:2253], cookie# 1 2025-04-07T14:13:05.762502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140881:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108735:2253], cookie# 1 2025-04-07T14:13:05.762552Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574079387140887:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574083682108737:2253], cookie# 1 2025-04-07T14:13:05.762602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108736:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574079387140884:2052], cookie# 1 2025-04-07T14:13:05.762627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7490574083682108735:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574079387140881:2049], cookie# 1 2025-04-07T14:13:05.762641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574083682108737:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574079387140887:2055], cookie# 1 2025-04-07T14:13:05.762695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574083682108733:2253], cookie# 1 2025-04-07T14:13:05.762731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:05.762777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574083682108732:2253], cookie# 1 2025-04-07T14:13:05.762802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:05.762840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574083682108734:2253], cookie# 1 2025-04-07T14:13:05.762856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574083682108731:2253][/dc-1] Unexpected sync response: sender# [1:7490574083682108734:2253], cookie# 1 2025-04-07T14:13:05.820057Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574083682108480:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:05.820883Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574083682108480:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... wPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.683428Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220808:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.683537Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574145067220840:2253], recipient# [6:7490574145067220806:2292], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.683885Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490574145067220806:2292], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:19.746914Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7490574127887351421:2121], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.747038Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220807:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.747085Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220808:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.747190Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574145067220841:2254], recipient# [6:7490574145067220806:2292], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.747573Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490574145067220806:2292], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:19.818398Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7490574127887351421:2121], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.818517Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220807:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.818577Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220808:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.818679Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574145067220842:2255], recipient# [6:7490574145067220806:2292], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.819169Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490574145067220806:2292], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:19.900231Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7490574127887351421:2121], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.900339Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220807:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.900387Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7490574127887351421:2121], cacheItem# { Subscriber: { Subscriber: [6:7490574145067220808:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:19.900486Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7490574145067220843:2256], recipient# [6:7490574145067220806:2292], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:19.900616Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7490574145067220806:2292], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> THiveTest::TestExternalBootWhenLocked [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTableCoveredEmbedding >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-04-07T14:12:27.137841Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.141269Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.141480Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.143011Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:64:2071] ControllerId# 72057594037932033 2025-04-07T14:12:27.143052Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.143864Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.145557Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.147261Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.147304Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.150078Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:70:2075] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150220Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:71:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150357Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:72:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150489Z node 2 :BS_PROXY 
DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:73:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150638Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:74:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150804Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:75:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150914Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:76:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.150935Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.151629Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:64:2071] 2025-04-07T14:12:27.151675Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:64:2071] 2025-04-07T14:12:27.151721Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.151767Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.153878Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.154036Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.157079Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.157214Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.157980Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:83:2072] ControllerId# 72057594037932033 2025-04-07T14:12:27.158011Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.158071Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.158221Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.158774Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.158807Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.160288Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:89:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.160432Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:90:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.160590Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:91:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.160761Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:92:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.160942Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:93:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.161071Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:94:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.161219Z node 3 
:BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:95:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.161244Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.161304Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:83:2072] 2025-04-07T14:12:27.161334Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:83:2072] 2025-04-07T14:12:27.161367Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.161407Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.161741Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.161981Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.164382Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.164548Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.165087Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.166239Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.166288Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.167079Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:105:2075] ControllerId# 72057594037932033 2025-04-07T14:12:27.167116Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.167188Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.167373Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.177914Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.177987Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.179626Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:113:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.179774Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:114:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.179893Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:115:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.180033Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:116:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.180174Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] 
Create Queue# [1:117:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.180324Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:118:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.180484Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:119:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.180515Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.180588Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:105:2075] 2025-04-07T14:12:27.180613Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:105:2075] 2025-04-07T14:12:27.180647Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.180703Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.181539Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.183160Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.198918Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:105:2075] 2025-04-07T14:12:27.199013Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.199082Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.206292Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.214658Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:64:2071] 2025-04-07T14:12:27.214730Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.214761Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.214984Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.215168Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:83:2072] 2025-04-07T14:12:27.215224Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.215252Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.216299Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtain ... 
# [72057594037927937:2:8:0:0:201:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 22 } Cost# 81582 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 23 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:13:21.170450Z node 50 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:201:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:13:21.170538Z node 50 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:201:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:13:21.170790Z node 50 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:201:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:13:21.170945Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-07T14:13:21.171318Z node 50 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [50:94:2091], reason = ReasonStop Marker# TSYS29 2025-04-07T14:13:21.171372Z node 50 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-07T14:13:21.171633Z node 50 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-07T14:13:21.171698Z node 50 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-07T14:13:21.171886Z node 50 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-04-07T14:13:21.172653Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [50:433:2346] 2025-04-07T14:13:21.172733Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [50:433:2346] 2025-04-07T14:13:21.172836Z node 50 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [50:95:2091] 2025-04-07T14:13:21.172900Z node 50 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [50:95:2091] 2025-04-07T14:13:21.172976Z node 50 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [50:94:2091] EventType# 268960257 2025-04-07T14:13:21.173203Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-07T14:13:21.173292Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:21.173439Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:21.173525Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:21.173757Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-07T14:13:21.173845Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:21.173960Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, 
NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:21.174055Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:21.174524Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [50:446:2353] 2025-04-07T14:13:21.174582Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [50:446:2353] 2025-04-07T14:13:21.174691Z node 50 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:13:21.174770Z node 50 :TABLET_RESOLVER DEBUG: SelectForward node 50 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [50:363:2294] 2025-04-07T14:13:21.174861Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [50:446:2353] 2025-04-07T14:13:21.174926Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [50:446:2353] 2025-04-07T14:13:21.175055Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [50:446:2353] 2025-04-07T14:13:21.175131Z node 50 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [50:446:2353] 2025-04-07T14:13:21.175241Z node 50 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-07T14:13:21.175397Z node 50 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:13:21.175564Z node 50 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:13:21.175658Z node 50 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:21.175695Z node 50 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:13:21.175767Z node 50 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [50:363:2294] CurrentLeaderTablet: [50:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:13:21.175859Z node 50 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [50:363:2294] CurrentLeaderTablet: [50:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:13:21.175974Z node 50 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [50:363:2294] CurrentLeaderTablet: [50:380:2306] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T14:13:21.176151Z node 50 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-07T14:13:21.176505Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [51:448:2091] 2025-04-07T14:13:21.176562Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [51:448:2091] 2025-04-07T14:13:21.176705Z node 51 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:13:21.176782Z node 51 :TABLET_RESOLVER DEBUG: 
SelectForward node 51 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [50:314:2258] 2025-04-07T14:13:21.176866Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [51:448:2091] 2025-04-07T14:13:21.176949Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [51:448:2091] 2025-04-07T14:13:21.177009Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 50 [51:448:2091] 2025-04-07T14:13:21.177161Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [51:448:2091] 2025-04-07T14:13:21.177247Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [51:448:2091] 2025-04-07T14:13:21.177559Z node 50 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [51:448:2091] 2025-04-07T14:13:21.177874Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [51:448:2091] 2025-04-07T14:13:21.177964Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [51:448:2091] 2025-04-07T14:13:21.178031Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [51:448:2091] 2025-04-07T14:13:21.178118Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [51:448:2091] 2025-04-07T14:13:21.178192Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [51:448:2091] 2025-04-07T14:13:21.178255Z node 51 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [51:448:2091] 2025-04-07T14:13:21.178529Z node 50 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [51:436:2086] EventType# 268697624 2025-04-07T14:13:21.178721Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-07T14:13:21.178802Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:21.179008Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:13:21.179092Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:21.190374Z node 50 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] bootstrap ActorId# [50:451:2356] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:13:21.190564Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:13:21.190639Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:13:21.190761Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-04-07T14:13:21.190831Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-04-07T14:13:21.191063Z node 50 :BS_PROXY DEBUG: Send to queueActorId# [50:53:2078] NKikimr::TEvBlobStorage::TEvVPut# 
{ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:13:21.192419Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:13:21.192553Z node 50 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:13:21.192678Z node 50 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:13:21.192924Z node 50 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:13:21.193104Z node 50 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] Test command err: Trying to start YDB, gRPC: 1953, MsgBus: 6541 2025-04-07T14:11:50.615842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573764581024170:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:50.615908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00143e/r3tmp/tmpdythqp/pdisk_1.dat 2025-04-07T14:11:50.865807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1953, node 1 2025-04-07T14:11:50.922497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:50.922519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:50.922524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:50.922615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:50.976072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:50.976222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:50.977944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6541 TClient is connected to server localhost:6541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:51.277339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.296171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.406617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.523849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:51.568767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:52.925044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.947134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.965739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:52.985942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.005960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:53.030527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 Check query: CREATE RESOURCE POOL MyResourcePool WITH ( CONCURRENT_QUERY_LIMIT=20, QUEUE_SIZE=1000 ); Check query: ALTER RESOURCE POOL MyResourcePool SET (CONCURRENT_QUERY_LIMIT = 30, QUEUE_SIZE = 100), RESET (QUERY_MEMORY_LIMIT_PERCENT_PER_NODE); Check query: DROP RESOURCE POOL MyResourcePool; 2025-04-07T14:11:54.588916Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573780764832399:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:54.588952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00143e/r3tmp/tmpMSKT8p/pdisk_1.dat 2025-04-07T14:11:54.670455Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:54.696522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:54.696598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:54.699494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7765, node 2 2025-04-07T14:11:54.735921Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:54.735936Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:54.735940Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:54.736015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61763 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:54.915544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.984721Z node 2 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2025-04-07T14:11:56.640484Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-07T14:11:56.640636Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-07T14:11:56.640678Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-07T14:11:56.642148Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=NzhjYWU1MzUtYzUwMGRlZWMtZGQxN2U1ZWMtYjMwODdkYzc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzhjYWU1MzUtYzUwMGRlZWMtZGQxN2U1ZWMtYjMwODdkYzc= 2025-04-07T14:11:56.642559Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-07T14:11:56.642969Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490573789354767778:2301], Start check tables existence, number paths: 2 2025-04-07T14:11:56.643118Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=NzhjYWU1MzUtYzUwMGRlZWMtZGQxN2U1ZWMtYjMwODdkYzc=, ActorId: [2:7490573789354767779:2302], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:11:56.643746Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490573789354767778:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-07T14:11:56.643801Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490573789354767778:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-07T14:11:56.643892Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7490573789354767778:2301], Successfully finished 2025-04-07T14:11:56.643934Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-07T14:11:56.655323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.666168Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490573788338060527:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:56.666217Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:11:56.671750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:56.671823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:56.675692Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:11:56.677030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:56.766533Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:11:56.770576Z node 4 :STATISTICS WARN: [72075186224037897] TTxInit::Comp ... tZmZkYWVkNTctZmFmZTUyY2E= 2025-04-07T14:13:21.335297Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:13:21.335780Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ReadyState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, received request, proxyRequestId: 124 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-dedicated/.metadata/secrets/values`; rpcActor: [11:7490574154625034201:3313] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-04-07T14:13:21.335807Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ReadyState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, request placed into pool from cache: default 2025-04-07T14:13:21.335898Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ExecuteState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, Sending CompileQuery request 2025-04-07T14:13:21.347138Z node 11 :SCHEME_BOARD_SUBSCRIBER WARN: [main][11:7490574094495488908:3179][/Root/test-dedicated/.metadata/secrets/values] Sync is done: cookie# 66, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:13:21.347354Z node 11 :SCHEME_BOARD_SUBSCRIBER WARN: [main][11:7490574094495488908:3179][/Root/test-dedicated/.metadata/secrets/values] Sync is done: cookie# 67, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:13:21.348281Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7490574154625034203:3314], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-04-07T14:13:21.348541Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ExecuteState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-07T14:13:21.348583Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ExecuteState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:13:21.348600Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ExecuteState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, EndCleanup, isFinal: 0 2025-04-07T14:13:21.348759Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ExecuteState, TraceId: 01jr89jg9qb2dc134ggkc75f8w, Sent query response back to proxy, proxyRequestId: 124, proxyId: [11:7490574004301174019:2256] 2025-04-07T14:13:21.349585Z node 11 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-07T14:13:21.349961Z node 11 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-04-07T14:13:21.350042Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:13:21.350086Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:13:21.350125Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:13:21.350164Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:13:21.350261Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=OWMzZWNkMGItOTllNTUxZGMtZmZkYWVkNTctZmFmZTUyY2E=, ActorId: [11:7490574154625034200:3312], ActorState: unknown state, Session actor destroyed 2025-04-07T14:13:21.632985Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM= 2025-04-07T14:13:21.633108Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: unknown state, session actor bootstrapped 2025-04-07T14:13:21.633758Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ReadyState, TraceId: 01jr89jgk1cbg7btjh5n683afm, received request, proxyRequestId: 126 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-dedicated/.metadata/secrets/values`; rpcActor: [11:7490574154625034213:3320] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-04-07T14:13:21.633811Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ReadyState, TraceId: 01jr89jgk1cbg7btjh5n683afm, request placed into pool from cache: default 2025-04-07T14:13:21.633939Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ExecuteState, TraceId: 01jr89jgk1cbg7btjh5n683afm, Sending CompileQuery request 2025-04-07T14:13:21.648211Z node 11 :SCHEME_BOARD_SUBSCRIBER WARN: [main][11:7490574094495488908:3179][/Root/test-dedicated/.metadata/secrets/values] Sync is done: cookie# 68, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:13:21.648303Z node 11 :SCHEME_BOARD_SUBSCRIBER WARN: [main][11:7490574094495488908:3179][/Root/test-dedicated/.metadata/secrets/values] Sync is done: cookie# 69, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-07T14:13:21.649209Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7490574154625034215:3321], status: 
UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-04-07T14:13:21.649439Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ExecuteState, TraceId: 01jr89jgk1cbg7btjh5n683afm, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-07T14:13:21.649487Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ExecuteState, TraceId: 01jr89jgk1cbg7btjh5n683afm, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:13:21.649511Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ExecuteState, TraceId: 01jr89jgk1cbg7btjh5n683afm, EndCleanup, isFinal: 0 2025-04-07T14:13:21.649659Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ExecuteState, TraceId: 01jr89jgk1cbg7btjh5n683afm, Sent query response back to proxy, proxyRequestId: 126, proxyId: [11:7490574004301174019:2256] 2025-04-07T14:13:21.650390Z node 11 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-07T14:13:21.650655Z node 11 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-04-07T14:13:21.650763Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ReadyState, Session closed due to explicit close event 2025-04-07T14:13:21.650798Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:13:21.650826Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-07T14:13:21.650855Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: unknown state, Cleanup temp tables: 0 2025-04-07T14:13:21.650927Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ODhjMTY3ZTgtZmRlY2QzYzgtZTFhZjAwMDItMjdlODc0OGM=, ActorId: [11:7490574154625034212:3319], ActorState: unknown state, Session actor destroyed >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:22.975049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:22.975180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:22.975236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:22.975276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:22.975323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:22.975350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:22.975414Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:22.975791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:23.048848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:23.048907Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:23.066028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:23.066160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:23.066305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:23.074079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:23.074351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:23.076324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.076637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:23.085745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.095682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:23.095770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.095950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:23.096007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:23.096142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:23.096352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.105360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:23.242942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:23.244024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.244865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:23.246484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:23.246558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.249777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 
1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.250567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:23.250874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.250927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:23.251009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:23.251062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:23.253252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.253309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:23.253349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:23.255239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.255283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.255339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:23.255396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.259812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:23.262023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:23.263147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:23.264340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.264494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:23.264549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:23.264864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:23.264917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-07T14:13:23.265105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:23.265181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:23.267410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:23.267456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:23.267653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.267708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:23.267935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.267978Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:23.268125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:23.268167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.268214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:23.268255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.268289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:23.268319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:23.268398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:23.268438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:23.268474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:23.270436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:23.270541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:23.270577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:23.270617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:23.270681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:23.270777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ed: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: 
"compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:23.625881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:23.626274Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 285us result status StatusSuccess 2025-04-07T14:13:23.626704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" 
PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:23.627354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:23.627638Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 243us result status StatusSuccess 2025-04-07T14:13:23.628016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } 
Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> TUniqueIndexTests::CreateTable >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] >> TVectorIndexTests::CreateTableWithError >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> TUniqueIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-04-07T14:12:54.087468Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.087525Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.087560Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.087946Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:12:54.096637Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.096789Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.097251Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.097725Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.097852Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.097965Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.098015Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:12:54.098698Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.098717Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.098754Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.098992Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.099409Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.099520Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.099707Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.100063Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.100151Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.100230Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.100264Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:12:54.101153Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.101171Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.101199Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.101476Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.101852Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.101942Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.102111Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.102841Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.103012Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.103107Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T14:12:54.103140Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:12:54.103957Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.103981Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.104012Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.104264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.104884Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.108244Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.108474Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.110044Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.110462Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.110581Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.110622Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:12:54.111602Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.111620Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.111640Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.111922Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.112439Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.112564Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.112750Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.113091Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.113194Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.113281Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.113317Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:12:54.113877Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.113933Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.113969Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.114214Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-07T14:12:54.114742Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.114831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.115018Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.115319Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.115435Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.115537Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.115572Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:12:54.116317Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.116356Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.116385Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.116751Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.117241Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.117347Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.117541Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.118150Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.118316Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.118409Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:12:54.118442Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:12:54.119206Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.119230Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.119257Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.119490Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:12:54.119875Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:12:54.119971Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.120132Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:12:54.121485Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.121845Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:12:54.121906Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-07T14:12:54.121937Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-07T14:12:54.127278Z :ReadSession INFO: Random seed for debugging is 1744035174127248 2025-04-07T14:12:54.386824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574038423099511:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.386948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.473075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574036633417574:2227];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001091/r3tmp/tmpBDSMUR/pdisk_1.dat 2025-04-07T14:12:54.600070Z n ... .783403Z :DEBUG: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:12.783630Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-07T14:13:12.783713Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-07T14:13:12.742000Z WriteTime: 2025-04-07T14:13:12.753000Z Ip: "ipv6:[::1]:44482" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:44482" } } } 2025-04-07T14:13:12.784037Z :DEBUG: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-04-07T14:13:12.784277Z :DEBUG: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-07T14:13:12.784931Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T14:13:12.785016Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 got read request: guid# 6143efff-438c5f32-9c640054-af027c98 2025-04-07T14:13:12.789205Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-04-07T14:13:12.789339Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:13:12.789360Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:13:12.789397Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-04-07T14:13:12.793193Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:13:12.793235Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:13:12.793336Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_14794252949578048244_v1 2025-04-07T14:13:12.793434Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T14:13:12.795877Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:13:12.795948Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:13:12.796001Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-07T14:13:12.796264Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-07T14:13:12.796318Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-04-07T14:13:12.796361Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-04-07T14:13:12.796922Z :DEBUG: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-04-07T14:13:12.844784Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0] Write session will now close 2025-04-07T14:13:12.844842Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0] Write session: aborting 2025-04-07T14:13:12.845478Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:13:12.845524Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0] Write session: destroy 2025-04-07T14:13:12.856754Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0 grpc read done: success: 0 data: 2025-04-07T14:13:12.856802Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0 grpc read failed 2025-04-07T14:13:12.856831Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0 grpc closed 2025-04-07T14:13:12.856849Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|7849045c-73b273c7-f4a98a36-54010e14_0 is DEAD 2025-04-07T14:13:12.857950Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:12.869629Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:12.869674Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490574115732513797:2597] destroyed 2025-04-07T14:13:12.869714Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:13:15.050246Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:15.311365Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-07T14:13:20.050606Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:22.780449Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-07T14:13:22.850593Z :INFO: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] Closing read session. Close timeout: 0.000000s 2025-04-07T14:13:22.850658Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-07T14:13:22.850700Z :INFO: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16617 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:22.850822Z :NOTICE: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:13:22.850873Z :DEBUG: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] [dc1] Abort session to cluster 2025-04-07T14:13:22.852644Z :NOTICE: [/Root] [/Root] [d5b5f8cd-81f701af-b6a5a833-b558f720] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:22.852317Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 grpc read done: success# 0, data# { } 2025-04-07T14:13:22.852434Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 grpc read failed 2025-04-07T14:13:22.852469Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 closed 2025-04-07T14:13:22.852526Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_14794252949578048244_v1 is DEAD 2025-04-07T14:13:22.853093Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490574089962709499:2503] disconnected; active server actors: 1 2025-04-07T14:13:22.853117Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7490574089962709499:2503] client user disconnected session shared/user_1_1_14794252949578048244_v1 2025-04-07T14:13:22.853419Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:22.853470Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_14794252949578048244_v1 2025-04-07T14:13:22.853525Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7490574089962709504:2506] destroyed 2025-04-07T14:13:22.853603Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_14794252949578048244_v1 2025-04-07T14:13:24.322204Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:24.322244Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:24.322279Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:13:24.322643Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:13:24.323390Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:13:24.323608Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:24.323814Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:13:24.324488Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:13:24.324879Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:13:24.325087Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T14:13:24.325216Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:13:24.325270Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:13:24.325317Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-07T14:13:24.325496Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:13:24.325539Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TVectorIndexTests::CreateTableWithError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:26.419221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:26.419306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:26.419339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:26.419368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:26.419411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:26.419451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:26.419518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:26.419847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:26.485028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:26.485087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:26.495222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:26.495360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:26.495508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:26.501528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:26.501711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:26.502269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.502451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:26.504317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.505595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:26.505653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.505768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:26.505812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:26.505843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:26.506019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.512220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:26.623652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:26.623883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.624077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:26.624288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:26.624342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.627753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.627881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:26.628061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.628116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:26.628144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:26.628183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:26.630211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.630263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:26.630294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:26.632014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.632056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.632105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.632145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-07T14:13:26.644926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:26.646788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:26.646968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:26.647845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.647965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:26.648029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.648260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:26.648308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.648455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:26.648516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:26.650453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:26.650492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:26.650672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.650706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:26.650936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.650971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:26.651066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:26.651097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:26.651136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:26.651168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:13:26.651195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:26.651220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:26.651283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:26.651321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:26.651350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:26.652952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:26.653052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:26.653086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:26.653115Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:26.653160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:26.653250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 909139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:26.909191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.909224Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:26.909258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:13:26.909294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-07T14:13:26.909654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:26.909679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:13:26.909842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:26.909875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 
HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:13:26.909932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:26.909973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.909994Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.910034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:13:26.910062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:13:26.913035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:26.913104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:26.915855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:26.915945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:26.916015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:26.916101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.916211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:26.916515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.916692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:26.916746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T14:13:26.916849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-07T14:13:26.916881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-07T14:13:26.916926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T14:13:26.917102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.917128Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:13:26.917178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-07T14:13:26.917199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:26.917229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T14:13:26.917290Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 101 2025-04-07T14:13:26.917343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:26.917384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:13:26.917448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:13:26.917598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:13:26.917634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T14:13:26.917652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T14:13:26.917675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:26.917693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T14:13:26.917709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T14:13:26.917744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:13:26.920641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:26.920715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:379:2344] TestWaitNotification: OK eventTxId 101 2025-04-07T14:13:26.921277Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:26.921561Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 342us result status StatusSuccess 2025-04-07T14:13:26.922347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 
10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-04-07T14:12:58.858158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574054542118030:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.858211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:58.900888Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574053053359646:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.095053Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.094024Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bbb/r3tmp/tmpnr5G9K/pdisk_1.dat 2025-04-07T14:12:59.165589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.493406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:59.499955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.500048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.501067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.501131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.522826Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.522991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.523937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3459, node 1 2025-04-07T14:12:59.616354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bbb/r3tmp/yandexHP9JYK.tmp 2025-04-07T14:12:59.616379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bbb/r3tmp/yandexHP9JYK.tmp 2025-04-07T14:12:59.616570Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bbb/r3tmp/yandexHP9JYK.tmp 2025-04-07T14:12:59.616689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.814158Z INFO: TTestServer started on Port 20428 GrpcPort 3459 TClient is connected to server localhost:20428 PQClient connected to localhost:3459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:00.163446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:13:00.270014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:13:02.237599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574071721988282:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.237753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.251725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574071721988304:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.251802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574071721988306:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.252116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.255227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:13:02.274509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574071721988310:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:13:02.554819Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574071721988406:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:02.564108Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGExYmE4ZGEtYzIyNGZmMDYtMzljYmJlZmQtZjUyOGQ4YmQ=, ActorId: [1:7490574071721988279:2303], ActorState: ExecuteState, TraceId: 01jr89hxmh3xw2rq8nxkx7nem3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:02.577818Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:13:02.613988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.681549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.817459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:13:03.364874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89hyaxez64tctt71vmp76a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRhZTUwZGYtYTU3ZTYyMmEtNjkyYTNmZDUtM2YwOTc2OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574076016956142:3093] 2025-04-07T14:13:03.855069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574054542118030:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:03.855130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:03.893475Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574053053359646:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:03.893535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-07T14:13:09.261222Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574058837085590:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.261456Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574058837085590:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-07T14:13:09.261550Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574058837085590:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490574058837086065:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180230 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:09.261643Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490574058837085590:2127], cacheItem# { Subscriber: { Subscriber: [1:7490574058837086065:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180230 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: Op ... 
rtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.699385Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574123560627499:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:25.699435Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574140740497788:2858] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744035198381 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.699537Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270937:4416], recipient# [3:7490574123560627408:2190], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:25.699643Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270938:4417], recipient# [3:7490574123560627408:2190], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:25.699694Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574123560627499:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:25.699768Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: 
[3:7490574127855595288:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035195133 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.699866Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270939:4418], recipient# [3:7490574123560627408:2190], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:25.957113Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574123560627499:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:25.957261Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574136445530208:2677] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.957347Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270953:4428], recipient# [3:7490574170805270952:2540], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:25.996538Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7490574123560627499:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 
72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:13:25.996642Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574140740497788:2858] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744035198381 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.996731Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574136445530322:2744] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744035198115 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.996906Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270956:4429], recipient# [3:7490574170805270955:2534], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:13:25.997659Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574123560627499:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:25.997761Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574123560627499:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574127855595288:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: 
StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035195133 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:25.997890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574170805270959:4430], recipient# [3:7490574170805270958:2541], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:27.435298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:27.435378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.435409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:27.435451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:27.435494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:27.435519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:27.435573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.435931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:27.499171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:27.499221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:27.513665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:27.513805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:27.513952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-07T14:13:27.521252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:27.521421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:27.522068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.522258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:27.524181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.525260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.525310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.525407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:27.525441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.525464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:27.525582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.531672Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:27.640834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:27.641051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.641225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:27.641430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:27.641481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.643895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.644016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:27.644182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.644233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:27.644264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:27.644307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:27.650341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.650444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:27.650493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:27.652512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.652572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.652640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.652697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.655849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:27.657913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:27.658092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:27.658997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.659144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:27.659201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.659420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:27.659465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.659623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:27.659676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:27.661287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.661323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.661499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.661548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:27.661770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.661814Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:27.661895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:27.661942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.661982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:27.662010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.662031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:27.662051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:27.662108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:27.662147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:27.662170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:27.663622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:27.663711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:27.663741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:27.663774Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:27.663821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:27.663927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-07T14:13:27.667909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-07T14:13:27.668390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-07T14:13:27.670167Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Bootstrap 2025-04-07T14:13:27.683992Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] Become StateWork (SchemeCache [1:269:2261]) 2025-04-07T14:13:27.686279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:27.686638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-07T14:13:27.686767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2025-04-07T14:13:27.686802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2025-04-07T14:13:27.689357Z node 1 :TX_PROXY DEBUG: actor# [1:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:13:27.691084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name: __ydb_parent" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:27.691230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-07T14:13:27.692567Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-07T14:13:27.695556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:27.696020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-07T14:13:27.696167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-07T14:13:27.696205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose 
status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-07T14:13:27.698850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:27.699010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] Test command err: 2025-04-07T14:12:27.229262Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.232371Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.232578Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.233122Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.234106Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.234149Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.234939Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:47:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.234972Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.235085Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.235312Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.245587Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 
IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.245635Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.247383Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:55:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.247537Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:56:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.247731Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:57:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.247868Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.248034Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.248157Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.248286Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.248307Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.248386Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:47:2073] 2025-04-07T14:12:27.248414Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:47:2073] 2025-04-07T14:12:27.248449Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.248482Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.249177Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.249254Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.251796Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.251891Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.252645Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2025-04-07T14:12:27.252689Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.252736Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.252907Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.253573Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.253601Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.254934Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255121Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# 
[2:74:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255238Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255393Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255538Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255662Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255798Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.255820Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.255866Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2025-04-07T14:12:27.255888Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2025-04-07T14:12:27.255917Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.255956Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.256279Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.256515Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2025-04-07T14:12:27.256550Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.256584Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.256771Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.265915Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:47:2073] 2025-04-07T14:12:27.265981Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.266032Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.267388Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.267610Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.267644Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.270461Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.270867Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.271083Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:47:2073] 2025-04-07T14:12:27.271137Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.271159Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} 
TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.271243Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.271491Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.271541Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:51:2064] 2025-04-07T14:12:27.271561Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:51:2064] 2025-04-07T14:12:27.271652Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.271901Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.272152Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.272254Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:71:2064] 2025-04-07T14:12:27.272272Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:71:2064] 2025-04-07T14:12:27.272325Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.272407Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.272490Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.272528Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [2:71:2064] 2025-04-07T14:12:27.272550Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.272568Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:89:2085] 2025-04-07T14:12:27.272581Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:89:2085] 2025-04-07T14:12:27.272609Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:27.272636Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: ... 
0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:13:29.018982Z node 25 :BS_PROXY_PUT DEBUG: [358e54de1cafbaeb] restore Id# [72075186224037888:2:1:0:0:42:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:13:29.019040Z node 25 :BS_PROXY_PUT DEBUG: [358e54de1cafbaeb] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224037888:2:1:0:0:42:1] Marker# BPG33 2025-04-07T14:13:29.019074Z node 25 :BS_PROXY_PUT DEBUG: [358e54de1cafbaeb] Sending missing VPut part# 0 to# 0 blob Id# [72075186224037888:2:1:0:0:42:1] Marker# BPG32 2025-04-07T14:13:29.019167Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:568:2093] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72075186224037888:2:1:0:0:42:1] FDS# 42 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:13:29.019215Z node 25 :BS_PROXY_PUT INFO: [ebee40e0668be2ee] bootstrap ActorId# [25:623:2132] Group# 2147483649 BlobCount# 1 BlobIDs# [[72075186224037888:2:1:1:28672:89:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:13:29.019273Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] Id# [72075186224037888:2:1:1:28672:89:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:13:29.019307Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] restore Id# [72075186224037888:2:1:1:28672:89:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:13:29.019345Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224037888:2:1:1:28672:89:1] Marker# BPG33 2025-04-07T14:13:29.019368Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] Sending missing VPut part# 0 to# 0 blob Id# [72075186224037888:2:1:1:28672:89:1] Marker# BPG32 2025-04-07T14:13:29.019428Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:578:2100] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72075186224037888:2:1:1:28672:89:1] FDS# 89 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:13:29.019522Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:29.019560Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:29.019956Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:29.020044Z node 23 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [25:492:2086] EventType# 268960257 2025-04-07T14:13:29.020338Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:6} Tx{13, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-07T14:13:29.020412Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:6} Tx{13, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:29.020840Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:6} Tx{13, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{16, redo 165b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:13:29.020928Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:6} Tx{13, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:29.028919Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:7} Tx{14, NKikimr::NHive::TTxProcessBootQueue} queued, type 
NKikimr::NHive::TTxProcessBootQueue 2025-04-07T14:13:29.028984Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:7} Tx{14, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:13:29.029088Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:7} Tx{14, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:13:29.029139Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:7} Tx{14, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:13:29.029975Z node 25 :BS_PROXY_PUT DEBUG: [358e54de1cafbaeb] received {EvVPutResult Status# OK ID# [72075186224037888:2:1:0:0:42:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80330 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [80000000:1:0:0:0] Marker# BPP01 2025-04-07T14:13:29.030072Z node 25 :BS_PROXY_PUT DEBUG: [358e54de1cafbaeb] Result# TEvPutResult {Id# [72075186224037888:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483648 Marker# BPP12 2025-04-07T14:13:29.030116Z node 25 :BS_PROXY_PUT INFO: [358e54de1cafbaeb] SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:13:29.030328Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] received {EvVPutResult Status# OK ID# [72075186224037888:2:1:1:28672:89:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 80700 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2025-04-07T14:13:29.030392Z node 25 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] Result# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2025-04-07T14:13:29.030425Z node 25 :BS_PROXY_PUT INFO: [ebee40e0668be2ee] SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:13:29.030556Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72075186224037888 ProposedGeneration: 2 ProposedStep: 0} 2025-04-07T14:13:29.030701Z node 25 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037888:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:13:29.030788Z node 25 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:13:29.030923Z node 25 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T14:13:29.030988Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:2} commited cookie 2 for step 1 2025-04-07T14:13:29.031238Z node 25 :BS_PROXY_COLLECT INFO: [8d3ab1a908111ee8] bootstrap ActorId# [25:624:2133] Group# 2147483648 TabletId# 72075186224037888 Channel# 0 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 
CollectStep# 0 Collect# true Hard# false RestartCounter# 0 Marker# DSPC03 2025-04-07T14:13:29.031350Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:568:2093] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:0] collect=[2:0] cookie# 0 2025-04-07T14:13:29.031409Z node 25 :BS_PROXY_COLLECT INFO: [fb0feab7af645c61] bootstrap ActorId# [25:625:2134] Group# 2147483649 TabletId# 72075186224037888 Channel# 1 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false RestartCounter# 0 Marker# DSPC03 2025-04-07T14:13:29.031439Z node 25 :BS_PROXY_COLLECT INFO: [fb0feab7af645c61] Keep# [72075186224037888:1:2:1:8192:289:0] Marker# DSPC04 2025-04-07T14:13:29.031507Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:578:2100] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:1] collect=[2:0] Keep: [72075186224037888:1:2:1:8192:289:0] cookie# 0 2025-04-07T14:13:29.033811Z node 25 :BS_PROXY_COLLECT DEBUG: [fb0feab7af645c61] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2025-04-07T14:13:29.033871Z node 25 :BS_PROXY_COLLECT INFO: [fb0feab7af645c61] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-04-07T14:13:29.034199Z node 25 :BS_PROXY_COLLECT DEBUG: [8d3ab1a908111ee8] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2025-04-07T14:13:29.034258Z node 25 :BS_PROXY_COLLECT INFO: [8d3ab1a908111ee8] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-04-07T14:13:29.034748Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [23:627:2409] 2025-04-07T14:13:29.034826Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [23:627:2409] 2025-04-07T14:13:29.034944Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:13:29.035121Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:13:29.035259Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:13:29.035330Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:13:29.035374Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:13:29.035442Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [25:553:2087] CurrentLeaderTablet: [25:555:2088] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:13:29.035511Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [25:553:2087] CurrentLeaderTablet: [25:555:2088] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:13:29.035608Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 
72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [25:553:2087] CurrentLeaderTablet: [25:555:2088] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-07T14:13:29.035681Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-07T14:13:29.035753Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [25:553:2087] 2025-04-07T14:13:29.035854Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 25 [23:627:2409] 2025-04-07T14:13:29.035956Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [23:627:2409] 2025-04-07T14:13:29.036026Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [23:627:2409] 2025-04-07T14:13:29.036252Z node 25 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [23:627:2409] 2025-04-07T14:13:29.036567Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [23:627:2409] 2025-04-07T14:13:29.036623Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [23:627:2409] >> TAsyncIndexTests::Decimal [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:29.900202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:29.900288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:29.900325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:29.900358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:29.900399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:29.900458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:29.900531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:29.900867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:29.961121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:29.961179Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:29.971898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:29.972055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-07T14:13:29.972224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:29.978475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:29.978672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:29.979354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:29.979551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:29.981474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:29.982753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:29.982812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:29.982953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:29.982994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:29.983033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:29.983216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.992538Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:30.124732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:30.124924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.125083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:30.125271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:30.125324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.127406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:30.127543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:30.127717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.127770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:30.127818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:30.127868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:30.129721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.129796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:30.129836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:30.131439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.131471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.131511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:30.131549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:30.134127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:30.135791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:30.135971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:30.136694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:30.136799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:30.136843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:30.137043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:30.137075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:30.137219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:30.137289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:30.139229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T14:13:30.139273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:30.139489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:30.139529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:30.139791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.139837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:30.139926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:30.139954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:30.139996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:30.140034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:30.140068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:30.140094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:30.140166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:30.140205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:30.140234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:30.142020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:30.142133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:30.142163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:30.142196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:30.142244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:30.142340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
374237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:30.374290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:30.374410Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:30.374467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:13:30.374507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-07T14:13:30.375114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:30.375144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:13:30.375293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:30.375324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:13:30.375369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:30.375401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:30.375422Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.375455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:13:30.375504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:13:30.381176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:30.381353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:30.386760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:30.387009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:30.387275Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:30.387645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.387867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:30.388148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:30.388191Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T14:13:30.388283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-07T14:13:30.388317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-07T14:13:30.388357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T14:13:30.388907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.389252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:30.389287Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:13:30.389352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-07T14:13:30.389394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:30.389432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T14:13:30.389499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 101 2025-04-07T14:13:30.389556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:30.389597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:13:30.389624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:13:30.389747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:13:30.389786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T14:13:30.389802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T14:13:30.389826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:30.389844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T14:13:30.389859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T14:13:30.389892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:13:30.392536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:30.392577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:379:2344] TestWaitNotification: OK 
eventTxId 101 2025-04-07T14:13:30.393069Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:30.393309Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 255us result status StatusSuccess 2025-04-07T14:13:30.394023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> TVectorIndexTests::CreateTable >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> TVectorIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-04-07T14:12:58.827337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574053217744634:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.827551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.130600Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc9/r3tmp/tmpLaGj7p/pdisk_1.dat 2025-04-07T14:12:59.141553Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.240204Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:12:59.530516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.530608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.535267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.535351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.536521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.544269Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.545558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting 
-> Connected 2025-04-07T14:12:59.568929Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14203, node 1 2025-04-07T14:12:59.607916Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.621445Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:59.752018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bc9/r3tmp/yandexKJNCCo.tmp 2025-04-07T14:12:59.752052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bc9/r3tmp/yandexKJNCCo.tmp 2025-04-07T14:12:59.752191Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bc9/r3tmp/yandexKJNCCo.tmp 2025-04-07T14:12:59.752291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.819745Z INFO: TTestServer started on Port 12977 GrpcPort 14203 TClient is connected to server localhost:12977 PQClient connected to localhost:14203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:00.102800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:13:00.185007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:13:02.363784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574070397614730:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.363936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.364166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574070397614765:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.367281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:13:02.370243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574070397614799:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.370332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.404358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574070397614767:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:13:02.609142Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574070397614866:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:02.609489Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNhZGQwNWUtYjg1MmQ5ZjQtMTdlNWQwOTEtOTg3ZTVlNmU=, ActorId: [1:7490574070397614725:2302], ActorState: ExecuteState, TraceId: 01jr89hxrk9j4qgmfcbca52cnp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:02.611635Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:13:02.615584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.716267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.917804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:13:03.364915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89hydzdqmtymdh5vbfcpgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTkzN2Y4MzMtNTJhZTBlNGItNjE3YjUwNDItMTBmOTlmYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574074692582608:3094] 2025-04-07T14:13:03.826301Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574053217744634:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:03.826352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-07T14:13:09.368443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574057512712032:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.368643Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574057512712032:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-07T14:13:09.368749Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574057512712032:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490574057512712518:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180174 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:09.368824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490574057512712032:2125], cacheItem# { Subscriber: { Subscriber: [1:7490574057512712518:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180174 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 14 IsSync: true Partial: 0 } 2025-04-07T14:13:09.368966Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7490574100462386824:3376], recipient# [1:7490574100462386823:3375], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ P ... 
ll> }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.315004Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574193628862037:4683], recipient# [3:7490574129204348609:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:30.338414Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7490574126985888530:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.338559Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7490574126985888530:2102], cacheItem# { Subscriber: { Subscriber: [4:7490574131280855851:2107] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.338662Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7490574191410399335:3065], recipient# [4:7490574191410399334:2378], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.384780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:13:30.384826Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:30.388107Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574129204348624:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.388245Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574129204348624:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574142089251370:2726] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.388329Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574193628862042:4684], recipient# [3:7490574193628862041:2814], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.506647Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:30.506964Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:30.612394Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:30.612563Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:30.644336Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7490574129204348624:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:13:30.644435Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7490574129204348624:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574142089251630:2879] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744035198738 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.644495Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7490574129204348624:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574142089251513:2798] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744035198521 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.644771Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574193628862050:4687], 
recipient# [3:7490574193628862049:2808], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-07T14:13:30.645478Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574129204348624:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.645604Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574129204348624:2124], cacheItem# { Subscriber: { Subscriber: [3:7490574129204349113:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035195819 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.645747Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574193628862053:4688], recipient# [3:7490574193628862052:2815], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:13:30.648046Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490574193628862048:2808] TxId: 281474976715689. Ctx: { TraceId: 01jr89js1r3c9vf4p3gbpfn90c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjVlNmJiOTYtYjIyYTY0MTItYmFhOWY5NWUtYTIxM2ZmM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-07T14:13:30.648743Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490574193628862061:2817], TxId: 281474976715689, task: 4. Ctx: { TraceId : 01jr89js1r3c9vf4p3gbpfn90c. SessionId : ydb://session/3?node_id=3&id=YjVlNmJiOTYtYjIyYTY0MTItYmFhOWY5NWUtYTIxM2ZmM2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490574193628862048:2808], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T14:13:30.649066Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7490574193628862059:2816], TxId: 281474976715689, task: 2. Ctx: { TraceId : 01jr89js1r3c9vf4p3gbpfn90c. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YjVlNmJiOTYtYjIyYTY0MTItYmFhOWY5NWUtYTIxM2ZmM2Q=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7490574193628862048:2808], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T14:13:30.704441Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:30.706399Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvPQ::TEvPartitionCounters PartitionId 2 >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2025-04-07T14:13:05.242257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574086533436232:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.242297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001888/r3tmp/tmpdoyUQ4/pdisk_1.dat 2025-04-07T14:13:05.833534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:05.868293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.868370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.878213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1600 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T14:13:06.087768Z node 1 :TX_PROXY DEBUG: actor# [1:7490574086533436455:2136] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:06.087824Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574090828404183:2432] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:06.087956Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574086533436482:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:06.087991Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574086533436482:2151], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:06.088157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:06.089819Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436128:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574090828404188:2433] 2025-04-07T14:13:06.089861Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086533436128:2051] Subscribe: subscriber# [1:7490574090828404188:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.089901Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436131:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574090828404189:2433] 2025-04-07T14:13:06.089932Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086533436131:2054] Subscribe: subscriber# [1:7490574090828404189:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 
2025-04-07T14:13:06.089952Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436134:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574090828404190:2433] 2025-04-07T14:13:06.089965Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574086533436134:2057] Subscribe: subscriber# [1:7490574090828404190:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.089999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404188:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086533436128:2051] 2025-04-07T14:13:06.090020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404189:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086533436131:2054] 2025-04-07T14:13:06.090042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404190:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574086533436134:2057] 2025-04-07T14:13:06.090100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574090828404185:2433] 2025-04-07T14:13:06.090129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574090828404186:2433] 2025-04-07T14:13:06.090167Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574090828404184:2433][/dc-1] Set up state: owner# [1:7490574086533436482:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.090267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574090828404187:2433] 2025-04-07T14:13:06.090308Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574090828404184:2433][/dc-1] Path was already updated: owner# [1:7490574086533436482:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.090357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404188:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404185:2433], cookie# 1 2025-04-07T14:13:06.090371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404189:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404186:2433], cookie# 1 2025-04-07T14:13:06.090383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7490574090828404190:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404187:2433], cookie# 1 2025-04-07T14:13:06.090403Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436128:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574090828404188:2433] 2025-04-07T14:13:06.090423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436128:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404188:2433], cookie# 1 2025-04-07T14:13:06.090439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436131:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574090828404189:2433] 2025-04-07T14:13:06.090449Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436131:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404189:2433], cookie# 1 2025-04-07T14:13:06.090464Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436134:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574090828404190:2433] 2025-04-07T14:13:06.090474Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574086533436134:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574090828404190:2433], cookie# 1 2025-04-07T14:13:06.103091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404188:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086533436128:2051], cookie# 1 2025-04-07T14:13:06.103132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404189:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086533436131:2054], cookie# 1 2025-04-07T14:13:06.103145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574090828404190:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574086533436134:2057], cookie# 1 2025-04-07T14:13:06.103177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574090828404185:2433], cookie# 1 2025-04-07T14:13:06.103195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:06.103211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574090828404186:2433], cookie# 1 2025-04-07T14:13:06.103229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:06.103248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574090828404187:2433], cookie# 1 2025-04-07T14:13:06.103259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574090828404184:2433][/dc-1] Unexpected sync response: sender# [1:7490574090828404187:2433], cookie# 1 2025-04-07T14:13:06.157379Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574086533436482:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:06.157697Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574086533436482:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 
nknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.870446Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456011:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.870567Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7490574192198423347:2314], recipient# [11:7490574187903456008:2300], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.870688Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490574187903456008:2300], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:30.922035Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7490574170723586570:2127], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.922179Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456010:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.922240Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456011:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:30.922474Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7490574192198423348:2315], recipient# [11:7490574187903456008:2300], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:30.922666Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490574187903456008:2300], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:31.006030Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7490574170723586570:2127], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:31.006185Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456010:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:31.006248Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456011:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:31.006407Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7490574196493390645:2316], recipient# [11:7490574187903456008:2300], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:31.006616Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490574187903456008:2300], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:31.106390Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7490574170723586570:2127], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:31.106545Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456010:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:31.106608Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7490574170723586570:2127], cacheItem# { Subscriber: { Subscriber: [11:7490574187903456011:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:31.106774Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7490574196493390646:2317], recipient# [11:7490574187903456008:2300], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:31.106991Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7490574187903456008:2300], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:31.751902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:31.752005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:31.752050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:31.752078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:31.752117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:31.752145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:31.752198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:31.752450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:31.811816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:31.811864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:31.826790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:31.826913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:31.827048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:31.832408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:31.832556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:31.833144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:31.833321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:31.834941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:31.836038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:31.836088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:31.836196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:31.836238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless 
db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:31.836270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:31.836430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.842181Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:31.962088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:31.962261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.962401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:31.962548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:31.962592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.964297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:31.964398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:31.964534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.964568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:31.964595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:31.964624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:31.966116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.966160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:31.966189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:31.967433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.967459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.967499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:31.967527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:31.969929Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:31.971183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:31.971315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:31.971982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:31.972069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:31.972108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:31.972261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:31.972290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:31.972400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:31.972450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:31.973836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:31.973866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:31.973996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:31.974030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:31.974186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:31.974211Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:31.974277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:31.974303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:31.974338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:31.974363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:31.974385Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:31.974402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:31.974466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:31.974501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:31.974526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:31.975992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:31.976085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:31.976111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:31.976133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:31.976164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:31.976241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... calPathId: 2] was 5 2025-04-07T14:13:32.312522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.312590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.312610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.312630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:13:32.312652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:13:32.313020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.313096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.313126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.313696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.313759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.313778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.313798Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-07T14:13:32.313819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:32.314097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.314168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.314195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.314291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.314333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.314350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.315042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.315109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.315130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.315159Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-07T14:13:32.315186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-07T14:13:32.316482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.316543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:13:32.316558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:13:32.316573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-07T14:13:32.316589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T14:13:32.316627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-04-07T14:13:32.317718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-07T14:13:32.317750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:32.317970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:13:32.318066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-07T14:13:32.318092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-07T14:13:32.318119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-07T14:13:32.318887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.318982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:13:32.319005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:32.319127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:13:32.319175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-07T14:13:32.319191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-07T14:13:32.319207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-07T14:13:32.319442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.319464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:32.319544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:13:32.319584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-07T14:13:32.319596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-07T14:13:32.319613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-07T14:13:32.319667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:419:2374] message: 
TxId: 102 2025-04-07T14:13:32.319706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-07T14:13:32.319733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:13:32.319755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:13:32.319814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:13:32.319836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-07T14:13:32.319846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-07T14:13:32.319861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:32.319871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-07T14:13:32.319879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-07T14:13:32.319898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:13:32.319932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-07T14:13:32.319945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-07T14:13:32.319974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T14:13:32.320281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.320319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.320367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.320380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.320393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.321380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.321428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-07T14:13:32.322741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:13:32.322773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:522:2476] TestWaitNotification: OK eventTxId 102 >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::DeleteImmediate >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> TGroupMapperTest::MonteCarlo [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SystemView::QueryStatsAllTables [GOOD] |95.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |95.5%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::BrokenCredentialsProvider [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::QueryStatsAllTables [GOOD] Test command err: 2025-04-07T14:12:16.252197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573872304663311:2102];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.255756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001085/r3tmp/tmprtuPMn/pdisk_1.dat 2025-04-07T14:12:16.594178Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14367, node 1 2025-04-07T14:12:16.652491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.652600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.661270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.701649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:16.701680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:16.701693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:16.701802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:17.026731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:17.066603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:17.302277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:19.073820Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-07T14:12:19.073865Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-07T14:12:19.110714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573885189566311:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.110783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573885189566319:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.110852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.113912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:12:19.142226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573885189566325:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-07T14:12:19.241310Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-04-07T14:12:19.242303Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00010F448 2025-04-07T14:12:19.242348Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7490573885189566308:2309], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jr89gkh2380zpa96drn9fz41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU5YmJlNmEtNWQyN2ZiNzItY2UxYzk2YTItNjMwZTA1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-07T14:12:19.242475Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-04-07T14:12:19.242552Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7490573885189566308:2309], queueSize: 1 2025-04-07T14:12:19.243046Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7490573885189566308:2309], compileActor: [1:7490573885189566417:2319] 2025-04-07T14:12:19.497322Z node 1 :KQP_YQL INFO: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.495 INFO ydb-core-sys_view-ut_kqp(pid=468332, tid=0x00007FB5F1496640) [KQP] kqp_host.cpp:1343: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! 
$1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! $17 $2)) ) 2025-04-07T14:12:19.500967Z node 1 :KQP_YQL TRACE: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.499 TRACE ydb-core-sys_view-ut_kqp(pid=468332, tid=0x00007FB5F1496640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! (Commit! 
$17 $2) (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-07T14:12:19.501518Z node 1 :KQP_YQL DEBUG: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.501 DEBUG ydb-core-sys_view-ut_kqp(pid=468332, tid=0x00007FB5F1496640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 485us 2025-04-07T14:12:19.505638Z node 1 :KQP_YQL INFO: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.505 INFO ydb-core-sys_view-ut_kqp(pid=468332, tid=0x00007FB5F1496640) [RESULT] yql_result_provider.cpp:1416: RewriteIO 2025-04-07T14:12:19.513617Z node 1 :KQP_YQL DEBUG: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.513 DEBUG ydb-core-sys_view-ut_kqp(pid=468332, tid=0x00007FB5F1496640) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 7.82ms 2025-04-07T14:12:19.514963Z node 1 :KQP_YQL DEBUG: TraceId: 01jr89gkh2380zpa96drn9fz41, SessionId: CompileActor 2025-04-07 14:12:19.514 DEBUG ydb-core-sys_view- ... T14:13:20.024824Z node 46 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:20.025003Z node 46 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:20.407566Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:20.427490Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:24.343706Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7490574165592890638:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:24.343706Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7490574165592890643:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:24.343814Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:24.348735Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:13:24.390096Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7490574165592890652:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:13:24.646615Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89jk7n4ep9yntanhq4wr09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=NTMxMmYzYWYtYmViMjFmY2ItZTY5YzNlZWMtM2U1N2ZhZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:13:24.774353Z node 46 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[46:7490574144118053189:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:24.774717Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:24.781261Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr89jkhn1ddtkmsrvwbj6ag2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=ZGI2ODYyYi02YjhiOGU0MC1hOWM5MGY0LTQyNTk0ZTVj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:13:24.803209Z node 46 :SYSTEM_VIEWS INFO: Scan started, actor: [46:7490574165592890808:2329], owner: [46:7490574165592890804:2327], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2025-04-07T14:13:24.803770Z node 46 :SYSTEM_VIEWS INFO: Scan prepared, actor: [46:7490574165592890808:2329], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:13:24.804286Z node 46 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [46:7490574165592890808:2329], row count: 1, finished: 1 2025-04-07T14:13:24.804332Z node 46 :SYSTEM_VIEWS INFO: Scan finished, actor: [46:7490574165592890808:2329], owner: [46:7490574165592890804:2327], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2025-04-07T14:13:24.806502Z node 46 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035204780, txId: 281474976710662] shutting down 2025-04-07T14:13:27.247592Z node 51 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[51:7490574180228862761:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001085/r3tmp/tmpAEEFvz/pdisk_1.dat 2025-04-07T14:13:27.322710Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:13:27.421689Z node 51 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:27.472716Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:27.472842Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:27.478109Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5743, node 51 2025-04-07T14:13:27.552277Z node 51 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:27.552308Z node 51 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:27.552320Z node 51 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-07T14:13:27.552490Z node 51 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:28.033290Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:28.064235Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:32.103949Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7490574201703700077:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:32.103953Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7490574201703700072:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:32.104027Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:32.108571Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:13:32.129995Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [51:7490574201703700086:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:13:32.244435Z node 51 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[51:7490574180228862761:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:32.244527Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:32.354623Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89jtt6551qa0843x7fc85y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=ODE1M2UwMjEtZDU2YTAyNTctYTU0ZjRkZTEtODQ3OWZiMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:13:32.462271Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr89jv2d3533nmf2vaq64m1v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=MWVlYTIyZDAtYjM4NWI3NDYtYWYwYmEyY2YtYTU3MmVjM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:13:32.464849Z node 51 :SYSTEM_VIEWS INFO: Scan started, actor: [51:7490574201703700239:2329], owner: [51:7490574201703700235:2327], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-07T14:13:32.465311Z node 51 :SYSTEM_VIEWS INFO: Scan prepared, actor: [51:7490574201703700239:2329], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:13:32.465804Z node 51 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [51:7490574201703700239:2329], row count: 1, finished: 1 2025-04-07T14:13:32.465843Z node 51 :SYSTEM_VIEWS INFO: Scan finished, actor: [51:7490574201703700239:2329], owner: [51:7490574201703700235:2327], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-07T14:13:32.468113Z node 51 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035212461, txId: 281474976710662] shutting down >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-07T14:12:54.033041Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744035174033017 2025-04-07T14:12:54.430118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574037071480245:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.435309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.476039Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574036087900874:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.476087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108a/r3tmp/tmpVUlgOu/pdisk_1.dat 2025-04-07T14:12:54.695232Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.705357Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.901760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:54.912998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.913123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.914261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.914304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.919182Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:54.919337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.921020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65087, node 1 2025-04-07T14:12:55.050843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00108a/r3tmp/yandexUbgea1.tmp 2025-04-07T14:12:55.050872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00108a/r3tmp/yandexUbgea1.tmp 2025-04-07T14:12:55.050965Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00108a/r3tmp/yandexUbgea1.tmp 2025-04-07T14:12:55.051054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:55.216037Z INFO: TTestServer started on Port 13732 GrpcPort 65087 TClient is connected to server localhost:13732 PQClient connected to localhost:65087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:55.508056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:12:55.598524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:12:57.244117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574049956383006:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.244200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574049956383025:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.244261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.248641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:12:57.261074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574049956383064:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.261523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.262671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-07T14:12:57.263826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574049956383035:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:12:57.539747Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574048972803086:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.539977Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY0OWExZDgtNjRmZDc2NmItMWY4MDVmYWQtNmYwMmY3YzM=, ActorId: [2:7490574048972803061:2280], ActorState: ExecuteState, TraceId: 01jr89hrv8dhfhw0a5y9w94wjb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.541754Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.557322Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574049956383132:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.558769Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjFkYTEyOS02YTQzZDFhMi00MTJiNDIzZS1kOTRkYTQ2Yg==, ActorId: [1:7490574049956383003:2302], ActorState: ExecuteState, TraceId: 01jr89hrrt9xfkyhkk5dwbkva7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.559120Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.592049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.715061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.828554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:65087", true, true, 1000); 2025-04-07T14:12:58.189118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr89hsd3652ypqqkkyba6qgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ4YWY1MWYtODQzNTlkYi1iOGFmOTYxYi1jNjgwODYxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574054251350826:2977] 2025-04-07T14:12:59.432847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574037071480245:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.432945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:59.476773Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574036087900874:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.476855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:13:04.508442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:65087 MetaRequest { CmdGetTopicMetadata { Topic ... DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:13:33.422991Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:13:33.423000Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:13:33.423016Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:13:33.425803Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:13:33.570328Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T14:13:33.570582Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490574205304576575:2485] connected; active server actors: 1 2025-04-07T14:13:33.570639Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T14:13:33.570659Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T14:13:33.570835Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490574205304576575:2485] disconnected; active server actors: 1 2025-04-07T14:13:33.570853Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7490574205304576575:2485] disconnected no session 2025-04-07T14:13:33.704838Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:13:33.704882Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:13:33.704900Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7490574205304576527:2485] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T14:13:33.704931Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:13:33.706283Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:33.706348Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7490574205304576597:2485], now have 1 active actors on pipe 2025-04-07T14:13:33.706411Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-07T14:13:33.706572Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:13:33.706616Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:13:33.706698Z node 6 :PERSQUEUE INFO: new Cookie src|8951136d-dcab9af1-7c97075e-fed4744a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T14:13:33.706794Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:13:33.706852Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:13:33.707212Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:13:33.707235Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:13:33.707319Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:13:33.707511Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|8951136d-dcab9af1-7c97075e-fed4744a_0 2025-04-07T14:13:33.708285Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744035213708 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:33.708416Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|8951136d-dcab9af1-7c97075e-fed4744a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T14:13:33.708618Z :INFO: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:13:33.708692Z :INFO: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session will now close 2025-04-07T14:13:33.708734Z :DEBUG: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session: aborting 2025-04-07T14:13:33.709531Z :INFO: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:13:33.709558Z :DEBUG: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session is aborting and will not restart 2025-04-07T14:13:33.709619Z :DEBUG: [] MessageGroupId [src] SessionId [src|8951136d-dcab9af1-7c97075e-fed4744a_0] Write session: destroy 2025-04-07T14:13:33.709732Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|8951136d-dcab9af1-7c97075e-fed4744a_0 grpc read done: success: 0 data: 2025-04-07T14:13:33.709752Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|8951136d-dcab9af1-7c97075e-fed4744a_0 grpc read failed 2025-04-07T14:13:33.710276Z node 5 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|8951136d-dcab9af1-7c97075e-fed4744a_0 2025-04-07T14:13:33.710303Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|8951136d-dcab9af1-7c97075e-fed4744a_0 is DEAD 2025-04-07T14:13:33.710598Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:33.710879Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:33.710930Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7490574205304576597:2485] destroyed 2025-04-07T14:13:33.710979Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:13:33.725967Z :INFO: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Starting read session 2025-04-07T14:13:33.726027Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Starting session to cluster null (localhost:9217) 2025-04-07T14:13:33.727727Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:33.727755Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:33.727789Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] Reconnecting session to cluster null in 0.000000s 2025-04-07T14:13:33.728851Z :ERROR: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-07T14:13:33.728906Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:33.728938Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:33.729037Z :INFO: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-07T14:13:33.729191Z :NOTICE: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:33.729216Z :DEBUG: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-07T14:13:33.729291Z :INFO: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Closing read session. Close timeout: 0.000000s 2025-04-07T14:13:33.729323Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-07T14:13:33.729358Z :INFO: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:33.729459Z :NOTICE: [/Root] [/Root] [9a7c2013-25999d97-7bf88a9a-ce18b214] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:34.217823Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490574209599543933:2503], TxId: 281474976715689, task: 1, CA Id [5:7490574209599543931:2503]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T14:13:34.249919Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490574209599543933:2503], TxId: 281474976715689, task: 1, CA Id [5:7490574209599543931:2503]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:13:34.295559Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490574209599543933:2503], TxId: 281474976715689, task: 1, CA Id [5:7490574209599543931:2503]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:13:34.351945Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490574209599543933:2503], TxId: 281474976715689, task: 1, CA Id [5:7490574209599543931:2503]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:13:34.447707Z node 5 :KQP_COMPUTE WARN: SelfId: [5:7490574209599543933:2503], TxId: 281474976715689, task: 1, CA Id [5:7490574209599543931:2503]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-07T14:12:55.625863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574041150802805:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:55.626189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:55.669573Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574043173978940:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:55.671520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:55.856582Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 
2025-04-07T14:12:55.857744Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e30/r3tmp/tmpI1w62B/pdisk_1.dat 2025-04-07T14:12:56.091985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.092070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:56.094674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:56.095486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.128359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:56.138113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:56.138177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64344, node 1 2025-04-07T14:12:56.146819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:56.162061Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:56.162093Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:56.347492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001e30/r3tmp/yandexfQvJys.tmp 2025-04-07T14:12:56.347541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001e30/r3tmp/yandexfQvJys.tmp 2025-04-07T14:12:56.347720Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001e30/r3tmp/yandexfQvJys.tmp 2025-04-07T14:12:56.347837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:56.549535Z INFO: TTestServer started on Port 3046 GrpcPort 64344 TClient is connected to server localhost:3046 PQClient connected to localhost:64344 === TenantModeEnabled() = 0 === Init PQ - start server on port 64344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:56.888246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:12:56.888575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.890098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:12:56.891852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:12:56.891947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.894841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:12:56.895776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:12:56.896025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.896130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:12:56.896265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:12:56.896293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:12:56.897431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:56.897458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:12:56.897480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:56.898114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.898151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:12:56.898161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:12:56.899570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.899594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.899609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:56.899634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:56.914990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:56.917048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:12:56.917211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:12:56.919622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035176961, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:12:56.919844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574045445770575 RawX2: 4294969653 } } Step: 1744035176961 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:12:56.919874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:56.920095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:12:56.920134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:56.920256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:12:56.920313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:12:56.921952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:12:56.921985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:12:56.922205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:12:56.922251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574045445770683:2428], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:12:56.922306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:56.922331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:12:56.922443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:12:56.922460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:56.922483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:12:56.922497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:56.922523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:12:56.922532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:12:56.922630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 202 ... d1f64309-ed85a2b1 2025-04-07T14:13:36.439140Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_12402894990352272943_v1" (Sender=[5:7490574215981671867:2589], Pipe=[5:7490574215981671873:2589], Partitions=[], ActiveFamilyCount=0) 2025-04-07T14:13:36.439210Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_12402894990352272943_v1" sender [5:7490574215981671867:2589] lock partition 0 for ReadingSession "shared/cli_5_1_12402894990352272943_v1" (Sender=[5:7490574215981671867:2589], Pipe=[5:7490574215981671873:2589], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-07T14:13:36.439260Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-07T14:13:36.439287Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000183s 2025-04-07T14:13:36.441817Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_12402894990352272943_v1" ClientId: "cli" PipeClient { RawX1: 7490574215981671873 RawX2: 4503621102209565 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-04-07T14:13:36.441931Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:13:36.442299Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2025-04-07T14:13:36.442545Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_12402894990352272943_v1:1 with generation 1 2025-04-07T14:13:36.445680Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1744035216442 } Cookie: 18446744073709551615 } 2025-04-07T14:13:36.445711Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-07T14:13:36.445757Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 sending to client partition status 2025-04-07T14:13:36.446517Z :INFO: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2025-04-07T14:13:36.447653Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-07T14:13:36.447748Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-07T14:13:36.447784Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-07T14:13:36.447806Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-07T14:13:36.447842Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-07T14:13:36.447853Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1TEvPartitionReady. Aval parts: 1 2025-04-07T14:13:36.447883Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 performing read request: guid# 62077193-260b045d-3f6e934c-4f024cbf, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T14:13:36.448051Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 62077193-260b045d-3f6e934c-4f024cbf 2025-04-07T14:13:36.448988Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744035216323 CreateTimestampMS: 1744035216322 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744035216344 CreateTimestampMS: 1744035216322 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744035216345 CreateTimestampMS: 1744035216322 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-07T14:13:36.449134Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-07T14:13:36.449185Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 62077193-260b045d-3f6e934c-4f024cbf has messages 1 2025-04-07T14:13:36.449272Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 read done: guid# 62077193-260b045d-3f6e934c-4f024cbf, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-04-07T14:13:36.449306Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 response to read: guid# 62077193-260b045d-3f6e934c-4f024cbf 2025-04-07T14:13:36.449475Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 Process answer. Aval parts: 0 2025-04-07T14:13:36.449840Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-07T14:13:36.449991Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-07T14:13:36.450343Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-07T14:13:36.450397Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] Returning serverBytesSize = 490 to budget 2025-04-07T14:13:36.450441Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-04-07T14:13:36.450653Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T14:13:36.450817Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T14:13:36.450869Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T14:13:36.450895Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-07T14:13:36.450938Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-07T14:13:36.450978Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] Returning serverBytesSize = 0 to budget 2025-04-07T14:13:36.451143Z :INFO: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] Closing read session. 
Close timeout: 0.000000s 2025-04-07T14:13:36.450950Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-04-07T14:13:36.451073Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 got read request: guid# f5c12c74-f9913075-a88e5315-7473e277 2025-04-07T14:13:36.451207Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-07T14:13:36.451250Z :INFO: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] Counters: { Errors: 0 CurrentSessionLifetimeMs: 26 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:36.451364Z :NOTICE: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:13:36.451412Z :DEBUG: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] [] Abort session to cluster 2025-04-07T14:13:36.452223Z :NOTICE: [] [] [41f9ba4a-ccc618b-e724ace6-7bfbc270] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:36.452331Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 grpc read done: success# 0, data# { } 2025-04-07T14:13:36.452367Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 grpc read failed 2025-04-07T14:13:36.452417Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 closed 2025-04-07T14:13:36.452997Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_12402894990352272943_v1 is DEAD 2025-04-07T14:13:36.453465Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_12402894990352272943_v1 2025-04-07T14:13:36.453742Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7490574215981671873:2589] disconnected; active server actors: 1 2025-04-07T14:13:36.453777Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7490574215981671873:2589] client cli disconnected session shared/cli_5_1_12402894990352272943_v1 >> KqpScheme::CreateDropTableViaApiMultipleTime [GOOD] >> KqpScheme::CreateDropColumnTable >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> TVectorIndexTests::CreateTableMultiColumn >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TVectorIndexTests::CreateTableMultiColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:40.401141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:40.401228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:40.401260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:40.401296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:40.401328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:40.401347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:40.401386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:40.401643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:40.450688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:40.450725Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:40.457531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:40.457611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:40.457716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:40.461969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:40.462124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:40.462652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.462802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:40.464271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.465334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:40.465378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.465510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:40.465553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:40.465586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:40.465718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.470954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:40.556755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:40.556928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.557071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:40.557239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:40.557274Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.558811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.558910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:40.559047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.559080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:40.559104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:40.559143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:40.560437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.560472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:40.560501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:40.561786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.561813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.561848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:40.561882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:40.564403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:40.565528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:40.565727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:40.566382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.566488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:40.566530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-07T14:13:40.566748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:40.566780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:40.566887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:40.566931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:40.568310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:40.568354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:40.568487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.568542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:40.568814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.568854Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:40.568954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:40.568986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:40.569021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:40.569054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:40.569083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:40.569113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:40.569173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:40.569224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:40.569250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:40.570517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:40.570581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:40.570603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:40.570632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:40.570673Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:40.570757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... sVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: 
"compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:40.803014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:40.803187Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 212us result status StatusSuccess 2025-04-07T14:13:40.803423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: 
"indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:40.803870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:40.803988Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 151us result status StatusSuccess 2025-04-07T14:13:40.804226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: 
"String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 24213, MsgBus: 3830 2025-04-07T14:13:20.148916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574149404382773:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:20.149020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a85/r3tmp/tmpZLjLCH/pdisk_1.dat 2025-04-07T14:13:20.470534Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24213, node 1 2025-04-07T14:13:20.542057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:20.542165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:20.544068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:20.667750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:20.667778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:20.667787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:20.667907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3830 TClient is connected to server localhost:3830 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:21.345820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:21.382092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:21.525264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:21.655173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:21.709720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:22.868739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574157994318941:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:22.868847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:23.375101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.403097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.429308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.458799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.499072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.540175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:23.600236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574162289286737:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:23.600323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:23.600397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574162289286742:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:23.605700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:23.616859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574162289286744:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:13:24.786218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:13:25.153148Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574149404382773:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:25.153213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:25.627469Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 1918, MsgBus: 19395 2025-04-07T14:13:33.808850Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574205875844160:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:33.808886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a85/r3tmp/tmp7eGdmU/pdisk_1.dat 2025-04-07T14:13:33.894047Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1918, node 2 2025-04-07T14:13:33.938636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:33.938720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:33.940348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:33.952105Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:33.952128Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:33.952145Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:33.952269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19395 TClient is connected to server localhost:19395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:34.329922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:34.339038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:34.410414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:34.532617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:34.605634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:36.470312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574218760747776:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:36.470393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:36.504639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.533680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.558011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.587117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.611330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.641808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:36.718295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574218760748271:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:36.718398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:36.718470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574218760748276:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:36.721662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:36.729837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574218760748278:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:13:37.782116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:13:37.811852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:13:37.845054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:13:38.809282Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574205875844160:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:38.809329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-07T14:12:57.067870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574048865993497:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:57.067953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:57.093622Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574049998481351:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:57.199737Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df6/r3tmp/tmpj6eCVP/pdisk_1.dat 2025-04-07T14:12:57.217504Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:57.232881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:57.352096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:57.360809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:57.360922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:57.365054Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:57.367250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21314, node 1 2025-04-07T14:12:57.408581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:57.408654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-07T14:12:57.411456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:57.422512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001df6/r3tmp/yandex2Sm704.tmp 2025-04-07T14:12:57.422530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001df6/r3tmp/yandex2Sm704.tmp 2025-04-07T14:12:57.422635Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001df6/r3tmp/yandex2Sm704.tmp 2025-04-07T14:12:57.422736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:57.465694Z INFO: TTestServer started on Port 14425 GrpcPort 21314 TClient is connected to server localhost:14425 PQClient connected to localhost:21314 === TenantModeEnabled() = 0 === Init PQ - start server on port 21314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:12:57.758503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:12:57.758722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.758938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:12:57.759137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:12:57.759191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.761340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:12:57.761476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:12:57.761668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.761707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:12:57.761762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:12:57.761779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:12:57.762625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:57.762654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:12:57.762669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:57.763562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.763597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:12:57.763609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:12:57.764923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.764949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.764995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:57.765039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:57.767837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:12:57.769526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:12:57.769707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:12:57.772081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035177815, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:12:57.772242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574048865994098 RawX2: 4294969657 } } Step: 1744035177815 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:12:57.772278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:57.772504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:12:57.772540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:12:57.772711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:12:57.772756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:12:57.778615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:12:57.778641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:12:57.778821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:12:57.778871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574048865994131:2391], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:12:57.778912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.778934Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:12:57.779040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:12:57.779063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:57.779089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:12:57.779106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:12:57.779117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:12:57.779124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:12:57.779195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:12:57.779219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:12:57.779238Z ... [1 (0), ], RequireBalancing=0 [] 2025-04-07T14:13:39.888074Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_4908487378625753647_v1" (Sender=[5:7490574230480232081:2591], Pipe=[5:7490574230480232084:2591], Partitions=[], ActiveFamilyCount=0) 2025-04-07T14:13:39.888138Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_4908487378625753647_v1" sender [5:7490574230480232081:2591] lock partition 0 for ReadingSession "shared/cli_5_1_4908487378625753647_v1" (Sender=[5:7490574230480232081:2591], Pipe=[5:7490574230480232084:2591], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-07T14:13:39.888205Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-07T14:13:39.888234Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000198s 2025-04-07T14:13:39.889506Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_4908487378625753647_v1" ClientId: "cli" PipeClient { RawX1: 7490574230480232084 RawX2: 4503621102209567 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-04-07T14:13:39.889620Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-07T14:13:39.889908Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2025-04-07T14:13:39.890582Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_4908487378625753647_v1:1 with generation 1 2025-04-07T14:13:39.893373Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1744035219877 } Cookie: 18446744073709551615 } 2025-04-07T14:13:39.893416Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-07T14:13:39.893462Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 sending to client partition status 2025-04-07T14:13:39.894220Z :INFO: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2025-04-07T14:13:39.894569Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-07T14:13:39.894681Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-07T14:13:39.894731Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-07T14:13:39.894756Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-07T14:13:39.894803Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-07T14:13:39.894821Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1TEvPartitionReady. Aval parts: 1 2025-04-07T14:13:39.894862Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 performing read request: guid# 2a2c0b8c-d4bce50b-585428d9-54e7b3b4, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T14:13:39.894991Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 2a2c0b8c-d4bce50b-585428d9-54e7b3b4 2025-04-07T14:13:39.895636Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744035219775 CreateTimestampMS: 1744035219773 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744035219779 CreateTimestampMS: 1744035219774 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744035219805 CreateTimestampMS: 1744035219774 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-07T14:13:39.895784Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-07T14:13:39.895824Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 2a2c0b8c-d4bce50b-585428d9-54e7b3b4 has messages 1 2025-04-07T14:13:39.895897Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 read done: guid# 2a2c0b8c-d4bce50b-585428d9-54e7b3b4, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-04-07T14:13:39.895925Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 response to read: guid# 2a2c0b8c-d4bce50b-585428d9-54e7b3b4 2025-04-07T14:13:39.896085Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 Process answer. Aval parts: 0 2025-04-07T14:13:39.896309Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-07T14:13:39.896394Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-07T14:13:39.896621Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-07T14:13:39.896692Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] Returning serverBytesSize = 490 to budget 2025-04-07T14:13:39.896727Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-04-07T14:13:39.897029Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T14:13:39.897168Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T14:13:39.897238Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T14:13:39.897264Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-07T14:13:39.897306Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-07T14:13:39.897345Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] Returning serverBytesSize = 0 to budget 2025-04-07T14:13:39.897310Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-04-07T14:13:39.897407Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 got read request: guid# 53e11255-d35a396e-65f8040-f75ef75e 2025-04-07T14:13:39.897510Z :INFO: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] Closing read session. 
Close timeout: 0.000000s 2025-04-07T14:13:39.897537Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-07T14:13:39.897564Z :INFO: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] Counters: { Errors: 0 CurrentSessionLifetimeMs: 19 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:39.897659Z :NOTICE: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:13:39.897694Z :DEBUG: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] [] Abort session to cluster 2025-04-07T14:13:39.898459Z :NOTICE: [] [] [830e6c1a-b192bc16-f154f3bc-fdf7bc17] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:39.898443Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 grpc read done: success# 0, data# { } 2025-04-07T14:13:39.898466Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 grpc read failed 2025-04-07T14:13:39.898484Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 grpc closed 2025-04-07T14:13:39.898509Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4908487378625753647_v1 is DEAD 2025-04-07T14:13:39.899436Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_4908487378625753647_v1 2025-04-07T14:13:39.899537Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7490574230480232084:2591] disconnected; active server actors: 1 2025-04-07T14:13:39.899581Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7490574230480232084:2591] client cli disconnected session shared/cli_5_1_4908487378625753647_v1 |95.6%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> TAsyncIndexTests::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: 2025-04-07T14:13:20.310939Z node 1 :BS_PROXY_GET ERROR: [023d050af957c82c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] DEADLINE Size# 0 RequestedSize# 5} ErrorReason# "status# DEADLINE from# [0:1:0:0:0]"} Marker# BPG29 2025-04-07T14:13:20.311113Z node 1 :BS_VDISK_PATCH ERROR: VDISK[0:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [72057594037927937:2:1:2:1:5:0] PatchedBlobId# [72057594037927937:2:1:2:4:5:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [0:1:0:0:0] Marker# BSVSP01 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event 
NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:143:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:145:2167] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:149:2057] recipient: [5:145:2167] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:148:2168] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:219:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2172] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:154:2057] recipient: [7:150:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2173] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:153:2174] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:157:2057] recipient: [8:153:2174] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:156:2175] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:159:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:158:2177] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:228:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:156:2176] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:160:2057] recipient: [11:156:2176] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:159:2177] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:229:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:157:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:160:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:159:2179] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:163:2057] recipient: [12:159:2179] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:162:2180] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:215:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:161:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:164:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:165:2057] recipient: [13:163:2183] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:167:2057] recipient: [13:163:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:166:2184] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:219:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:166:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:169:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:168:2188] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:172:2057] recipient: [15:168:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:171:2189] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:241:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] >> TAsyncIndexTests::OnlineBuild >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> KqpScheme::CreateDropColumnTable [GOOD] >> KqpScheme::CreateDropColumnTableNegative >> TAsyncIndexTests::CreateTable [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:43.275340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:43.275417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.275452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:43.275484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:43.275540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:43.275567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:43.275640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.275976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:43.327338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-07T14:13:43.327389Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:43.337804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:43.337929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:43.338078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:43.343231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:43.343414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:43.344122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.344295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:43.346008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.347138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.347188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.347278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:43.347312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.347339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:43.347511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.353433Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.468450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:43.468676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.468871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:43.469077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:43.469128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.470874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.471014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-07T14:13:43.471176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.471226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:43.471259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:43.471309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:43.472871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.472917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:43.472951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:43.474386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.474424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.474479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.474522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.483771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:43.485505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:43.485709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:43.486553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.486674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:43.486747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.486970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:43.487012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.487159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:43.487244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:43.489154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.489194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.489343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.489380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:43.489585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.489624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:43.489721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:43.489755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.489798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:43.489831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.489863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:43.489889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:43.489949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:43.489988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:43.490023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:43.491685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:43.491785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:43.491821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:43.491854Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:43.491901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:43.491997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
716926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 4294969595 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:43.716979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.717019Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:43.717055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-07T14:13:43.717095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-07T14:13:43.717681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:43.717710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-07T14:13:43.717859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:43.717891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:13:43.717963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969597 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-07T14:13:43.718002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.718025Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.718047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:13:43.718073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-07T14:13:43.723351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:43.723532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:43.728385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:43.728562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-07T14:13:43.728767Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:43.729147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.729328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:43.729571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-07T14:13:43.729609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-07T14:13:43.729698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-07T14:13:43.729730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-07T14:13:43.729765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-07T14:13:43.730259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.730611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.730662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-07T14:13:43.730725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-07T14:13:43.730761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:43.730795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-07T14:13:43.730857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:378:2343] message: TxId: 101 2025-04-07T14:13:43.730906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-07T14:13:43.730953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-07T14:13:43.730987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-07T14:13:43.731110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:13:43.731147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-07T14:13:43.731178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-07T14:13:43.731207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:43.731225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-07T14:13:43.731241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-07T14:13:43.731274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:13:43.733894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-07T14:13:43.733947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:379:2344] TestWaitNotification: OK 
eventTxId 101 2025-04-07T14:13:43.734390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:13:43.734621Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 235us result status StatusSuccess 2025-04-07T14:13:43.735367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TAsyncIndexTests::OnlineBuild [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> KqpScripting::StreamOperationTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-04-07T14:13:21.687127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:21.687323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:21.687474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107e/r3tmp/tmp7LgRVf/pdisk_1.dat 2025-04-07T14:13:21.963479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:13:21.999491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:22.048625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:22.048839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:22.060350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:22.172369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:22.204021Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:13:22.205003Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:13:22.205429Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:13:22.205656Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:22.239609Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:13:22.240152Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:22.240240Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:13:22.241583Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:13:22.241639Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:13:22.241684Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:13:22.241937Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:13:22.270991Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:13:22.271259Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:13:22.271381Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:13:22.271418Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:13:22.271447Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:13:22.271485Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.271805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.271859Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.272002Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:13:22.272080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:13:22.272430Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.272467Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.272504Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:13:22.272545Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.272575Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:22.272601Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.272637Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:22.272678Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:22.272715Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:13:22.272759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:22.272880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:13:22.272946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:13:22.273052Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:13:22.273294Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:13:22.273352Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:13:22.273453Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:13:22.273513Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:22.273562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:13:22.273601Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:13:22.273636Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.273880Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:13:22.273908Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:13:22.273941Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:13:22.273970Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.274029Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:13:22.274056Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:13:22.274089Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:13:22.274117Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.274137Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:13:22.275300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:13:22.275353Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:13:22.286040Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:13:22.286130Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.286163Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.286210Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:13:22.286269Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:13:22.466433Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.466480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.466516Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:13:22.466888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:13:22.466916Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:13:22.467008Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.467051Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:13:22.467078Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:13:22.467100Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:13:22.470863Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:13:22.470926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.471866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.471912Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.471956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.471997Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:13:22.472048Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.472114Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... 7Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-07T14:13:43.585988Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-04-07T14:13:43.586038Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.586068Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-07T14:13:43.586101Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:13:43.586130Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:13:43.586196Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-04-07T14:13:43.586240Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-04-07T14:13:43.586284Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-04-07T14:13:43.586332Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.586359Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:13:43.586382Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-07T14:13:43.586406Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-04-07T14:13:43.586476Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.586505Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-07T14:13:43.586529Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-07T14:13:43.586562Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-07T14:13:43.586588Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 
2025-04-07T14:13:43.586616Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-07T14:13:43.586646Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-07T14:13:43.586678Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-07T14:13:43.586718Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.586746Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-07T14:13:43.586771Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-07T14:13:43.586802Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-07T14:13:43.586838Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.586863Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-07T14:13:43.586888Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-04-07T14:13:43.586912Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-04-07T14:13:43.586948Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-04-07T14:13:43.587104Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-04-07T14:13:43.587160Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:13:43.587223Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:43.587258Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-04-07T14:13:43.587299Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-04-07T14:13:43.587344Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:43.587552Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-04-07T14:13:43.587588Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-04-07T14:13:43.587630Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:43.587696Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:43.587742Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-07T14:13:43.587767Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:13:43.587804Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-04-07T14:13:43.587847Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:43.587885Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:43.587932Z node 6 
:TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:43.587975Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:43.599052Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 100} 2025-04-07T14:13:43.599180Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:13:43.599280Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:43.599337Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:43.599409Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:558:2485] 2025-04-07T14:13:43.599492Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:43.601013Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:731:2603], Recipient [6:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:43.601098Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:43.601162Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:730:2602], serverId# [6:731:2603], sessionId# [0:0:0] 2025-04-07T14:13:43.601335Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:729:2601], Recipient [6:632:2537]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T14:13:43.602347Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:734:2606], Recipient [6:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:43.602406Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:43.602462Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:733:2605], serverId# [6:734:2606], sessionId# [0:0:0] 2025-04-07T14:13:43.602692Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:732:2604], Recipient [6:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T14:13:43.602795Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:13:43.602852Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:13:43.602903Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-07T14:13:43.602970Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-07T14:13:43.603063Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:13:43.603109Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:13:43.603152Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:13:43.603191Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-07T14:13:43.603266Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-07T14:13:43.603318Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:13:43.603353Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:13:43.603383Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:13:43.603413Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:13:43.603520Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T14:13:43.603806Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:732:2604], 1000} after executionsCount# 1 2025-04-07T14:13:43.603875Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:732:2604], 1000} sends rowCount# 3, bytes# 72, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551543, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:13:43.603959Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:732:2604], 1000} finished in read 2025-04-07T14:13:43.604043Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:13:43.604073Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:13:43.604101Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:43.604129Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:43.604175Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-07T14:13:43.604200Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:13:43.604229Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-07T14:13:43.604272Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:13:43.604407Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:13:43.829317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:43.829408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.829442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:43.829474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:43.829525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:43.829573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:43.829638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.829942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:43.888505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:43.888538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:43.894995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:43.895090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:43.895220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:43.898406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:43.898516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:43.898923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.899044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:43.900072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.900856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.900891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.900979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:43.901007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.901029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:43.901133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.904810Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.993548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:43.993750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.993911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:43.994100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:43.994138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.995592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.995702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:43.995806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.995845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:43.995873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:43.995908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:43.997156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.997203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:43.997235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:43.998353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.998386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.998434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.998471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:44.001491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:44.002900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:44.003056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:44.003899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:44.004019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo 
{ RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:44.004070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:44.004257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:44.004296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:44.004433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:44.004495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:44.005985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:44.006024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:44.006176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:44.006209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:44.006406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:44.006442Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:44.006531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:44.006560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:44.006599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:44.006630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:44.006656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:44.006679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:44.006753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:44.006787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:44.006810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:13:44.008468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:44.008561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:13:44.008592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 1 2025-04-07T14:13:44.008641Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:13:44.008700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:44.008781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710760, response: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-07T14:13:44.419611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-04-07T14:13:44.419738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-07T14:13:44.419794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-07T14:13:44.419860Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-04-07T14:13:44.419931Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:372:2346], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-07T14:13:44.420107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T14:13:44.420146Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-07T14:13:44.420189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:13:44.420266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:44.422255Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 
2025-04-07T14:13:44.422321Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:372:2346], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:13:44.422584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-07T14:13:44.422682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:13:44.422794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-07T14:13:44.422817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-07T14:13:44.422845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-07T14:13:44.423011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:44.423086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:44.423130Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-07T14:13:44.423159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T14:13:44.424554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-07T14:13:44.424620Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-07T14:13:44.424695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T14:13:44.424717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:13:44.424749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-07T14:13:44.424788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2152] message: TxId: 281474976710760 2025-04-07T14:13:44.424836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-07T14:13:44.424859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T14:13:44.424878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-07T14:13:44.424931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-07T14:13:44.426169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T14:13:44.426211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-07T14:13:44.426248Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-07T14:13:44.426316Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:372:2346], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:13:44.427433Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:13:44.427486Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:372:2346], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read 
rows: 0, read bytes: 0 }} 2025-04-07T14:13:44.427523Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-07T14:13:44.428599Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-07T14:13:44.428686Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:372:2346], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:13:44.428730Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-07T14:13:44.428810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:13:44.428852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:461:2425] TestWaitNotification: OK eventTxId 102 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 11622, MsgBus: 13010 2025-04-07T14:12:31.405694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573938581255410:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:31.405742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eb5/r3tmp/tmpSfuFvh/pdisk_1.dat 2025-04-07T14:12:31.833344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:31.842148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:31.842230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:31.852857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11622, node 1 2025-04-07T14:12:31.945148Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:31.945185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:31.945193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:31.945280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13010 TClient is connected to server localhost:13010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:32.471764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.484245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:12:32.503236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.638677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.797478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:32.884234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:34.418360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951466158791:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.429586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.686414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.709820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.743575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.774662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.804455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.834810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:12:34.915033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951466159292:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.915105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.915114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573951466159297:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:34.918504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:12:34.929175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573951466159299:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:12:36.135885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156171, txId: 281474976710672] shutting down 2025-04-07T14:12:36.398111Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156409, txId: 281474976710675] shutting down 2025-04-07T14:12:36.417219Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573938581255410:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:36.418748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:36.687327Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156710, txId: 281474976710678] shutting down 2025-04-07T14:12:36.919661Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035156955, txId: 281474976710681] shutting down 2025-04-07T14:12:37.181259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035157200, txId: 281474976710684] shutting down 2025-04-07T14:12:37.449566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035157473, txId: 281474976710687] shutting down 2025-04-07T14:12:37.743372Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035157767, txId: 281474976710690] shutting down 2025-04-07T14:12:38.070068Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158103, txId: 281474976710693] shutting down 2025-04-07T14:12:38.343307Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158376, txId: 281474976710696] shutting down 2025-04-07T14:12:38.596738Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158635, txId: 281474976710699] shutting down 2025-04-07T14:12:38.846382Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035158880, txId: 281474976710702] shutting down 2025-04-07T14:12:39.083390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035159118, txId: 281474976710705] shutting down 2025-04-07T14:12:39.317866Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035159356, txId: 281474976710708] shutting down 2025-04-07T14:12:39.547533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035159587, txId: 281474976710711] shutting down 2025-04-07T14:12:39.758021Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035159790, txId: 281474976710714] shutting down 2025-04-07T14:12:39.995876Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035160035, txId: 281474976710717] shutting down 2025-04-07T14:12:40.269331Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035160301, txId: 281474976710720] shutting down 2025-04-07T14:12:40.499126Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035160539, txId: 281474976710723] shutting down 2025-04-07T14:12:40.764583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035160791, txId: 281474976710726] shutting down 2025-04-07T14:12:41.004176Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161043, txId: 281474976710729] shutting down 2025-04-07T14:12:41.258569Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161295, txId: 281474976710732] shutting down 2025-04-07T14:12:41.514962Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161554, txId: 281474976710735] shutting down 2025-04-07T14:12:41.771405Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035161806, txId: 281474976710738] shutting down 2025-04-07T14:12:41.996840Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035162030, ... T14:13:31.577120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035211597, txId: 281474976711191] shutting down 2025-04-07T14:13:31.943258Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035211968, txId: 281474976711194] shutting down 2025-04-07T14:13:32.252637Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035212283, txId: 281474976711197] shutting down 2025-04-07T14:13:32.545562Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035212577, txId: 281474976711200] shutting down 2025-04-07T14:13:32.878421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035212906, txId: 281474976711203] shutting down 2025-04-07T14:13:33.226754Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035213256, txId: 281474976711206] shutting down 2025-04-07T14:13:33.578863Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035213585, txId: 281474976711209] shutting down 2025-04-07T14:13:33.924446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035213949, txId: 281474976711212] shutting down 2025-04-07T14:13:34.291737Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035214299, txId: 281474976711215] shutting down 2025-04-07T14:13:34.659039Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035214691, txId: 281474976711218] shutting down 2025-04-07T14:13:34.973141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035215006, txId: 281474976711221] shutting down 2025-04-07T14:13:35.304279Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035215307, txId: 281474976711224] shutting down 2025-04-07T14:13:35.644243Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035215671, txId: 281474976711227] shutting down 2025-04-07T14:13:35.975935Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: 
[step: 1744035216007, txId: 281474976711230] shutting down 2025-04-07T14:13:36.288637Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035216315, txId: 281474976711233] shutting down 2025-04-07T14:13:36.631400Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035216651, txId: 281474976711236] shutting down 2025-04-07T14:13:36.973521Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035216994, txId: 281474976711239] shutting down 2025-04-07T14:13:37.342753Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035217365, txId: 281474976711242] shutting down 2025-04-07T14:13:37.692978Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035217722, txId: 281474976711245] shutting down 2025-04-07T14:13:38.009533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035218037, txId: 281474976711248] shutting down 2025-04-07T14:13:38.314729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035218345, txId: 281474976711251] shutting down 2025-04-07T14:13:38.635815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035218667, txId: 281474976711254] shutting down 2025-04-07T14:13:38.926552Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035218954, txId: 281474976711257] shutting down 2025-04-07T14:13:39.263750Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035219290, txId: 281474976711260] shutting down 2025-04-07T14:13:39.569607Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035219591, txId: 281474976711263] shutting down 2025-04-07T14:13:39.897838Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035219927, txId: 281474976711266] shutting down 2025-04-07T14:13:40.215834Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035220242, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 17680, MsgBus: 17240 2025-04-07T14:13:40.947067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574236247062832:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:40.947181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eb5/r3tmp/tmpofEH7O/pdisk_1.dat 2025-04-07T14:13:41.035603Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17680, node 2 2025-04-07T14:13:41.068535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:41.068643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:41.070389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:41.099008Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-07T14:13:41.099038Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:41.099045Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:41.099170Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17240 TClient is connected to server localhost:17240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:41.469657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:41.486918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:41.555441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:41.701290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:41.773429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:43.406059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574249131966456:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:43.406133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:43.433340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.457712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.480379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.504847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.531504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.560786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:43.600749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574249131966947:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:43.600830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:43.601016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574249131966952:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:43.604820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:43.613615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574249131966954:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScheme::AddChangefeed [GOOD] >> KqpScheme::AddChangefeedWhenDisabled |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] |95.7%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-04-07T14:09:32.777024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:09:32.777340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:09:32.777505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013ba/r3tmp/tmpQyju6K/pdisk_1.dat 2025-04-07T14:09:33.172044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4575, node 1 2025-04-07T14:09:33.378939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:09:33.378986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:09:33.379016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:09:33.379593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:09:33.381585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:09:33.475115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:33.475243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:33.494033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18688 2025-04-07T14:09:34.125660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:09:37.304706Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:09:37.346743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:37.346828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:37.389492Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:09:37.394862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:37.610438Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:09:37.678049Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:09:37.678152Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:09:37.710670Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:09:37.710848Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:09:37.711060Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:09:37.711137Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:09:37.711194Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:09:37.711253Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:09:37.711323Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:09:37.711388Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:09:37.711848Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:09:37.942107Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:37.942221Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:09:37.952086Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:09:37.960429Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:09:37.961311Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:09:37.962420Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:09:37.989476Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:09:37.989549Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:09:37.989618Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:09:37.991549Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:09:37.991653Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:09:38.006346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:09:38.023748Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:09:38.023919Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:09:38.041417Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:09:38.057409Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:09:38.099385Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:09:38.368381Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:09:38.555257Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:09:39.363701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:39.363844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:09:39.379519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:09:39.475467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:09:39.475648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:09:39.475935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:09:39.476026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:09:39.476110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:09:39.476227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:09:39.476308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:09:39.476401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:09:39.476532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:09:39.476652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:09:39.476758Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:09:39.476894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:09:39.508547Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:09:39.508643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:09:39.508874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:09:39.508943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:09:39.509210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:09:39.509277Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:09:39.509385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:13:34.901041Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWYwOWEwZTMtZDQ5YjZlZGUtZjc3YTEwNTUtZTk1NGMxYTU=, TxId: 2025-04-07T14:13:34.901092Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWYwOWEwZTMtZDQ5YjZlZGUtZjc3YTEwNTUtZTk1NGMxYTU=, TxId: 2025-04-07T14:13:34.901439Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:13:34.915049Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:13:34.915100Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2657:3164] 2025-04-07T14:13:35.490051Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-07T14:13:35.490099Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:13:36.027603Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:13:36.027674Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-07T14:13:36.027704Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-07T14:13:36.027727Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-04-07T14:13:36.038673Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-04-07T14:13:36.038734Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-07T14:13:37.333537Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:13:37.344373Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:13:38.685568Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:13:38.685635Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-07T14:13:38.685660Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-07T14:13:38.685674Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:13:39.972568Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:13:39.972737Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:13:39.983613Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:13:39.983734Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:13:39.983772Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:13:39.984336Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:13:39.997509Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:13:39.997870Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:13:39.997929Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:13:39.998242Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:13:40.024061Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:13:40.024244Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-04-07T14:13:40.024757Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9576:6692], server id = [2:9577:6693], tablet id = 72075186224037899, status = OK 2025-04-07T14:13:40.024854Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9576:6692], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:13:40.025403Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:13:40.025475Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:13:40.025751Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9576:6692], server id = [2:9577:6693], tablet id = 72075186224037899 2025-04-07T14:13:40.025780Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:13:40.025844Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:13:40.025986Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:13:40.026273Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:13:40.028655Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:13:40.047601Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDljYTAyMGQtOWJmOTM5NmMtMTc1OTg3MzctY2ZmZmYwNTE=, TxId: 2025-04-07T14:13:40.047672Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDljYTAyMGQtOWJmOTM5NmMtMTc1OTg3MzctY2ZmZmYwNTE=, TxId: 2025-04-07T14:13:40.048031Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:13:40.072858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:13:40.072915Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2657:3164] 2025-04-07T14:13:40.727790Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-04-07T14:13:40.727850Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:13:41.277871Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-07T14:13:41.277928Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-07T14:13:41.278006Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:13:41.278044Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-07T14:13:41.278075Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:13:42.345604Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:13:43.498265Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:13:43.519826Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:13:43.519904Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-07T14:13:43.519936Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:13:44.957553Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:13:44.957683Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:13:44.957721Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:13:44.958378Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:13:44.985520Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:13:44.985962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:13:44.986035Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:13:44.986438Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:13:44.999792Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:13:44.999965Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-04-07T14:13:45.000332Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9753:6790], server id = [2:9754:6791], tablet id = 72075186224037899, status = OK 2025-04-07T14:13:45.000420Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9753:6790], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:13:45.000820Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:13:45.000878Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:13:45.001074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:13:45.001192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:13:45.001306Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9753:6790], server id = [2:9754:6791], tablet id = 72075186224037899 2025-04-07T14:13:45.001325Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:13:45.001441Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:13:45.003482Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:13:45.033046Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTk2OGUyNy0zZDM5MGEwOS1iYmM1NjVkOC0xMjE1MDFj, TxId: 2025-04-07T14:13:45.033114Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTk2OGUyNy0zZDM5MGEwOS1iYmM1NjVkOC0xMjE1MDFj, TxId: 2025-04-07T14:13:45.033560Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:13:45.060321Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:13:45.060387Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2657:3164] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:159:2179] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:163:2057] recipient: [11:159:2179] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:162:2180] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:215:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:164:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:165:2057] recipient: [12:163:2183] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:167:2057] recipient: [12:163:2183] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:166:2184] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:219:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:166:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:169:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:170:2057] recipient: [13:168:2188] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:172:2057] recipient: [13:168:2188] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:171:2189] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:241:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:167:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:171:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:173:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! 
new actor is[15:172:2189] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:242:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScheme::CreateDropColumnTableNegative [GOOD] >> KqpScheme::CreateBackupCollectionDisabledByDefault |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-04-07T14:13:21.470165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:21.470598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:21.470888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106f/r3tmp/tmpvZAEeq/pdisk_1.dat 2025-04-07T14:13:21.856702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:13:21.902889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:21.955523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:21.955737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:21.967235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:22.088873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:22.132049Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:13:22.133128Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:13:22.133497Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:13:22.133664Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:22.168863Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:13:22.169691Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:22.169818Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:13:22.171629Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:13:22.171705Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:13:22.171760Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:13:22.172175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:13:22.207504Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:13:22.207724Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:13:22.207811Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:13:22.207839Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:13:22.207867Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:13:22.207905Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.208199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.208253Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.208373Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:13:22.208440Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:13:22.208799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.208843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.208877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:13:22.208928Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.208964Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:22.209003Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.209042Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:22.209071Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:22.209118Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:13:22.209169Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:22.209282Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:13:22.209309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:13:22.209384Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:13:22.211085Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:13:22.211156Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:13:22.211246Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:13:22.211294Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:22.211376Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:13:22.211438Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:13:22.211473Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.211758Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:13:22.211795Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:13:22.211828Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:13:22.211863Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.211919Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:13:22.211971Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:13:22.212009Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:13:22.212038Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.212061Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:13:22.213580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:13:22.213628Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:13:22.224295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:13:22.224391Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.224435Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.224488Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:13:22.224547Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:13:22.406335Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.406392Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.406429Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:13:22.406848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:13:22.406882Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:13:22.406997Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.407039Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:13:22.407082Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:13:22.407114Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:13:22.411296Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:13:22.411362Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.413081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.413128Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.413173Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.413207Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:13:22.413258Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.413298Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... xecuting on unit WaitForPlan 2025-04-07T14:13:48.635657Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-04-07T14:13:48.635892Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 525 RawX2: 30064773530 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:13:48.635943Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:48.636285Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:771:2632], Recipient [7:771:2632]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:48.636328Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:48.636400Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:48.636452Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:13:48.636502Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:48.636552Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-04-07T14:13:48.636590Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-04-07T14:13:48.636637Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.636715Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-04-07T14:13:48.636765Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-04-07T14:13:48.636805Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-04-07T14:13:48.637099Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 
PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-07T14:13:48.637190Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-07T14:13:48.637246Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-07T14:13:48.637313Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-04-07T14:13:48.637357Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637385Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-07T14:13:48.637406Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:13:48.637432Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:13:48.637496Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-04-07T14:13:48.637542Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-04-07T14:13:48.637590Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-04-07T14:13:48.637635Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637653Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:13:48.637668Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-07T14:13:48.637685Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-04-07T14:13:48.637711Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637725Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-07T14:13:48.637741Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-07T14:13:48.637762Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-07T14:13:48.637783Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637803Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-07T14:13:48.637821Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-07T14:13:48.637842Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-07T14:13:48.637864Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637884Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-07T14:13:48.637898Z node 7 :TX_DATASHARD 
TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-07T14:13:48.637912Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-07T14:13:48.637931Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.637945Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-07T14:13:48.637957Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-04-07T14:13:48.637972Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-07T14:13:48.637998Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-07T14:13:48.638311Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-04-07T14:13:48.638399Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-04-07T14:13:48.638447Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-04-07T14:13:48.638478Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:13:48.638527Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:48.638568Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:48.638613Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:48.638975Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:48.639021Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-07T14:13:48.639064Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-07T14:13:48.639312Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-07T14:13:48.639395Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-07T14:13:48.639449Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-07T14:13:48.639531Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-04-07T14:13:48.639702Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-04-07T14:13:48.639748Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-04-07T14:13:48.639819Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:13:48.639870Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:48.639916Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-04-07T14:13:48.639951Z node 7 :TX_DATASHARD TRACE: 
Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-04-07T14:13:48.639986Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:48.640179Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-04-07T14:13:48.640207Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-04-07T14:13:48.640245Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:48.640278Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:48.640303Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-07T14:13:48.640329Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:13:48.640377Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-04-07T14:13:48.640419Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:48.640456Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:48.640495Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:48.640531Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:48.641065Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 1234567890011} 2025-04-07T14:13:48.641128Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-07T14:13:48.641746Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:48.641794Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:48.641846Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:764:2626] 2025-04-07T14:13:48.641888Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-04-07T14:12:58.853443Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574055762602973:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.855901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:58.863758Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574052661482199:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:58.863796Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:59.077043Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:59.077394Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bc8/r3tmp/tmpksjCtb/pdisk_1.dat 2025-04-07T14:12:59.402285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.402396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.432253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:59.434195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:59.434265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:59.441878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:59.442017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:59.442500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14661, node 1 2025-04-07T14:12:59.645259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001bc8/r3tmp/yandexjifXfZ.tmp 2025-04-07T14:12:59.645289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001bc8/r3tmp/yandexjifXfZ.tmp 2025-04-07T14:12:59.645429Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001bc8/r3tmp/yandexjifXfZ.tmp 2025-04-07T14:12:59.645531Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:59.816442Z INFO: TTestServer started on Port 27702 GrpcPort 14661 TClient is connected to server localhost:27702 PQClient connected to localhost:14661 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:00.163396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:13:00.275636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:13:02.221941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574072942473011:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.222056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.222149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574072942473019:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.226114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:13:02.228245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574072942473067:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.228376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:02.245749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574072942473035:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:13:02.605527Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574072942473130:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:13:02.607199Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQxNjljYmEtOWFhNTlmM2QtZjk3NWE0ZjktZjgwZjM0OTQ=, ActorId: [1:7490574072942472993:2302], ActorState: ExecuteState, TraceId: 01jr89hxkva0v2rd9nhd3d5dxn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:13:02.609393Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:13:02.613893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.684482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:02.871161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:13:03.364824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89hych1bnm4htytcm5nmxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY0MDk3ZDItNWJiODRjZmQtOTVmODNkZGMtNjNhOWFlOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574077237440861:3080] 2025-04-07T14:13:03.852087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574055762602973:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:03.852158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:03.863849Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574052661482199:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:03.863919Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-07T14:13:09.368246Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574060057570307:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:09.368543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574060057570307:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-07T14:13:09.368635Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574060057570307:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7490574060057570794:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180244 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-07T14:13:09.368732Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7490574060057570307:2125], cacheItem# { Subscriber: { Subscriber: [1:7490574060057570794:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035180244 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: ... 
e::TEvServerDisconnected 2025-04-07T14:13:49.003905Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.003924Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7490574270631393091:2485] destroyed 2025-04-07T14:13:49.003940Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:49.003954Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.003973Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7490574270631393090:2484] destroyed 2025-04-07T14:13:49.003912Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:49.003953Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004022Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004050Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.003997Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7490574270631393093:2487] destroyed 2025-04-07T14:13:49.004023Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:49.004035Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004048Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004078Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004055Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7490574270631393092:2486] destroyed 2025-04-07T14:13:49.004076Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.004701Z :INFO: [/Root] TraceId [] SessionId [123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0] MessageGroupId [123] Write session: close. 
Timeout 0.000000s 2025-04-07T14:13:49.004745Z :INFO: [/Root] TraceId [] SessionId [123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0] MessageGroupId [123] Write session will now close 2025-04-07T14:13:49.004813Z :DEBUG: [/Root] TraceId [] SessionId [123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0] MessageGroupId [123] Write session: aborting 2025-04-07T14:13:49.004860Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [7:7490574270631393082:2481] disconnected; active server actors: 1 2025-04-07T14:13:49.004892Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [7:7490574270631393082:2481] client some@random@consumer disconnected session some@random@consumer_7_1_6627868275026599319_v1 2025-04-07T14:13:49.005238Z :INFO: [/Root] TraceId [] SessionId [123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2025-04-07T14:13:49.005292Z :DEBUG: [/Root] TraceId [] SessionId [123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0] MessageGroupId [123] Write session: destroy 2025-04-07T14:13:49.005901Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0 grpc read done: success: 0 data: 2025-04-07T14:13:49.005919Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0 grpc read failed 2025-04-07T14:13:49.006039Z node 7 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0 2025-04-07T14:13:49.006061Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|34cf7f31-3c56a0b5-f8dfaa13-9dee72f8_0 is DEAD 2025-04-07T14:13:49.006318Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:13:49.006405Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:49.006437Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7490574270631393074:2477] destroyed 2025-04-07T14:13:49.006466Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:13:49.062522Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7490574232566413668:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.062721Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7490574232566413668:2102], cacheItem# { Subscriber: { Subscriber: [8:7490574245451315707:2184] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:49.062833Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7490574275516087031:2340], recipient# [8:7490574275516087030:2315], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.143309Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7490574231976684868:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.143447Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7490574231976684868:2124], cacheItem# { Subscriber: { Subscriber: [7:7490574236271652912:2664] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:49.143579Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7490574274926360421:3977], recipient# [7:7490574274926360420:2490], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.145774Z node 7 
:TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7490574231976684868:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.145892Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7490574231976684868:2124], cacheItem# { Subscriber: { Subscriber: [7:7490574236271652912:2664] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:49.145979Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7490574274926360423:3978], recipient# [7:7490574274926360422:2491], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.394593Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7490574231976684868:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:49.394716Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7490574231976684868:2124], cacheItem# { Subscriber: { Subscriber: [7:7490574231976685346:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 26 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744035219710 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:13:49.394915Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7490574274926360442:3991], recipient# [7:7490574231976684744:2163], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DeletePrepared-Volatile [GOOD] Test command err: 2025-04-07T14:13:21.518648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:13:21.518939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:13:21.519192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001064/r3tmp/tmpnWqBY3/pdisk_1.dat 2025-04-07T14:13:21.856701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:13:21.903530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:21.955524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:21.955744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:21.967239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:22.088897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:22.129957Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:13:22.131060Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:13:22.131622Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:13:22.131875Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:22.176221Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:13:22.176985Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:22.177094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:13:22.178668Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:13:22.178735Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:13:22.178800Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:13:22.179134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:13:22.204722Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:13:22.205800Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:13:22.205925Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:13:22.205967Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:13:22.206022Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-04-07T14:13:22.206074Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.206445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.207161Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.208013Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:13:22.208119Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:13:22.208565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.208604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.208703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:13:22.208766Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.208804Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:22.208840Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.208877Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:13:22.208911Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:22.208954Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:13:22.209002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:22.209124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:13:22.209174Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:13:22.209289Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:13:22.211035Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:13:22.211111Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:13:22.211213Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:13:22.211259Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:22.211316Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:13:22.211360Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:13:22.211394Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.211670Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:13:22.211704Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:13:22.211737Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:13:22.211769Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.211830Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:13:22.211869Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:13:22.211922Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:13:22.211953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.211975Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:13:22.213440Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:13:22.213486Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:13:22.224096Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:13:22.224169Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:13:22.224199Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:13:22.224231Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:13:22.224279Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:13:22.407659Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.407709Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:22.407737Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:13:22.408052Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:13:22.408077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:13:22.408165Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:13:22.408194Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:13:22.408223Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:13:22.408250Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2025-04-07T14:13:22.411670Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:13:22.411729Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:22.413002Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.413045Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:13:22.413086Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:13:22.413122Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:13:22.413164Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:22.413208Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976 ... to execute [1500:101] at 72075186224037888 on unit LoadTxDetails 2025-04-07T14:13:49.686065Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 got data writeOp from cache 1500:101 2025-04-07T14:13:49.686100Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686121Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-07T14:13:49.686137Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:13:49.686153Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:13:49.686211Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically complete end at 72075186224037888 2025-04-07T14:13:49.686246Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically incomplete end at 72075186224037888 2025-04-07T14:13:49.686289Z node 7 :TX_DATASHARD TRACE: Activated operation [1500:101] at 72075186224037888 2025-04-07T14:13:49.686336Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686362Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:13:49.686382Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-07T14:13:49.686408Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildWriteOutRS 2025-04-07T14:13:49.686448Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686463Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-07T14:13:49.686478Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-07T14:13:49.686494Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-07T14:13:49.686511Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686538Z node 7 :TX_DATASHARD 
TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-07T14:13:49.686554Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-07T14:13:49.686569Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-07T14:13:49.686586Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686600Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-07T14:13:49.686614Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-07T14:13:49.686630Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-07T14:13:49.686644Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.686656Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-07T14:13:49.686678Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit ExecuteWrite 2025-04-07T14:13:49.686702Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit ExecuteWrite 2025-04-07T14:13:49.686730Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [1500:101] at 72075186224037888 2025-04-07T14:13:49.686840Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [1500:101] at 72075186224037888, row count=1 2025-04-07T14:13:49.686889Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:13:49.686953Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:13:49.686983Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit ExecuteWrite 2025-04-07T14:13:49.687017Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompleteWrite 2025-04-07T14:13:49.687051Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:49.687206Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is DelayComplete 2025-04-07T14:13:49.687233Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompleteWrite 2025-04-07T14:13:49.687275Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:49.687305Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:49.687340Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2025-04-07T14:13:49.687370Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:13:49.687398Z node 7 :TX_DATASHARD TRACE: Execution plan for [1500:101] at 72075186224037888 has finished 2025-04-07T14:13:49.687441Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:13:49.687478Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:13:49.687515Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 
72075186224037888 has no attached operations 2025-04-07T14:13:49.687551Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:13:49.698627Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 101} 2025-04-07T14:13:49.698742Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:13:49.698847Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:13:49.698912Z node 7 :TX_DATASHARD TRACE: Complete execution for [1500:101] at 72075186224037888 on unit CompleteWrite 2025-04-07T14:13:49.698992Z node 7 :TX_DATASHARD DEBUG: Complete write [1500 : 101] from 72075186224037888 at tablet 72075186224037888 send result to client [7:558:2485] 2025-04-07T14:13:49.699055Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:13:49.700614Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:740:2612], Recipient [7:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:49.700712Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:49.700777Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:739:2611], serverId# [7:740:2612], sessionId# [0:0:0] 2025-04-07T14:13:49.700952Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:738:2610], Recipient [7:632:2537]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T14:13:49.702132Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:743:2615], Recipient [7:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:49.702198Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:49.702255Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:742:2614], serverId# [7:743:2615], sessionId# [0:0:0] 2025-04-07T14:13:49.702440Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:741:2613], Recipient [7:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-07T14:13:49.702515Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:13:49.702576Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/101 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-07T14:13:49.702625Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-07T14:13:49.702696Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-07T14:13:49.702805Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T14:13:49.702862Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:13:49.702904Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:13:49.702946Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-07T14:13:49.702998Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-07T14:13:49.703050Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T14:13:49.703078Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:13:49.703102Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:13:49.703126Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-07T14:13:49.703232Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-07T14:13:49.703490Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:741:2613], 1000} after executionsCount# 1 2025-04-07T14:13:49.703569Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:741:2613], 1000} sends rowCount# 2, bytes# 48, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551567, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:13:49.703692Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:741:2613], 1000} finished in read 2025-04-07T14:13:49.703772Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T14:13:49.703803Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:13:49.703828Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:13:49.703856Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:13:49.703906Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-07T14:13:49.703930Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:13:49.703959Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-07T14:13:49.704008Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:13:49.704112Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest |95.7%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-07T14:12:54.043075Z :ReadSession INFO: Random seed for debugging is 1744035174043055 2025-04-07T14:12:54.385292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574037545526865:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.385776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.446261Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574038680326577:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.456066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.618516Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.621334Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001078/r3tmp/tmpJtxGhx/pdisk_1.dat 2025-04-07T14:12:54.894601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.894696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.896528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.896601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.918997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:54.927608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.928018Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:54.929003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 29226, node 1 2025-04-07T14:12:55.053408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001078/r3tmp/yandex4pix3D.tmp 2025-04-07T14:12:55.053437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001078/r3tmp/yandex4pix3D.tmp 2025-04-07T14:12:55.053594Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001078/r3tmp/yandex4pix3D.tmp 2025-04-07T14:12:55.053698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:55.215971Z INFO: TTestServer started on Port 8553 GrpcPort 29226 TClient is connected to server localhost:8553 PQClient connected to localhost:29226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:55.498755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:55.556057Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:12:57.310932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574050430429607:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.311048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574050430429598:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.311421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.314717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:12:57.316050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574050430429643:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.316144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.331232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574050430429612:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:12:57.529748Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574050430429712:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.529970Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574051565228580:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.530240Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTUyNzgzMzUtMmNiNzE1MjAtZDdmNmM2M2YtZTVlNGY3YzY=, ActorId: [2:7490574051565228540:2280], ActorState: ExecuteState, TraceId: 01jr89hrx00phvkwbmd8a2aq3x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.534262Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.539350Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQ2YWMzN2UtODdhZDIyMDYtMTk3YWFlYjgtMWY4NWQ4YjA=, ActorId: [1:7490574050430429595:2302], ActorState: ExecuteState, TraceId: 01jr89hrtc76pj7q875272pjr4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.539680Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.592011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.701559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.817818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:29226", true, true, 1000); 2025-04-07T14:12:58.189095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr89hsds76gcd259mjxj7c6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBjMzZkMjctOWQxMDE1NzYtNGEzYjczNzAtNTRjMzQzNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490574054725397403:2969] 2025-04-07T14:12:59.390290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574037545526865:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.397438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:59.441513Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574038680326577:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.441573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:13:04.665280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:29226 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T14:13:04.764457Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following t ... r shared/user session shared/user_7_1_15168418859780348774_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2025-04-07T14:13:49.464694Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid ca358571-1251b77c-79dae22-caef5237 has messages 1 2025-04-07T14:13:49.464805Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 read done: guid# ca358571-1251b77c-79dae22-caef5237, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-04-07T14:13:49.464837Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 response to read: guid# ca358571-1251b77c-79dae22-caef5237 2025-04-07T14:13:49.465049Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 Process answer. Aval parts: 0 2025-04-07T14:13:49.465590Z :DEBUG: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:49.465867Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-07T14:13:49.465964Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 grpc read done: success# 1, data# { read { } } 2025-04-07T14:13:49.466051Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-04-07T14:13:49.466133Z :DEBUG: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-07T14:13:49.466091Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 got read request: guid# 68ecbfce-f1b1a8f3-9d40069c-fccc8ea4 DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-07T14:13:48.344000Z WriteTime: 2025-04-07T14:13:48.346000Z Ip: "ipv6:[::1]:43242" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:43242" } } } } 2025-04-07T14:13:49.466374Z :INFO: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] Closing read session. Close timeout: 3.000000s 2025-04-07T14:13:49.466418Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-07T14:13:49.466454Z :INFO: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1319 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:49.466953Z :INFO: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] Closing read session. Close timeout: 0.000000s 2025-04-07T14:13:49.466992Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-07T14:13:49.467025Z :INFO: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1320 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:13:49.467108Z :NOTICE: [/Root] [/Root] [4be2757d-4416cb6b-9b2aefed-b3bd23c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:13:49.467362Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 grpc read done: success# 0, data# { } 2025-04-07T14:13:49.467384Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 grpc read failed 2025-04-07T14:13:49.467403Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 grpc closed 2025-04-07T14:13:49.467433Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15168418859780348774_v1 is DEAD 2025-04-07T14:13:49.467782Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:49.467817Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_15168418859780348774_v1 2025-04-07T14:13:49.467854Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7490574268382963512:2515] destroyed 2025-04-07T14:13:49.467878Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_15168418859780348774_v1 2025-04-07T14:13:49.468165Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7490574268382963509:2512] disconnected; active server actors: 1 2025-04-07T14:13:49.468186Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7490574268382963509:2512] client user disconnected session shared/user_7_1_15168418859780348774_v1 2025-04-07T14:13:50.679377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.679407Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.679431Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:13:50.679751Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:13:50.680197Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:13:50.680370Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.680770Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-07T14:13:50.682086Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.682110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.682134Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:13:50.682467Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:13:50.682880Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:13:50.682995Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.683153Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2025-04-07T14:13:50.684647Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-07T14:13:50.685107Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-07T14:13:50.685171Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-07T14:13:50.685309Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:13:50.685355Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-07T14:13:50.685377Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-07T14:13:50.685421Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-07T14:13:50.687079Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.687101Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.687139Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:13:50.687394Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:13:50.687738Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:13:50.687843Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.688017Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2025-04-07T14:13:50.688656Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.688805Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:13:50.688926Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:13:50.688972Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-07T14:13:50.689051Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2025-04-07T14:13:50.690331Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.690373Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.690411Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:13:50.690695Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-07T14:13:50.691032Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-07T14:13:50.691121Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.691890Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:13:50.691999Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:13:50.692069Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-07T14:13:50.692140Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> KqpScheme::AddChangefeedWhenDisabled [GOOD] >> KqpScheme::AddChangefeedNegative >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-07T14:13:52.031574Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.034598Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.034842Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:52.034890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.034924Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:52.034961Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.035015Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.035056Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:52.035085Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:52.035582Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.036393Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-07T14:13:52.036503Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.053082Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.056756Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.056908Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.060900Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.061042Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.061348Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.062967Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-07T14:13:52.065983Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:52.067626Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-07T14:13:52.067721Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.070600Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.071058Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.071117Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-07T14:13:52.112037Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.115111Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.115350Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-07T14:13:52.115392Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.115422Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-07T14:13:52.115484Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.115533Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.115588Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2025-04-07T14:13:52.115626Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-07T14:13:52.116200Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.116255Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-04-07T14:13:52.116372Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.116540Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.119374Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.119480Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.120159Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.120278Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.120523Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.120765Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:416:2369] 2025-04-07T14:13:52.122551Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-07T14:13:52.122608Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:416:2369] 2025-04-07T14:13:52.122656Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.122923Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:52.123408Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.123461Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:419:2371], now have 1 active actors on pipe 2025-04-07T14:13:52.137417Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.140414Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.140610Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:52.140655Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.140715Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:52.140747Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.140808Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.140873Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:52.140910Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:52.141476Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.141522Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:470:2410], now have 1 active actors on pipe 2025-04-07T14:13:52.141618Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.141775Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.144624Z node 2 :PERSQUEUE DEBUG: 
[PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.144746Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.145439Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 3 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.145538Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.145841Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.146064Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:479:2417] 2025-04-07T14:13:52.147642Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:52.147712Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:479:2417] 2025-04-07T14:13:52.147759Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.148053Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:52.148481Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.148522Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:482:2419], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-04-07T14:13:52.156345Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.156399Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:493:2424], now have 1 active actors on pipe 2025-04-07T14:13:52.156973Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.157009Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:496:2425], now have 1 active actors on pipe 2025-04-07T14:13:52.157052Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.157077Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:497:2425], now have 1 active actors on pipe 2025-04-07T14:13:52.157728Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.157783Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:493:2424] destroyed 2025-04-07T14:13:52.158299Z node 2 :PERSQUEUE DEBUG: [PQ: 
72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.158341Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:496:2425] destroyed 2025-04-07T14:13:52.158407Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.158428Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:497:2425] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> KqpScheme::CreateBackupCollectionDisabledByDefault [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:152:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:152:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:141:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:144:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:145:2057] recipient: [13:143:2166] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:147:2057] recipient: [13:143:2166] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:146:2167] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:216:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:141:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:144:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:145:2057] recipient: [14:143:2166] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:147:2057] recipient: [14:143:2166] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:146:2167] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:216:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:142:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:145:2057] recipient: [15:144:2166] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:146:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:147:2167] sender: [15:148:2057] recipient: [15:144:2166] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:147:2167] Leader for TabletID 72057594037927937 is [15:147:2167] sender: [15:217:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:147:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:150:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:151:2057] recipient: [16:149:2171] Leader for TabletID 72057594037927937 is [16:152:2172] sender: [16:153:2057] recipient: [16:149:2171] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:152:2172] Leader for TabletID 72057594037927937 is [16:152:2172] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:147:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:150:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:151:2057] recipient: [17:149:2171] Leader for TabletID 72057594037927937 is [17:152:2172] sender: [17:153:2057] recipient: [17:149:2171] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:152:2172] Leader for TabletID 72057594037927937 is [17:152:2172] sender: [17:222:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:148:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:150:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:152:2057] recipient: [18:151:2171] Leader for TabletID 72057594037927937 is [18:153:2172] sender: [18:154:2057] recipient: [18:151:2171] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:153:2172] Leader for TabletID 72057594037927937 is [18:153:2172] sender: [18:201:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:153:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:154:2057] recipient: [19:152:2173] Leader for TabletID 72057594037927937 is [19:155:2174] sender: [19:156:2057] recipient: [19:152:2173] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:155:2174] Leader for TabletID 72057594037927937 is [19:155:2174] sender: [19:225:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:153:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:154:2057] recipient: [20:152:2173] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:156:2057] recipient: [20:152:2173] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:155:2174] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:225:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:153:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:155:2057] recipient: [21:154:2173] Leader for TabletID 72057594037927937 is [21:156:2174] sender: [21:157:2057] recipient: [21:154:2173] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:156:2174] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:32.674167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:32.674246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:32.674292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:32.674314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:32.674359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-04-07T14:13:32.674400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:32.674441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:32.674667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:32.724308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:32.724353Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:32.733703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:32.733787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:32.733895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:32.740601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:32.740746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:32.741277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:32.741591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:32.745089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.746596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:32.746644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.746797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:32.746835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:32.746863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:32.746957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:32.752212Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:32.852456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:32.852678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.852860Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:32.853063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:32.853106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.854962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:32.855097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:32.855267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.855324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:32.855357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:32.855401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:32.857058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.857107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:32.857157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:32.858709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.858747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.858789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.858829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.862180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:32.863802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:32.864012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:32.864925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-07T14:13:32.865063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:32.865114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.865362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:32.865411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.865561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:32.865647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:32.867665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:32.867710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:32.867888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.867928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:32.868304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.868354Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:32.868434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:32.868488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.868532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:32.868568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.868616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:32.868648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:32.868734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:32.868771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:32.868810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.865268Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:13:52.865919Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-04-07T14:13:52.865956Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:52.866107Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T14:13:52.866175Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-07T14:13:52.866195Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-07T14:13:52.866219Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-07T14:13:52.866539Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.866606Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.866630Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:13:52.867546Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.867615Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.867639Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:13:52.867659Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-07T14:13:52.867696Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:13:52.867745Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-07T14:13:52.868504Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:13:52.868537Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:52.868704Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:13:52.868772Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-07T14:13:52.868792Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-07T14:13:52.868822Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-07T14:13:52.868851Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-07T14:13:52.868882Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-07T14:13:52.868905Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-07T14:13:52.868961Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:13:52.868982Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-07T14:13:52.868997Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-07T14:13:52.869015Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:13:52.869031Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-07T14:13:52.869046Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-07T14:13:52.869067Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T14:13:52.869513Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.872013Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.872135Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.872158Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.873842Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.873901Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:13:52.875274Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 111669152024 } TabletId: 72075186233409546 State: 4 2025-04-07T14:13:52.875323Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-07T14:13:52.876401Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:13:52.876758Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-07T14:13:52.876926Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-04-07T14:13:52.877112Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-04-07T14:13:52.879296Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:13:52.879335Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T14:13:52.879387Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:13:52.879422Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:13:52.879451Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:13:52.881600Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:13:52.881649Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-07T14:13:52.881968Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-07T14:13:52.882164Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-07T14:13:52.882199Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-07T14:13:52.883099Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-07T14:13:52.883177Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-07T14:13:52.883207Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:620:2549] 2025-04-07T14:13:52.886729Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 346 RawX2: 111669152026 } TabletId: 72075186233409547 State: 4 2025-04-07T14:13:52.886781Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-07T14:13:52.887798Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:13:52.888055Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-07T14:13:52.888169Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:52.888302Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2025-04-07T14:13:52.888473Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:13:52.888498Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:13:52.888532Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:52.891628Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:13:52.891668Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-07T14:13:52.891983Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-07T14:13:52.892181Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:13:52.892220Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-07T14:13:52.027259Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.034465Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.034772Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:52.034819Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.034850Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:52.034893Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.034932Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.034970Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:52.034996Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:52.035519Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.036363Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2025-04-07T14:13:52.036449Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.054847Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.058019Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.058140Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.060962Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.061100Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.061426Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.062963Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-07T14:13:52.066131Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:52.067652Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-07T14:13:52.067751Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.070604Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.071063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.071127Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-07T14:13:52.112499Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.115894Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.116132Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:52.116181Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.116221Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:52.116259Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.116303Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.116367Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:52.116404Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:52.116985Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.117043Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-04-07T14:13:52.117185Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.117356Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.120415Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.120512Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.121214Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.121340Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.121636Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.121827Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:415:2368] 2025-04-07T14:13:52.123705Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:52.123766Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:415:2368] 2025-04-07T14:13:52.123821Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.124112Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:52.124591Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.124634Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-04-07T14:13:52.126818Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.126867Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:428:2375], now have 1 active actors on pipe 2025-04-07T14:13:52.126902Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.126925Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:430:2376], now have 1 active actors on pipe 2025-04-07T14:13:52.127164Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.127201Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:428:2375] destroyed 2025-04-07T14:13:52.128575Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.128617Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:430:2376] destroyed 2025-04-07T14:13:52.551886Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.554246Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.554434Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:52.554468Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.554499Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:52.554533Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.554571Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.554612Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:52.554638Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:52.555061Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.555101Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:258:2250], now have 1 active actors on pipe 2025-04-07T14:13:52.555140Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.555300Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.557652Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermar ... 
c2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.620020Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.620124Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.620876Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.620954Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.621146Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.621320Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:477:2415] 2025-04-07T14:13:52.622359Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-07T14:13:52.622400Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:477:2415] 2025-04-07T14:13:52.622464Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.622657Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:52.622989Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.623020Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:480:2417], now have 1 active actors on pipe 2025-04-07T14:13:52.635295Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.637987Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.638169Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:52.638198Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.638222Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:52.638247Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.638281Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.638311Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:52.638334Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:52.638790Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.638832Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:531:2456], now have 1 active actors on pipe 2025-04-07T14:13:52.638898Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.639024Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.641248Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.641334Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.641868Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:52.641944Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.642129Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.642273Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:540:2463] 2025-04-07T14:13:52.643309Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:52.643344Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:540:2463] 2025-04-07T14:13:52.643393Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.643575Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:52.643877Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.643926Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:543:2465], now have 1 active actors on pipe 2025-04-07T14:13:52.645312Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.645347Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:554:2471], now have 1 active actors on pipe 2025-04-07T14:13:52.645368Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.645386Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:553:2470], now have 1 active actors on pipe 2025-04-07T14:13:52.645471Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.645486Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:555:2471], now have 1 active actors on pipe 2025-04-07T14:13:52.656064Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.656117Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:560:2475], now have 1 active actors on pipe 2025-04-07T14:13:52.670342Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.672243Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.672434Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:52.672466Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.672569Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.673207Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.673253Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:52.673387Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.673685Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.673861Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:619:2522] 2025-04-07T14:13:52.674865Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:13:52.675592Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:13:52.675781Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:13:52.675964Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:13:52.676104Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T14:13:52.676131Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:13:52.676157Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:13:52.676185Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:52.676227Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:619:2522] 2025-04-07T14:13:52.676265Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.676352Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:52.676511Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:52.676837Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.676865Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:554:2471] destroyed 2025-04-07T14:13:52.676894Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:52.676908Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:553:2470] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-07T14:13:52.545017Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.548601Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.548918Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:52.548970Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.549017Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:52.549069Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.549121Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.549172Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:52.549208Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:52.549812Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.549877Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-07T14:13:52.549923Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.568218Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.571086Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.571202Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.571849Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.571970Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.572250Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.572525Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:268:2258] 2025-04-07T14:13:52.574054Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:52.574110Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:268:2258] 2025-04-07T14:13:52.574168Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.574419Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.574735Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.574771Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:271:2260], now have 1 active actors on pipe 2025-04-07T14:13:52.612793Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.615243Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.615449Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-07T14:13:52.615483Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.615523Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-07T14:13:52.615552Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.615588Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.615624Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] empty tx queue 2025-04-07T14:13:52.615651Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-07T14:13:52.616162Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.616222Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:408:2363], now have 1 active actors on pipe 2025-04-07T14:13:52.616325Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.616484Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.618971Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.619075Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.619638Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.619738Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.620000Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.620162Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:417:2370] 2025-04-07T14:13:52.621476Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-07T14:13:52.621528Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:417:2370] 2025-04-07T14:13:52.621596Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.621809Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.622200Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.622232Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:420:2372], now have 1 active actors on pipe 2025-04-07T14:13:52.634409Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.637463Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.637705Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-07T14:13:52.637742Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.637769Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-07T14:13:52.637800Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.637835Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.637870Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2025-04-07T14:13:52.637904Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-07T14:13:52.638401Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.638441Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:471:2411], now have 1 active actors on pipe 2025-04-07T14:13:52.638529Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.638706Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.641667Z node 3 :PERSQUEUE DEBUG: 
[PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.641806Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.642481Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.642560Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] S ... k: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.207317Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.207412Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.207937Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.208022Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.208282Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.208549Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:480:2418] 2025-04-07T14:13:53.210001Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-04-07T14:13:53.210055Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:480:2418] 2025-04-07T14:13:53.210098Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.210319Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:53.210599Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.210634Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:483:2420], now have 1 active actors on pipe 2025-04-07T14:13:53.221111Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.223270Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.223438Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:53.223470Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.223506Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:53.223538Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.223576Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.223614Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:53.223641Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:53.224092Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.224131Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:534:2459], now have 1 active actors on pipe 2025-04-07T14:13:53.224223Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:53.224351Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.226385Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.226470Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.227047Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.227210Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.227408Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.227558Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:543:2466] 2025-04-07T14:13:53.228791Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:53.228845Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:543:2466] 2025-04-07T14:13:53.228892Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.229105Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:53.229388Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.229421Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:546:2468], now have 1 active actors on pipe 2025-04-07T14:13:53.230851Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.230893Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:556:2473], now have 1 active actors on pipe 2025-04-07T14:13:53.230972Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.231005Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:557:2474], now have 1 active actors on pipe 2025-04-07T14:13:53.231079Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.231095Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:558:2474], now have 1 active actors on pipe 2025-04-07T14:13:53.241765Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.241828Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:563:2478], now have 1 active actors on pipe 2025-04-07T14:13:53.258148Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.260757Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.261034Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:53.261091Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.261237Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.262036Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.262100Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:53.262208Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.262504Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.262752Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:622:2525] 2025-04-07T14:13:53.264161Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:13:53.265013Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:13:53.265181Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:13:53.265382Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:13:53.265567Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T14:13:53.265599Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:13:53.265631Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:13:53.265672Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:53.265712Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:622:2525] 2025-04-07T14:13:53.265753Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.265867Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:53.266063Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:53.266407Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.266441Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:557:2474] destroyed 2025-04-07T14:13:53.266477Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.266495Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:556:2473] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-07T14:13:52.027111Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.034620Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.034861Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:52.034903Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.034932Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:52.034976Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.035030Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.035071Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:52.035100Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:52.035645Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.036389Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-07T14:13:52.036516Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.047436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.050063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.050919Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.061026Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.061224Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.061544Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.062970Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-07T14:13:52.066089Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:52.067651Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-07T14:13:52.067754Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.070614Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.071073Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.071153Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-07T14:13:52.112060Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.115163Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.115408Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-07T14:13:52.115463Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.115502Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-07T14:13:52.115545Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.115590Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.115657Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] empty tx queue 2025-04-07T14:13:52.115704Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-07T14:13:52.116280Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.116331Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-04-07T14:13:52.116461Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.116618Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.119426Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.119529Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.120251Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:52.120374Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.120639Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:52.120878Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:415:2368] 2025-04-07T14:13:52.122598Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-07T14:13:52.122644Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:415:2368] 2025-04-07T14:13:52.122682Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:52.122904Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:52.123286Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.123323Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-04-07T14:13:52.136066Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:52.139329Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:52.139522Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-07T14:13:52.139561Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:52.139592Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-07T14:13:52.139628Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:52.139694Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.139749Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2025-04-07T14:13:52.139780Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-07T14:13:52.140290Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:52.140325Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:469:2409], now have 1 active actors on pipe 2025-04-07T14:13:52.140406Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:52.140543Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.143036Z node 2 :PERSQUEUE DEBUG: 
[PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.143129Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:52.143701Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:52.143788Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:52.144044Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer ... 0000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.202043Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.202138Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.202644Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:53.202725Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.202948Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.203158Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:479:2417] 2025-04-07T14:13:53.204637Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-04-07T14:13:53.204712Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:479:2417] 2025-04-07T14:13:53.204755Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.205000Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:13:53.205299Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.205332Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:482:2419], now have 1 active actors on pipe 2025-04-07T14:13:53.217625Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.220137Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.220336Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:53.220375Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.220406Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:53.220441Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.220492Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.220533Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:53.220564Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:53.221054Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.221094Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:533:2458], now have 1 active actors on pipe 2025-04-07T14:13:53.221172Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:53.221315Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.223511Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.223618Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.224042Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.224138Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.224351Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.224517Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:542:2465] 2025-04-07T14:13:53.225840Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:53.225890Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:542:2465] 2025-04-07T14:13:53.225931Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.226131Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:53.226430Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.226463Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:545:2467], now have 1 active actors on pipe 2025-04-07T14:13:53.227777Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.227817Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:553:2472], now have 1 active actors on pipe 2025-04-07T14:13:53.227982Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.228000Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:554:2473], now have 1 active actors on pipe 2025-04-07T14:13:53.228128Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.228145Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:555:2473], now have 1 active actors on pipe 2025-04-07T14:13:53.238985Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.239048Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:563:2480], now have 1 active actors on pipe 2025-04-07T14:13:53.259006Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.261117Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.261286Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:53.261323Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.261422Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.261779Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.261812Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:53.261888Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.262068Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.262194Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:622:2527] 2025-04-07T14:13:53.263425Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:13:53.264117Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:13:53.264316Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:13:53.264535Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:13:53.264690Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T14:13:53.264723Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:13:53.264750Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:13:53.264779Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:53.264814Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:622:2527] 2025-04-07T14:13:53.264851Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.264946Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:53.265095Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:53.265447Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.265487Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:553:2472] destroyed 2025-04-07T14:13:53.265653Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.265671Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:554:2473] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollectionDisabledByDefault [GOOD] Test command err: Trying to 
start YDB, gRPC: 25639, MsgBus: 8262 2025-04-07T14:11:52.396520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573771347822892:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:52.396583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142e/r3tmp/tmpQ5FdTb/pdisk_1.dat 2025-04-07T14:11:52.650792Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25639, node 1 2025-04-07T14:11:52.700459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:52.700499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:52.700505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:52.700586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:11:52.725201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:52.725293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:52.726942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8262 TClient is connected to server localhost:8262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:53.034987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.048213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.142966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:53.257014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:53.304453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:54.399535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573779937759179:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.399606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.584800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.602116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.621096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.638365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.657627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.683658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:54.717999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573779937759673:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.718046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.718114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573779937759678:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:54.720703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:54.727795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573779937759680:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:11:55.416043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:11:55.496695Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-07T14:11:55.518993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.574095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-07T14:11:55.575534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.659609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.663641Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-07T14:11:55.714380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.729958Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-07T14:11:55.779859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.780433Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-07T14:11:55.844509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.845332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-07T14:11:55.918454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-04-07T14:11:55.924908Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-07T14:11:55.969613Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-07T14:11:55.977411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715740:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.032337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715745:0, at schemeshard: 72057594046644480 
2025-04-07T14:11:56.042717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-07T14:11:56.097714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715753:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.098728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-07T14:11:56.151550Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-07T14:11:56.151564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.206894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715768:0, at schemeshard: 72057594046644480 2025-04-07T14:11:56.210132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037 ... 94037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found Trying to start YDB, gRPC: 13623, MsgBus: 65051 2025-04-07T14:13:43.887506Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574246175417006:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:43.887568Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142e/r3tmp/tmphTQtOW/pdisk_1.dat 2025-04-07T14:13:44.007421Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13623, node 4 2025-04-07T14:13:44.036578Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:44.036688Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:44.038468Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:44.067163Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:44.067198Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:44.067211Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:44.067355Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65051 TClient is connected to server localhost:65051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:44.451642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:47.140787Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574263355286820:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:47.140883Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:47.160800Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574263355286841:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:47.160907Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:47.200880Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574263355286855:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:47.200951Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 20217, MsgBus: 63943 2025-04-07T14:13:47.848644Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574266991321609:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:47.848718Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00142e/r3tmp/tmpzM5mqU/pdisk_1.dat 2025-04-07T14:13:47.942106Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20217, node 5 2025-04-07T14:13:48.006423Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:48.006526Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:48.008506Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:48.037529Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:48.037555Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:48.037566Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:48.037683Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63943 TClient is connected to server localhost:63943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:48.599334Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:48.616543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:48.663269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:48.819581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:48.899359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:51.164770Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574284171192499:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.164875Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.265731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.297887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.327378Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.359022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.391341Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.460733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.499292Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574284171192996:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.499363Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574284171193001:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.499364Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.502229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:51.510479Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574284171193003:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TTxAllocatorClientTest::ZeroRange [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-07T14:13:53.837808Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.840407Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.840614Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:53.840652Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.840717Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:53.840753Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.840784Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.840818Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:53.840841Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:53.841297Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.841337Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:258:2250], now have 1 active actors on pipe 2025-04-07T14:13:53.841402Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:53.851978Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:53.855285Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:53.855414Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.856073Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:53.856175Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.856457Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.856743Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:267:2257] 2025-04-07T14:13:53.858402Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:53.858442Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2257] 2025-04-07T14:13:53.858487Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.858713Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:53.859026Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.859057Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:270:2259], now have 1 active actors on pipe 2025-04-07T14:13:53.902082Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:53.905034Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:53.905241Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:53.905284Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:53.905323Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:53.905360Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:53.905404Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.905445Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:53.905471Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:53.905993Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.906031Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-04-07T14:13:53.906121Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:53.906270Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.908959Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.909057Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:53.909568Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:53.909644Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:53.909875Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:53.910036Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:416:2369] 2025-04-07T14:13:53.911278Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:53.911321Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:416:2369] 2025-04-07T14:13:53.911361Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:53.911597Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:53.912037Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.912073Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:419:2371], now have 1 active actors on pipe 2025-04-07T14:13:53.913495Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.913535Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:427:2376], now have 1 active actors on pipe 2025-04-07T14:13:53.913716Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:53.913736Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:429:2377], now have 1 active actors on pipe 2025-04-07T14:13:53.913905Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.913935Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:427:2376] destroyed 2025-04-07T14:13:53.914277Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:53.914300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:429:2377] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-04-07T14:12:46.985668Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-07T14:12:46.986079Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-07T14:12:46.986703Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-07T14:12:46.988236Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.988639Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-07T14:12:46.997755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.997871Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.998002Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-07T14:12:46.998104Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.998177Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.998249Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-07T14:12:46.998358Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-07T14:12:46.999020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2025-04-07T14:12:46.999472Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.999532Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:12:46.999614Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-07T14:12:46.999649Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition |95.8%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-07T14:13:55.049110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.052227Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.052474Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:55.052509Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.052538Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:55.052573Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.052606Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.052640Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:55.052686Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:55.053230Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.053283Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-07T14:13:55.053349Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:55.070408Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.073544Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.073654Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.074616Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.074754Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.075059Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.075319Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-07T14:13:55.076970Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-07T14:13:55.077017Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-07T14:13:55.077071Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.077501Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:55.077820Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.077864Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-07T14:13:55.115257Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.118057Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.118310Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-07T14:13:55.118356Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.118387Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-07T14:13:55.118418Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.118453Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.118500Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] empty tx queue 2025-04-07T14:13:55.118526Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-07T14:13:55.118987Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.119032Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-04-07T14:13:55.119111Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:55.119306Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.121672Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" 
Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.121771Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.122301Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.122388Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.122591Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.122747Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:415:2368] 2025-04-07T14:13:55.124050Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-07T14:13:55.124099Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:415:2368] 2025-04-07T14:13:55.124153Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.124384Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:55.124775Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.124813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-04-07T14:13:55.137227Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.139693Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.139883Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-07T14:13:55.139919Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.139945Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-07T14:13:55.139981Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.140027Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.140063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2025-04-07T14:13:55.140088Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-07T14:13:55.140567Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.140608Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:469:2409], now have 1 active actors on pipe 2025-04-07T14:13:55.140706Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:55.140837Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:55.143102Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:55.143207Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.143830Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-07T14:13:55.143943Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.144199Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer ... 
600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:55.701148Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.701915Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:55.702029Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.702352Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.702571Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:543:2466] 2025-04-07T14:13:55.704409Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:55.704470Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:543:2466] 2025-04-07T14:13:55.704521Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.704869Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:55.705310Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.705379Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:546:2468], now have 1 active actors on pipe 2025-04-07T14:13:55.707345Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.707398Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:554:2473], now have 1 active actors on pipe 2025-04-07T14:13:55.707532Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.707562Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:555:2474], now have 1 active actors on pipe 2025-04-07T14:13:55.707602Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.707811Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.707927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.707961Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:556:2474], now have 1 active actors on pipe 2025-04-07T14:13:55.708041Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvStatus 
2025-04-07T14:13:55.708249Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.708382Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.708493Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.719514Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.719584Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:564:2481], now have 1 active actors on pipe 2025-04-07T14:13:55.741607Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.744053Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.744349Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:55.744393Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.744528Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.745274Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.745324Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:55.745498Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.745816Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.746020Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:623:2528] 2025-04-07T14:13:55.747807Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:13:55.748905Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:13:55.749184Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:13:55.749631Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:13:55.749893Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T14:13:55.749941Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:13:55.749979Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:13:55.750018Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:55.750062Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:623:2528] 2025-04-07T14:13:55.750130Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.750392Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:55.750573Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:55.751009Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.751060Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:555:2474] destroyed 2025-04-07T14:13:55.751343Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.751377Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:554:2473] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 40 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 40 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 82 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 82 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 
Status: STATUS_UNKNOWN } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:147:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:147:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:152:2173] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:156:2057] recipient: [7:152:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:155:2174] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:225:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:156:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:156:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:211:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:160:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:161:2057] recipient: [9:159:2180] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:163:2057] recipient: [9:159:2180] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:162:2181] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:215:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:162:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:165:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:166:2057] recipient: [10:164:2185] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:168:2057] recipient: [10:164:2185] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:167:2186] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:237:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:165:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:166:2057] recipient: [11:164:2185] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:168:2057] recipient: [11:164:2185] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:167:2186] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:237:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 720 ... 37 is [15:146:2167] sender: [15:217:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:147:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:148:2057] recipient: [17:146:2168] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:150:2057] recipient: [17:146:2168] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:149:2169] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:219:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:149:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:152:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:153:2057] recipient: [18:151:2173] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:155:2057] recipient: [18:151:2173] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:154:2174] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:149:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:152:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:153:2057] recipient: [19:151:2173] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:155:2057] recipient: [19:151:2173] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! 
new actor is[19:154:2174] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:224:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:153:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:154:2057] recipient: [20:152:2173] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:156:2057] recipient: [20:152:2173] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:155:2174] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:225:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:153:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:156:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:157:2057] recipient: [21:155:2176] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:159:2057] recipient: [21:155:2176] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:158:2177] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:211:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:157:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:160:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:161:2057] recipient: [22:159:2180] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:163:2057] recipient: [22:159:2180] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! 
new actor is[22:162:2181] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:215:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:162:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:165:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:166:2057] recipient: [23:164:2185] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:168:2057] recipient: [23:164:2185] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:167:2186] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:237:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:162:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:165:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:166:2057] recipient: [24:164:2185] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:168:2057] recipient: [24:164:2185] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:167:2186] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:237:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:165:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:168:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:169:2057] recipient: [25:167:2187] Leader for TabletID 72057594037927937 is [25:170:2188] sender: [25:171:2057] recipient: [25:167:2187] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! 
new actor is[25:170:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-07T14:13:55.398330Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.401216Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.401490Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:55.401532Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.401564Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:55.401605Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.401641Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.401682Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:55.401711Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:55.402256Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.402315Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-07T14:13:55.402400Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:55.415424Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.418889Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.419028Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.419810Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-07T14:13:55.419921Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.420244Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.420538Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-07T14:13:55.422466Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-07T14:13:55.422524Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-07T14:13:55.422583Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.422883Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:13:55.423253Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.423307Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-07T14:13:55.463433Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.466619Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.466900Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:55.466939Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.466970Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-07T14:13:55.467003Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.467055Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.467111Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:55.467140Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:55.467653Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.467720Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-04-07T14:13:55.467856Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-07T14:13:55.468015Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:55.470631Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:55.470729Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.471352Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-07T14:13:55.471465Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.471731Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.471907Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:416:2369] 2025-04-07T14:13:55.473516Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-07T14:13:55.473570Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:416:2369] 2025-04-07T14:13:55.473638Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.473912Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:55.474343Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.474385Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:419:2371], now have 1 active actors on pipe 2025-04-07T14:13:55.476059Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.476106Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:427:2376], now have 1 active actors on pipe 2025-04-07T14:13:55.476256Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.476329Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.476350Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:429:2377], now have 1 active actors on pipe 2025-04-07T14:13:55.476459Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.476695Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.476790Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.477044Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.477081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:427:2376] destroyed 2025-04-07T14:13:55.477339Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.477362Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:429:2377] destroyed 2025-04-07T14:13:55.774943Z node 
3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.777629Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.777792Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-07T14:13:55.777820Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.777853Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-07T14:13:55.777883Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.777916Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.777950Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2025-04-07T14:13:55.777971Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-07T14:13:55.778381Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.778427Z node 3 :PERSQ ... 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.857277Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:55.857579Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.857613Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:546:2468], now have 1 active actors on pipe 2025-04-07T14:13:55.858770Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.858811Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:554:2473], now have 1 active actors on pipe 2025-04-07T14:13:55.858944Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.858962Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:555:2474], now have 1 active actors on pipe 2025-04-07T14:13:55.858983Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.859093Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.859175Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.859273Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.859298Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.859311Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:556:2474], now have 1 active actors on pipe 2025-04-07T14:13:55.859354Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2025-04-07T14:13:55.859411Z node 3 
:PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-07T14:13:55.870052Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:13:55.870098Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:564:2481], now have 1 active actors on pipe 2025-04-07T14:13:55.884290Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T14:13:55.886204Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T14:13:55.886400Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-07T14:13:55.886431Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-07T14:13:55.886514Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-07T14:13:55.886974Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T14:13:55.887003Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-07T14:13:55.887068Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-07T14:13:55.887243Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T14:13:55.887405Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:623:2528] 2025-04-07T14:13:55.888419Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T14:13:55.889128Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-07T14:13:55.889283Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-07T14:13:55.889481Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-07T14:13:55.889629Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-07T14:13:55.889655Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-07T14:13:55.889680Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-07T14:13:55.889702Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-07T14:13:55.889734Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:623:2528] 2025-04-07T14:13:55.889771Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-07T14:13:55.889863Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2025-04-07T14:13:55.890031Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:13:55.890336Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.890364Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:555:2474] destroyed 2025-04-07T14:13:55.890400Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:13:55.890415Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:554:2473] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 40 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 40 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 82 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 82 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 
Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 97 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 97 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> DataStreams::TestUpdateStream >> DataStreams::TestPutRecordsOfAnauthorizedUser >> DataStreams::TestStreamStorageRetention >> DataStreams::TestDeleteStream >> DataStreams::TestUpdateStorage >> DataStreams::TestNonChargeableUser >> DataStreams::TestGetRecordsStreamWithSingleShard |95.8%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataStreams::TestGetShardIterator >> CdcStreamChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> CdcStreamChangeCollector::UpsertToSameKey >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> AsyncIndexChangeCollector::InsertSingleRow >> KqpScheme::AddChangefeedNegative [GOOD] >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::DeleteNothing >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsFields >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AddChangefeedNegative [GOOD] Test command err: Trying to start YDB, gRPC: 64416, MsgBus: 1373 2025-04-07T14:12:05.300377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573826844654416:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:05.300432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fa/r3tmp/tmp97zMu1/pdisk_1.dat 2025-04-07T14:12:05.555543Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64416, node 1 2025-04-07T14:12:05.611367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:05.611389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:05.611421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:05.611562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:05.635422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:05.635543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:05.637316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1373 TClient is connected to server localhost:1373 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:06.042986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, timestamp Timestamp NOT NULL, ui64_type Uint64 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:12:07.573976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573835434589638:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.574064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:07.755455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:07.801732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:07.801964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:07.802251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:07.802373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:07.802491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:07.802601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:07.802711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:07.802801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:07.802922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:07.803018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:07.803131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:07.803235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573835434589715:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:12:07.806109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:07.806164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:07.806261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:07.806291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:07.806513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:07.806552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:07.806637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:07.806671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:07.806765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:07.806799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:07.806856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:07.806881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:07.807364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:07.807433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:07.807675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:07.807704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:12:07.807841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:07.807881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:07.808072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:07.808105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:07.808232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:07.808258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45; ... dService] [TPoolFetcherActor] ActorId: [17:7490574279228940327:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:50.829879Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:50.856864Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:50.888131Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:50.918649Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:50.946248Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:50.979874Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.049640Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:51.131558Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7490574283523908125:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.131640Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.131650Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7490574283523908130:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:51.136097Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:51.146546Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:7490574283523908132:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:13:51.587251Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7490574262049069429:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:51.587308Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:52.294031Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64670, MsgBus: 3432 2025-04-07T14:13:53.051901Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7490574292627507038:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:53.052000Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013fa/r3tmp/tmp8jB26W/pdisk_1.dat 2025-04-07T14:13:53.136876Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:53.160628Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:53.160739Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:53.162185Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64670, node 18 2025-04-07T14:13:53.201711Z node 18 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:53.201735Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:53.201742Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:53.201850Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3432 TClient is connected to server localhost:3432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:53.684799Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:53.701859Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:53.796095Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:53.945665Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:54.023008Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:56.984324Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7490574305512410630:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:56.984426Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:57.037965Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.070759Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.104290Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.131098Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.161845Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.204846Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:13:57.280177Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7490574309807378425:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:57.280291Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:57.280297Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7490574309807378430:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:13:57.283421Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:13:57.291602Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:7490574309807378432:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:13:58.062358Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7490574292627507038:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:58.062422Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:13:58.766936Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.896819Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:26.606294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:26.606367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:26.606399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:26.606428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:26.606489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:26.606534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:26.606592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:26.606932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:26.667622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:26.667671Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] 
recipient: [1:15:2062] 2025-04-07T14:13:26.681594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:26.681729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:26.681876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:26.692534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:26.692732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:26.693317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.693685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:26.698428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.700128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:26.700348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.700505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:26.700546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:26.700584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:26.700730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:26.709488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:26.826135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:26.826333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.826524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:26.826717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:26.826775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.828919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.829050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:26.829200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.829242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:26.829286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:26.829328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:26.831032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.831085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:26.831112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:26.832574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.832617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.832683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.832727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:26.835865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:26.837459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:26.837640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:26.838475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:26.838617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:26.838666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.838897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:26.838941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:26.839095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:26.839161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:26.841101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:26.841140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:26.841299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:26.841336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:26.841690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:26.841734Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:26.841820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:26.841883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:26.841926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:26.841973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:26.842008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:26.842043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:26.842109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:26.842144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:26.842171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
ult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:00.196563Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:14:00.196792Z 
node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 239us result status StatusSuccess 2025-04-07T14:14:00.197512Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutRecords >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> AsyncIndexChangeCollector::UpsertSingleRow >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> DataStreams::TestPutRecords [GOOD] >> DataStreams::TestPutEmptyMessage >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestListShards1Shard >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> KqpNewEngine::InShardsWrite >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> AsyncIndexChangeCollector::InsertManyRows 
[GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> SystemView::PartitionStatsFields [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> DataStreams::TestListShards1Shard [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> DataStreams::TestShardPagination [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:476:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:479:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:480:2057] recipient: [4:478:2500] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:482:2057] recipient: [4:478:2500] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:481:2501] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:551:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:481:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:484:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:485:2057] recipient: [5:483:2505] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:487:2057] recipient: [5:483:2505] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:486:2506] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:556:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:481:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:484:2057] recipient: [6:483:2505] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:485:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:487:2057] recipient: [6:483:2505] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:486:2506] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:556:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:482:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:485:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:486:2057] recipient: [7:484:2505] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:488:2057] recipient: [7:484:2505] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:487:2506] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:557:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:484:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:487:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:488:2057] recipient: [8:486:2507] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:490:2057] recipient: [8:486:2507] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:489:2508] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:559:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:484:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:487:2057] recipient: [9:486:2507] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:488:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:490:2057] recipient: [9:486:2507] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:489:2508] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:559:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:485:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:488:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:489:2057] recipient: [10:487:2507] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:491:2057] recipient: [10:487:2507] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:490:2508] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:561:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:487:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:489:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:491:2057] recipient: [11:490:2509] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:493:2057] recipient: [11:490:2509] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:492:2510] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:562:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 927937 is [13:105:2137] sender: [13:492:2057] recipient: [13:490:2509] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:494:2057] recipient: [13:490:2509] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:493:2510] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:563:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:476:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:479:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:480:2057] recipient: [18:478:2500] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:482:2057] recipient: [18:478:2500] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:481:2501] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:551:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:481:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:484:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:485:2057] recipient: [19:483:2505] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:487:2057] recipient: [19:483:2505] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:486:2506] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:556:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:481:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:484:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:485:2057] recipient: [20:483:2505] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:487:2057] recipient: [20:483:2505] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:486:2506] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:556:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:482:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:485:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:486:2057] recipient: [21:484:2505] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:488:2057] recipient: [21:484:2505] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:487:2506] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:535:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:484:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:487:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:488:2057] recipient: [22:486:2507] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:490:2057] recipient: [22:486:2507] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:489:2508] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:559:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:484:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:487:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:488:2057] recipient: [23:486:2507] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:490:2057] recipient: [23:486:2507] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! 
new actor is[23:489:2508] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:559:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:485:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:487:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:489:2057] recipient: [24:488:2507] Leader for TabletID 72057594037927937 is [24:490:2508] sender: [24:491:2057] recipient: [24:488:2507] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:490:2508] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2025-04-07T14:12:15.274067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573870602436281:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:15.274214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001088/r3tmp/tmpxiXexK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29696, node 1 2025-04-07T14:12:15.600215Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:15.600282Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:15.616391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:15.650850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:15.650953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:15.653699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:15.654194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:15.654207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:15.654220Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:15.654368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:16.091051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:16.120553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:16.134567Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490573874276520566:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.134623Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:16.171045Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490573873780383757:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.163311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:16.202522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.202592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.220915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.220999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.225964Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-07T14:12:16.226865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.231332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:12:16.233532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.291270Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:16.355292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:16.385827Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573872224175019:2101];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.391440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:16.391722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.391787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.395431Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 waiting... 
2025-04-07T14:12:16.400404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.411093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:16.411726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:16.423231Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:16.430802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:16.511274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:19.786503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:19.969066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573887782306805:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.969179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.969556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573887782306817:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:19.974012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-07T14:12:19.994379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573887782306819:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-07T14:12:20.275429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490573870602436281:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:20.275494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:20.536453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89gmby5jmct0dvnxvt0t7p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZjNzk2YjMtMjk0MTM3MTMtNDA0M2VkZTEtN2YxMDkyMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:20.590363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.066745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89gnaf11fk654dccrdba7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZjNzk2YjMtMjk0MTM3MTMtNDA0M2VkZTEtN2YxMDkyMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:12:21.125451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-07T14:12:21.134553Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490573874276520566:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.134609Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:21.140725Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490573873780383757:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.140794Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:21.382586Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490573872224175019:2101];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:21.382667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:21.610649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr89gnsqcy6s7mtex8q98frt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZjNzk2YjMtMjk0MTM3MTMtNDA0M2VkZTEtN2YxMDkyMDc=, CurrentExecutio ... ot set, use /Root 2025-04-07T14:13:58.460961Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr89kmeham01gvjxadqxy3cm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=NzU2ZDZlMS1jMzkxMWFkNC02NjUwOTIxYS1hMTA3M2I4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:13:58.463710Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7490574311266204934:2960], owner: [11:7490574311266204931:2958], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:13:58.464174Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7490574311266204934:2960], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:13:58.464392Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7490574311266204934:2960], row count: 1, finished: 1 2025-04-07T14:13:58.464439Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7490574311266204934:2960], owner: [11:7490574311266204931:2958], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:13:58.466364Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035238460, txId: 281474976715732] shutting down 2025-04-07T14:14:00.249940Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7490574319121585425:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:00.250045Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001088/r3tmp/tmpqZNYbh/pdisk_1.dat 2025-04-07T14:14:00.408627Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:00.450835Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:00.450952Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:00.455288Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24835, node 16 2025-04-07T14:14:00.510460Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:00.510487Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:00.510501Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:00.510659Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:00.766041Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:00.780179Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:04.072325Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490574336301455579:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:04.072342Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490574336301455587:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:04.072433Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:04.076308Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:04.094716Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7490574336301455593:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:04.264290Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89kt16dj7r2t905byv3ab4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZmM5YzZlMGEtYWY2YjU5ZGMtNTk2N2U3OGEtMmU1MWQ0NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:04.341210Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89kt7k8b0hsf7cn3dg57jz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YWM4NzU5NDAtNjcxODAyMjMtZjMyYTFjODEtN2M5ZDhiZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:04.361862Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7490574336301455740:2327], owner: [16:7490574336301455736:2325], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:04.362331Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7490574336301455740:2327], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:14:04.362515Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7490574336301455740:2327], row count: 1, finished: 1 2025-04-07T14:14:04.362554Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7490574336301455740:2327], owner: [16:7490574336301455736:2325], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:04.364275Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035244340, txId: 281474976715662] shutting down 2025-04-07T14:14:05.252281Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7490574319121585425:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:05.252384Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:05.469083Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89kv9x84cpykbgc4wb3f7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NzNmMDBlY2UtZjM4MmMzNzItZWNhMjBlNzctZjE4ODZkNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:05.471944Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7490574340596423084:2338], owner: [16:7490574340596423081:2336], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:05.478723Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7490574340596423084:2338], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:14:05.478984Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7490574340596423084:2338], row count: 1, finished: 1 2025-04-07T14:14:05.479039Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7490574340596423084:2338], owner: [16:7490574340596423081:2336], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:05.481995Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035245468, txId: 281474976715664] shutting down 2025-04-07T14:14:06.558854Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89kwcq40erj3dymy4rtmba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YTBiMjFkMDAtN2E0NTkzY2UtNmQzMmIxYzctODJhYzBjY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:06.560332Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7490574344891390436:2353], owner: [16:7490574344891390433:2351], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:06.560684Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7490574344891390436:2353], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:14:06.560845Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7490574344891390436:2353], row count: 1, finished: 1 2025-04-07T14:14:06.560878Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7490574344891390436:2353], owner: [16:7490574344891390433:2351], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:06.565392Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035246558, txId: 281474976715666] shutting down 2025-04-07T14:14:06.695000Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89kwfa2sym41badnskgar0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YzZiYThkZGYtOTQwOTFkY2YtMjU2ZWNkN2EtZDQ3NjYzODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:06.697341Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7490574344891390469:2362], owner: [16:7490574344891390465:2360], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:06.697758Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7490574344891390469:2362], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:14:06.697958Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7490574344891390469:2362], row count: 1, finished: 1 2025-04-07T14:14:06.697993Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7490574344891390469:2362], owner: [16:7490574344891390465:2360], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:14:06.699797Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035246694, txId: 281474976715668] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-04-07T14:13:57.714579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574306869902172:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.715241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a3c/r3tmp/tmpPvBZVV/pdisk_1.dat 2025-04-07T14:13:58.005750Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18774, node 1 2025-04-07T14:13:58.069442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.069512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.079406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.195814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.195920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29101 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.572933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.584411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.585897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.586067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.586085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.586746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.587241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.587256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.588370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.591073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.591100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.591344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.592601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.592758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.592802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.592893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.592938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.592971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.594557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.594606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.594617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.594713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.644902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.645151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.645175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.645237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.645314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.645331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.646771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.646870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.646985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.647399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.647436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.647448Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.647502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:29101 2025-04-07T14:13:58.779941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.780135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.780156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.781711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.781839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:58.782724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.784426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238827, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.784457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238827, at schemeshard: 72057594046644480 2025-04-07T14:13:58.784603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.784651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.784687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.785827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.785982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.786403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:13:58.786430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:13:58.786438Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:13:58.786482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T14:13:58.806162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestUpdateStream, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.808185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.810891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: S ... 
44480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:5655 2025-04-07T14:14:06.157611Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.157807Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:06.157839Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.159557Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:14:06.159726Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.161971Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035246205, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:06.162001Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744035246205, at schemeshard: 72057594046644480 2025-04-07T14:14:06.162178Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:14:06.162254Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:14:06.162288Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2025-04-07T14:14:06.163496Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:06.163662Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:06.164180Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:14:06.164216Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:14:06.164234Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:14:06.164290Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 waiting... 
2025-04-07T14:14:06.171260Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:14:06.196906Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.197174Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:06.199052Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2025-04-07T14:14:06.199193Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:06.199349Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:06.199399Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet# 72057594046644480 2025-04-07T14:14:06.199717Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T14:14:06.199878Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:06.199910Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:06.199925Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:14:06.200099Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:06.200124Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:06.200134Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:14:06.203536Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:06.203762Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:06.203921Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:06.204060Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:06.204090Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2025-04-07T14:14:06.206032Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:06.260803Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 
HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:06.261918Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2025-04-07T14:14:06.261937Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:14:06.262136Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2025-04-07T14:14:06.262149Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:14:06.262315Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2025-04-07T14:14:06.262326Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:14:06.262344Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T14:14:06.263315Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.264564Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046644480 message# TabletId: 72075186224037888 TxId: 281474976715660 Status: OK 2025-04-07T14:14:06.264755Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046644480 message# TabletId: 72075186224037889 TxId: 281474976715660 Status: OK 2025-04-07T14:14:06.264864Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046644480 message# TabletId: 72075186224037890 TxId: 281474976715660 Status: OK 2025-04-07T14:14:06.266293Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035246310, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:06.266328Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1744035246310, at tablet: 72057594046644480 2025-04-07T14:14:06.277511Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715660 Step: 1744035246310 2025-04-07T14:14:06.279780Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715660 Step: 1744035246310 2025-04-07T14:14:06.280002Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715660 Step: 1744035246310 2025-04-07T14:14:06.280154Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:14:06.281901Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:06.282192Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:06.282251Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:14:06.282327Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:14:06.282368Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:14:06.282508Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2025-04-07T14:14:06.283676Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:06.283729Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:06.283744Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:14:06.283927Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:06.283953Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:06.283963Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:14:06.283999Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-04-07T14:13:57.704877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574307055039914:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.704983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a1c/r3tmp/tmp4S27fQ/pdisk_1.dat 2025-04-07T14:13:58.009052Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18723, node 1 2025-04-07T14:13:58.063800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.063928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.073836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.195728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195764Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.195834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.567874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.572237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.573374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.574938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.575107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.575125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.575894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.577628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.580899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.580929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.581204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.582652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.582951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.582986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.583013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.584822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.584863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.584877Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.584941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.636569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.636813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.636840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.636910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.636996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.637031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.638577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.638775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.638976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.639680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.639702Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.639773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:25439 2025-04-07T14:13:58.763995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.764205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.764233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.765949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.766066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:58.767008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.769105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238813, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.769159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238813, at schemeshard: 72057594046644480 2025-04-07T14:13:58.769459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.769543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.769577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.770939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.771093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.771815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:13:58.771881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:13:58.771893Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:13:58.771941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-04-07T14:13:58.826151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestStreamStorageRetention, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.826815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710 ... 94046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:17275 2025-04-07T14:14:05.634815Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:05.634993Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:05.635015Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:05.637647Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:14:05.637810Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:05.641069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035245687, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:05.641108Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744035245687, at schemeshard: 72057594046644480 2025-04-07T14:14:05.641324Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:14:05.641401Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:14:05.641440Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2025-04-07T14:14:05.643025Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:05.643218Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:05.643842Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:14:05.643887Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:14:05.643905Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:14:05.643965Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2025-04-07T14:14:05.645412Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:14:05.697069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
TCreatePQ Propose, path: /Root/stream_TestShardPagination, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:05.697523Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:05.700265Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestShardPagination 2025-04-07T14:14:05.700472Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:05.700743Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:05.700815Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet# 72057594046644480 2025-04-07T14:14:05.701210Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T14:14:05.701524Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:05.701554Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:05.701569Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:14:05.701740Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:05.701759Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:05.701769Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:14:05.707056Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707296Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707421Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707539Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707654Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707774Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.707884Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.709079Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 
HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.709284Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.709433Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:05.709471Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2025-04-07T14:14:05.711369Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:05.768111Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.769588Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.769796Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.772070Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.772429Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.773173Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.774815Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.775191Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.775555Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.775986Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:05.776017Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T14:14:05.777475Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:05.780279Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035245827, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:05.780324Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1744035245827, at tablet: 72057594046644480 2025-04-07T14:14:05.780518Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:14:05.781883Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:05.782232Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:05.782298Z node 7 
:FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:14:05.782376Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:14:05.782423Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:14:05.782656Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2025-04-07T14:14:05.785029Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:05.785075Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:05.785093Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:14:05.785283Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:05.785306Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:05.785313Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:14:05.785343Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join >> DataStreams::TestPutRecordsCornerCases [GOOD] >> KqpNewEngine::PureExpr >> KqpRanges::IsNotNullSecondComponent >> KqpNewEngine::KeyColumnOrder >> KqpNewEngine::BlindWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecordsCornerCases [GOOD] Test command err: 2025-04-07T14:13:57.704493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574307261493985:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.704607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a26/r3tmp/tmp7056O6/pdisk_1.dat 2025-04-07T14:13:58.041905Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30337, node 1 2025-04-07T14:13:58.047722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:58.048329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.048484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:58.050350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046644480 2025-04-07T14:13:58.050712Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.050968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.051004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.051063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:58.051147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.051298Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.089852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.089945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.098714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.195818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.195977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:13:58.569822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.573786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.575089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.576717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.576744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:13:58.577517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.577997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.578016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.579097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.581846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.581874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.582098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.583358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.583636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.583668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.583718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.585335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.585376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.585391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.585479Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.639661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.639953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.640010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.640074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.640091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.641530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.641673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.641907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.642360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.642393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.642409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.642473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:21392 2025-04-07T14:13:58.807387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.807646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.807708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.809777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.809918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:58.810828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.812478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238855, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.812511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238855, at schemeshard: 72057594046644480 2025-04-07T14:13:58.812709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.812774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.812802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.814021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.814220Z node 1 : ... v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 2025-04-07T14:14:09.670172Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 2025-04-07T14:14:09.670195Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 2025-04-07T14:14:09.670237Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 2025-04-07T14:14:09.670259Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 2025-04-07T14:14:09.670744Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Confirm partition stream create. Partition stream id: 1. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 3. Read offset: (empty maybe) 2025-04-07T14:14:09.670989Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Confirm partition stream create. Partition stream id: 2. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 4. Read offset: (empty maybe) 2025-04-07T14:14:09.671283Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Confirm partition stream create. Partition stream id: 3. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 2. 
Read offset: (empty maybe) 2025-04-07T14:14:09.671352Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2), readOffset# 0, commitOffset# 0 2025-04-07T14:14:09.671409Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-07T14:14:09.671604Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1), readOffset# 0, commitOffset# 0 2025-04-07T14:14:09.671638Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-07T14:14:09.671791Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3), readOffset# 0, commitOffset# 0 2025-04-07T14:14:09.671838Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-07T14:14:09.672182Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Confirm partition stream create. Partition stream id: 4. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 0. Read offset: (empty maybe) 2025-04-07T14:14:09.672380Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Confirm partition stream create. Partition stream id: 5. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 1. 
Read offset: (empty maybe) 2025-04-07T14:14:09.672598Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5), readOffset# 0, commitOffset# 0 2025-04-07T14:14:09.672691Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4), readOffset# 0, commitOffset# 0 2025-04-07T14:14:09.672723Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-07T14:14:09.672754Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-07T14:14:09.680231Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:14:09.680434Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-04-07T14:14:09.680560Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-04-07T14:14:09.680626Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-04-07T14:14:09.680701Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2025-04-07T14:14:09.680726Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2025-04-07T14:14:09.680784Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 3, size 1049088 bytes 2025-04-07T14:14:09.681348Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2025-04-07T14:14:09.681376Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-07T14:14:09.768023Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:14:09.768387Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-04-07T14:14:09.768456Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-04-07T14:14:09.768483Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2025-04-07T14:14:09.768499Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-0) 2025-04-07T14:14:09.768523Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-04-07T14:14:09.768561Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-07T14:14:09.768585Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-07T14:14:09.768621Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-07T14:14:09.768658Z :DEBUG: [/Root/] Decompression task done. 
Partition/PartitionSessionId: 1 (3-3) 2025-04-07T14:14:09.768712Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T14:14:09.768709Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-04-07T14:14:09.768735Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T14:14:09.768764Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-07T14:14:09.768800Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-04-07T14:14:09.768836Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-04-07T14:14:09.768877Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-04-07T14:14:09.768906Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-04-07T14:14:09.768928Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2025-04-07T14:14:09.768947Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-04-07T14:14:09.768963Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-04-07T14:14:09.768982Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2025-04-07T14:14:09.769000Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-04-07T14:14:09.769019Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 6, size 6291456 bytes 2025-04-07T14:14:09.770713Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-04-07T14:14:09.770798Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-04-07T14:14:09.770848Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] The application data is transferred to the client. Number of messages 2, size 2097152 bytes 2025-04-07T14:14:09.771308Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] Closing read session. Close timeout: 0.000000s 2025-04-07T14:14:09.771368Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:3:1:3:0 null:stream_TestPutRecordsCornerCases:2:3:0:0 null:stream_TestPutRecordsCornerCases:1:5:7:0 null:stream_TestPutRecordsCornerCases:0:4:1:0 2025-04-07T14:14:09.771415Z :INFO: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 125 BytesRead: 9437696 MessagesRead: 16 BytesReadCompressed: 9437696 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:14:09.771520Z :NOTICE: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:14:09.771567Z :DEBUG: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] [null] Abort session to cluster 2025-04-07T14:14:09.772295Z :NOTICE: [/Root/] [/Root/] [d43a8ee3-c877cd2f-94868c1f-b414f64b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:14:09.772469Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 grpc read failed 2025-04-07T14:14:09.772524Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 closed 2025-04-07T14:14:09.772989Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15193403152021417908_v1 is DEAD 2025-04-07T14:14:09.845721Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490574338301562324:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:09.845800Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> DataStreams::TestListStreamConsumers [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> DataStreams::TestUnsupported [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-04-07T14:13:58.192686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574314232442156:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:58.192757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a17/r3tmp/tmpfT9Sl9/pdisk_1.dat 2025-04-07T14:13:58.476125Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30745, node 1 2025-04-07T14:13:58.489773Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.494413Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.518034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:58.518940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.519140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:58.521282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.521785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.521821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.521897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:58.521977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.522787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.522805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.522822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.523052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:13:58.529974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.530056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.532523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.758998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.764040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.764904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.766334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.766481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.766498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.767185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.767471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.767485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.768467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.771230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238813, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.771256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.771502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.772899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.773102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.773150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.773222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.773262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.773301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.774988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.775034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.775047Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.775100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.817964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.818172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.818200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.818264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.818333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.818348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.819642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.819780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.819962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.820369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.820407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.820421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.820470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:19050 2025-04-07T14:13:58.984093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.984287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.984311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.986023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.986174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.989026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035239037, transactions count in step: 1, at schemeshard: 72057594046644480 waiting...2025-04-07T14:13:58.989060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035239037, at schemeshard: 72057594046644480 2025-04-07T14:13:58.989278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.989348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.989394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:13:58.990047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.990790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.990934Z node 1 :F ... 
73070Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2025-04-07T14:14:09.174865Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:09.233071Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.233810Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.234644Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.235389Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.235801Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.239948Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.239987Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T14:14:09.241473Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.244214Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249292, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.244259Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1744035249292, at tablet: 72057594046644480 2025-04-07T14:14:09.244427Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:14:09.246049Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.246507Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.246589Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:14:09.246714Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:14:09.246772Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:14:09.247045Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2025-04-07T14:14:09.247852Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.247913Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.247931Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:14:09.248131Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.248160Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.248170Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:14:09.248208Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2025-04-07T14:14:09.256994Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestInvalidRetentionCombinationsa, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.257418Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.259894Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestInvalidRetentionCombinationsa 2025-04-07T14:14:09.260119Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.260417Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.260541Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 ProgressState, operation type: TxCreatePQGroup, at tablet# 72057594046644480 2025-04-07T14:14:09.260955Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715661, at schemeshard: 72057594046644480 2025-04-07T14:14:09.261650Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:14:09.261696Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:14:09.261746Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-07T14:14:09.261999Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:14:09.262028Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:14:09.262038Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2025-04-07T14:14:09.266405Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.266684Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.266867Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.269049Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.269270Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.269451Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.269487Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 1 -> 3 2025-04-07T14:14:09.273419Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:09.294652Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.298685Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.299498Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.300454Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.301235Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.301980Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.302023Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 3 -> 128 2025-04-07T14:14:09.303150Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.305888Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249348, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.305930Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715661:0 HandleReply TEvOperationPlan, step: 1744035249348, at tablet: 72057594046644480 2025-04-07T14:14:09.306141Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 128 -> 240 2025-04-07T14:14:09.307569Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.307981Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.308071Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715661:0 ProgressState 2025-04-07T14:14:09.308182Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-07T14:14:09.308237Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-07T14:14:09.308442Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 2, subscribers: 1 2025-04-07T14:14:09.309005Z node 
10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:14:09.309070Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:14:09.309120Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2025-04-07T14:14:09.309279Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:14:09.309330Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:14:09.309346Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:14:09.309407Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 >> KqpSqlIn::KeySuffix ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListStreamConsumers [GOOD] Test command err: 2025-04-07T14:13:57.705164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574309053848787:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.705592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a31/r3tmp/tmpO6Sh3u/pdisk_1.dat 2025-04-07T14:13:58.046085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28255, node 1 2025-04-07T14:13:58.057671Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.057924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:58.058632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.058791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:58.060139Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.060454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.061035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.061071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.061123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:58.061201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 
2025-04-07T14:13:58.076408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.076499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.078981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.195678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195721Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.195820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.573548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.578948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.580123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.581733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.581888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.581908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.582809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.584362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.587393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238631, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.587425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.587686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.589230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.589330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.589364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.589432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.589463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.589539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.591238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.591291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.591305Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.591412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.701989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.702272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.702297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.702361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.702452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.702466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.704223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.704377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.704628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.705087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.705148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.705170Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.705218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:22315 2025-04-07T14:13:58.854178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.854480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.854524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.856150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.856272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:58.857393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.859069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.859099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238904, at schemeshard: 72057594046644480 2025-04-07T14:13:58.859347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.859439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.859473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.860829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.861061Z node 1 : ... 
ProgressState, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.745485Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249789, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.745546Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715664:0 HandleReply TEvOperationPlan, step: 1744035249789, at tablet: 72057594046644480 2025-04-07T14:14:09.745711Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 240 2025-04-07T14:14:09.751145Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.751454Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.751506Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:0 ProgressState 2025-04-07T14:14:09.751599Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2025-04-07T14:14:09.751640Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2025-04-07T14:14:09.751825Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 1, subscribers: 1 2025-04-07T14:14:09.754278Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:14:09.754317Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:14:09.754340Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T14:14:09.754414Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 E0000 00:00:1744035249.776873 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744035249.776972 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744035249.791602 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744035249.791707 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-07T14:14:09.829033Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropPQ Propose, path: /Root/stream_TestListStreamConsumers, pathId: 0, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.829332Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.829366Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 
72057594046644480 2025-04-07T14:14:09.831128Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: user@builtin, status: StatusAccepted, operation: DROP PERSISTENT QUEUE, path: /Root/stream_TestListStreamConsumers 2025-04-07T14:14:09.831368Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.831638Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.831913Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715665, at schemeshard: 72057594046644480 2025-04-07T14:14:09.832788Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:14:09.832834Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:14:09.832850Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-07T14:14:09.833067Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:14:09.833105Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:14:09.833117Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T14:14:09.842892Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 4 -> 5 2025-04-07T14:14:09.844136Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715665:0 ProgressState 2025-04-07T14:14:09.844186Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 5 -> 128 2025-04-07T14:14:09.846334Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 281474976715665:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:09.852210Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037892 disconnected 2025-04-07T14:14:09.852242Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037889 disconnected 2025-04-07T14:14:09.852414Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037888 disconnected 2025-04-07T14:14:09.852446Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037890 disconnected 2025-04-07T14:14:09.852495Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037893 not found 2025-04-07T14:14:09.852512Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2025-04-07T14:14:09.852526Z node 10 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037892 not found 2025-04-07T14:14:09.852538Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-07T14:14:09.852549Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037891 not found 2025-04-07T14:14:09.852565Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-07T14:14:09.852621Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037891 disconnected 2025-04-07T14:14:09.856154Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249894, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.856202Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 281474976715665:0 HandleReply TEvOperationPlan, step: 1744035249894, at schemeshard: 72057594046644480 2025-04-07T14:14:09.856480Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 128 -> 240 2025-04-07T14:14:09.859959Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.860296Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.860356Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715665:0 ProgressState 2025-04-07T14:14:09.860460Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-07T14:14:09.860516Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-07T14:14:09.860779Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2025-04-07T14:14:09.861102Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:14:09.861141Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:14:09.861164Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2025-04-07T14:14:09.861713Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-04-07T14:14:09.861794Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-07T14:14:09.861819Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-07T14:14:09.861843Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-07T14:14:09.861867Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-07T14:14:09.861906Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-07T14:14:09.862173Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:14:09.862200Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:14:09.862213Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2025-04-07T14:14:09.862268Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-07T14:14:09.864343Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 E0000 00:00:1744035249.869779 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744035249.869902 526318 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-04-07T14:14:02.413915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:02.414260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:02.414531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001e57/r3tmp/tmpHZArGi/pdisk_1.dat 2025-04-07T14:14:02.713295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.747013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.749792Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2025-04-07T14:14:02.794529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.794668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.806216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.916296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.951337Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:14:02.951573Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.986041Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.986122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.987290Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.987355Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.987399Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.987623Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:03.012071Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:03.012278Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:03.012401Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:14:03.012437Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:03.012473Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:03.012508Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.012928Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:03.013024Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:14:03.013356Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:14:03.013411Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.013457Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.013498Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:03.013528Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.013691Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:03.013890Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:03.013961Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:03.015292Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:03.025887Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:03.026007Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:03.205794Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:14:03.209745Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:03.209815Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.210434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.210471Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.210509Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:03.210700Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:03.210817Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.211294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.211359Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:03.212759Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:03.213044Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.214394Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:03.214434Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:14:03.214936Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:03.214976Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:03.215023Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.215939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.215980Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:03.216026Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:03.216074Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:03.216115Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:03.216187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.218559Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:03.219849Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:03.219924Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:03.219972Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:03.227048Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:03.227204Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:03.227254Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:03.227286Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:03.250306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:03.612358Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:03.612433Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.612793Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.612842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.612895Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:03.613050Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2025-04-07T14:14:03.613151Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.613317Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.613976Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.628807Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:03.628884Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:03.628972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.629014Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.629062Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.629134Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exe ... dinators count is 1 buckets per mediator 2 2025-04-07T14:14:11.348729Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:11.350442Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:11.350496Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:11.351225Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:11.351294Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:11.351370Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:11.352469Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:11.352510Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:11.352560Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:11.352624Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:11.352693Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:11.352798Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:11.353282Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:11.355215Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:11.355277Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:11.356094Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:11.364732Z node 3 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:11.364885Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:11.364938Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:11.364972Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:11.388456Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:11.688806Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:11.688889Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:11.689114Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:11.689162Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:11.689207Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:11.689401Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-07T14:14:11.689546Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:11.689729Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:11.690413Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:11.705847Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:11.705939Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:11.706014Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:11.706056Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:11.706104Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:11.706173Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:11.706234Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-07T14:14:11.706338Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:11.708341Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-07T14:14:11.708417Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:11.715096Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:856:2685], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:11.715185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:11.715253Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:11.719936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:11.725224Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:11.944082Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:11.945952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:12.048993Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m1g1b936n4x9sm8q7rd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTg2Y2Q2NGUtNDZlZWY2MTYtMjcyN2YyZS04ZDIwMmI4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:12.049542Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:953:2748], serverId# [3:954:2749], sessionId# [0:0:0] 2025-04-07T14:14:12.049740Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:12.051037Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035252050950 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:12.061989Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:12.062122Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:12.062167Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:12.110464Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89m1v1d2veq9bsg2hyw70z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxM2E3Mi0zZmJhYTI3NS05NDg0MDk4NC03YzQ1MzQxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:12.110771Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:12.111352Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035252111292 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:12.122188Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:12.122294Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:12.122327Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:12.212076Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89m1wwcj1xx83kq82n2530, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWE4YjQ4ZDEtNGIwNjRlMy1mMDQyNjBlMy04MDYwM2ZkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:12.212351Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:12.213211Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744035252213133 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:12.223972Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:12.224082Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:12.224116Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:12.225342Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:992:2780], serverId# [3:993:2781], sessionId# [0:0:0] 2025-04-07T14:14:12.229448Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:994:2782], serverId# [3:995:2783], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-04-07T14:13:57.717354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574308440370006:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.717444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a32/r3tmp/tmprnzRL6/pdisk_1.dat 2025-04-07T14:13:58.041471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:58.048689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.048762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19952, node 1 2025-04-07T14:13:58.062105Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.062130Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.068347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:58.069014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.069149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:58.073115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.076258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.076754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.076824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2025-04-07T14:13:58.076883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:58.076957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.196508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.196535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.196541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.196605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.559360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.567882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.569456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.572944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.573156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.573246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.574731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.575768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.575793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.576955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.580994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.581033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.581326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.582801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.583023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.583117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.583175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.583217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.585215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.585264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.585280Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.585360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.638827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.639113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.639379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.639402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.641181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.641317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.641486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.641864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.641903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.641916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.641969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:9546 2025-04-07T14:13:58.789195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.789449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.789482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.791188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.791328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:58.792121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.793500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238841, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.793523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238841, at schemeshard: 72057594046644480 2025-04-07T14:13:58.793680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.793737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.793761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.794901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.795071Z node 1 :FL ... 
245}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744035245152-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744035245,"finish":1744035246},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744035246}' 2025-04-07T14:14:09.354250Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574358687861386:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:09.354329Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a32/r3tmp/tmpw0AIcM/pdisk_1.dat 2025-04-07T14:14:09.475753Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:09.501098Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:09.501187Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:09.503568Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29708, node 7 2025-04-07T14:14:09.550659Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:09.550684Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:09.550693Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:09.550796Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:09.817848Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.818234Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.818249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.821885Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:14:09.822109Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:14:09.822135Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:14:09.823299Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:09.825336Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:09.825360Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:14:09.829618Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.837255Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249880, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.837301Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:14:09.837587Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:14:09.839859Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.840063Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.840150Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:14:09.840250Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:14:09.840291Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:14:09.840343Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:14:09.841413Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:14:09.841458Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:14:09.841497Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:14:09.841592Z node 7 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:14:09.902148Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.902429Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.902457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.902519Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:14:09.902613Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:14:09.902629Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2025-04-07T14:14:09.904615Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:14:09.904787Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.904999Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.905668Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:14:09.905739Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:14:09.905808Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:14:09.905912Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:19891 2025-04-07T14:14:10.091362Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:10.091621Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:10.091651Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:10.093918Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:14:10.094120Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:10.095038Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:14:10.098101Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035250146, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:10.098131Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744035250146, at schemeshard: 72057594046644480 2025-04-07T14:14:10.098328Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:14:10.098410Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:14:10.098441Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T14:14:10.100235Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:10.100454Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:10.101321Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:14:10.101404Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:14:10.101434Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:14:10.101543Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral >> KqpNewEngine::MultiOutput >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::KeyColumnOrder2 >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> KqpNewEngine::Join [GOOD] >> KqpNewEngine::ItemsLimit >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNotNullInValue >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-04-07T14:14:02.226519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:02.226808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:02.227002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001df2/r3tmp/tmp1lTEis/pdisk_1.dat 2025-04-07T14:14:02.526512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.564986Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.613805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.613967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.625360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.737465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.772855Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:14:02.773080Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.804713Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.804885Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.806273Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.806378Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.806439Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.806718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.830207Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.830394Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.830509Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:14:02.830544Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.830578Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.830643Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.831097Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:14:02.831290Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.840087Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.840211Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:14:02.840523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.840582Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.840626Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.840715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.841163Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:14:02.841352Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.841616Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.841713Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.842439Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.842554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.843924Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:14:02.843999Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:14:02.844057Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:14:02.844325Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.844366Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:14:02.844435Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.844515Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:14:02.844545Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:02.844571Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:14:02.844619Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.845397Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:14:02.845466Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:14:02.845807Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:14:02.846020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.846074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.846109Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:14:02.846146Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:02.846385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:02.846570Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.846632Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:14:02.847445Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.847526Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:02.858313Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.858452Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.858880Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:02.858946Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:03.038583Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:14:03.038694Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:14:03.041559Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:14:03.041617Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:03.041991Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:03.042010Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.042071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:03.042107Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.042156Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:14:03.042343Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:03.042438Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.042550Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.042568Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.042584Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:03.042704Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:03.042782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.042842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:03.042904Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:03.044272Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:03.044588Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.045444Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.045487Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:03.045683Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:03.045869Z node ... in PlanQueue unit at 72075186224037889 2025-04-07T14:14:14.685704Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:14.685834Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:14.686039Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:14.686101Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:14.686577Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:14.686969Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:14.688411Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 17179871639 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:14.688448Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.688534Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-07T14:14:14.688583Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:14.689642Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:14.689680Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:14.689711Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:14.689917Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:14.690018Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:14.690427Z node 4 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:14.690498Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:14.690769Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:14.691037Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:14.691995Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2025-04-07T14:14:14.692052Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-07T14:14:14.692114Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:14.693765Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:14.693842Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:14.693881Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-07T14:14:14.693940Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:14.693990Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:14.694070Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:14.694899Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:14.694942Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:14.694995Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:14.695439Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:14.695484Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.696318Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:14.696354Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:14.696387Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:14.696433Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:14.696471Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:14.696530Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.697895Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:14.697964Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:14.700840Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:14.701001Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-07T14:14:14.701064Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:14:14.701653Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:14.702442Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:14.702489Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:14.710748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:749:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.710843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.710928Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.715923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:14:14.721287Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:14.721388Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:14.906954Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:14.907044Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:14.909009Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:763:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:14:15.146587Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89m4dn6fsm1de803sjyn74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTJlMDMxNWQtMjRjYzg1NjktZDU2NmM4Mi1hNDlhMTFhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.149530Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:918:2718], serverId# [4:919:2719], sessionId# [0:0:0] 2025-04-07T14:14:15.149924Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:14:15.151723Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m4dn6fsm1de803sjyn74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTJlMDMxNWQtMjRjYzg1NjktZDU2NmM4Mi1hNDlhMTFhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.154363Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89m4dn6fsm1de803sjyn74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTJlMDMxNWQtMjRjYzg1NjktZDU2NmM4Mi1hNDlhMTFhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.154785Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:15.156032Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035255155937 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.166957Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:15.167080Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-07T14:14:15.167173Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:15.167234Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.168101Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-04-07T14:14:15.168177Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.172209Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:935:2728], serverId# [4:936:2729], sessionId# [0:0:0] 2025-04-07T14:14:15.177288Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:937:2730], serverId# [4:938:2731], sessionId# [0:0:0] >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-04-07T14:14:02.127725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:02.128039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:02.128263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001be0/r3tmp/tmpmebqTK/pdisk_1.dat 2025-04-07T14:14:02.432000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.475471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.523307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.523475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.534970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.646332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.683773Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:14:02.684024Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.722700Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.722881Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.724307Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.724413Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.724485Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.724807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.751228Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.751397Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.751507Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:14:02.751543Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.751576Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.751624Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.752087Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:14:02.752268Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.760510Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.760630Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:14:02.760935Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.760982Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.761022Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.761082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.761464Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:14:02.761640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.761859Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.761943Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.762502Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.762613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.763721Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:14:02.763776Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:14:02.763818Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:14:02.764083Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.764122Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:14:02.764180Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.764266Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:14:02.764291Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:02.764314Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:14:02.764346Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.765068Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:14:02.765131Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:14:02.765411Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:14:02.765574Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.765625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.765659Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:14:02.765688Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:02.765918Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:02.766083Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.766143Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:14:02.766874Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.766935Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:02.777736Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.777859Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.778295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:02.778355Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.957922Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:14:02.958054Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:14:02.962192Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:14:02.962261Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.962749Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:02.962782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.962872Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.962918Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.962982Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:14:02.963212Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.963337Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.963487Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.963512Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.963535Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:02.963676Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.963801Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.963880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.963939Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:02.965850Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.966245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.967514Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.967579Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:02.967925Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.968219Z node ... n request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:14.802167Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:14.803282Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 17179871639 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:14.803311Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.803367Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-07T14:14:14.803397Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:14.804130Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:14.804156Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:14.804177Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:14.804298Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:14.804365Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:14.804642Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:14.804703Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:14.804891Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:14.805064Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:14.805707Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 
72075186224037889 step# 1000 txid# 281474976715657} 2025-04-07T14:14:14.805757Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-07T14:14:14.805802Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:14.806840Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:14.806876Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:14.806916Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-07T14:14:14.806964Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:14.806999Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:14.807062Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:14.807807Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:14.807844Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:14.807888Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:14.808204Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:14.808238Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.808900Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:14.808938Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:14.808970Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:14.809012Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:14.809044Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:14.809098Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:14.810300Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:14.810347Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:14.812526Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:14.812686Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-07T14:14:14.812748Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:14:14.813278Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:14.813951Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:14.813990Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:14.820690Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:749:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.820764Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.820813Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.824099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:14:14.827411Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:14.827500Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:15.014054Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.014149Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:15.016638Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:763:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:14:15.124914Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89m4h3e3fpaksy986q31rd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjU3OTBlOWItNGM4ZGRlYzAtNTE4MWE2MmItNzJjZDJhMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.125418Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:906:2710], serverId# [4:907:2711], sessionId# [0:0:0] 2025-04-07T14:14:15.125613Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:15.126800Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035255126711 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.137631Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:15.137760Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:15.137806Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.183820Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m4v43ba8jp15bfshyv5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODk5MjUxOTAtNzEwYzIzNS04YWNlOWNlNy1hOWE2Yjk3Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:15.184105Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:15.184889Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035255184794 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.185015Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744035255184794 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.195693Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:15.195847Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:15.195880Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.198599Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:943:2743], serverId# [4:944:2744], sessionId# [0:0:0] 2025-04-07T14:14:15.202657Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:945:2745], serverId# [4:946:2746], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-04-07T14:14:02.160045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:02.160472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:02.160937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bff/r3tmp/tmpnOHFzS/pdisk_1.dat 2025-04-07T14:14:02.443847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.479493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.527766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.527917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.539202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.649544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.688193Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:14:02.688463Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.726700Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.726886Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.728108Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.728183Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.728234Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.728488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.750974Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.751162Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.751263Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:14:02.751296Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.751324Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.751366Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.751735Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:14:02.751885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.758144Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.758238Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:14:02.758445Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.758479Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.758517Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.758571Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.758889Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:14:02.759021Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.759239Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.759311Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.759815Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.759889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.760836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:14:02.760886Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:14:02.760928Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:14:02.761124Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.761152Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:14:02.761210Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.761268Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:14:02.761287Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:02.761304Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:14:02.761330Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.761853Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:14:02.761900Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:14:02.762137Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:14:02.762263Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.762307Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.762331Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:14:02.762362Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:02.762516Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:02.762641Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.762686Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:14:02.763228Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.763278Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:02.773984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.774085Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.774475Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:02.774528Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.953958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:14:02.954109Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:14:02.958348Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:14:02.958428Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.958958Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:02.958991Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.959091Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.959148Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.959215Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:14:02.959504Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.959651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.959830Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.959860Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.959887Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:02.960012Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.960121Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.960208Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.960302Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:02.962250Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.962678Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.963979Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.964053Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:02.964368Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.964645Z node ... awX2: 17179871639 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:15.024156Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.024542Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.024580Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:15.024623Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:15.024831Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:15.024939Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:15.025285Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.025335Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:15.025667Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:15.025976Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:15.027308Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:15.027366Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.027999Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:15.028057Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:15.028114Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.028958Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.028992Z node 4 :TX_DATASHARD DEBUG: Trying to activate change 
sender: at tablet: 72075186224037888 2025-04-07T14:14:15.029044Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:15.029104Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.029144Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:15.029222Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.029572Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.031870Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:15.031929Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:15.032375Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:15.039280Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:15.039431Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:15.039486Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:15.039520Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:15.063208Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:15.379319Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 517 RawX2: 17179871639 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:15.379379Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.379602Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.379652Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:15.379701Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:15.379869Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-07T14:14:15.379967Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:15.380104Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.380766Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:15.395801Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:15.395914Z node 4 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:15.396009Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.396052Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.396097Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.396168Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.396224Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-07T14:14:15.396320Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.398468Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-07T14:14:15.398544Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:15.405400Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:857:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.405514Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:867:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.405592Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.410841Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:15.415101Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.630789Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.633605Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:871:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:15.734583Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m53b5q77hfv8cfx6ew3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWRlOTk3MDgtMjFmOGM2ZDktODgwZmFiMzItM2RkNzgwYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.735025Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:954:2749], serverId# [4:955:2750], sessionId# [0:0:0] 2025-04-07T14:14:15.735187Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:15.736349Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035255736242 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:15.747471Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:15.747655Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:15.747720Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.825644Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89m5e71f389mvgeg99xc9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWY5YjA4YTQtMzAwODMxNjctMzE2NmQyMDgtNDljOGIxOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:15.826202Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:15.827276Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035255827175 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:15.838263Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:15.838425Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:15.838476Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.840293Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:978:2769], serverId# [4:979:2770], sessionId# [0:0:0] 2025-04-07T14:14:15.847279Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:980:2771], serverId# [4:981:2772], sessionId# [0:0:0] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-04-07T14:14:01.924302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:01.924529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:01.924781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bb6/r3tmp/tmpFm1FIH/pdisk_1.dat 2025-04-07T14:14:02.308415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.347112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.398704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.398797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.410142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.532744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.573466Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:14:02.573742Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.624448Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.624634Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.626043Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.626120Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.626172Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.626465Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.653612Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.653763Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.653875Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:14:02.653910Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.653939Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.653987Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.654363Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:14:02.654530Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.662212Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.662336Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:14:02.662591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.662630Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.662665Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.662696Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.663047Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:14:02.663204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.663414Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.663494Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.664061Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.664151Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.665218Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:14:02.665284Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:14:02.665328Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:14:02.665530Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.665563Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:14:02.665621Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.665683Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:14:02.665704Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:02.665724Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:14:02.665759Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.666410Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:14:02.666474Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:14:02.666745Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:14:02.666895Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.666931Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.666961Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:14:02.666988Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:02.667193Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:02.667356Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.667406Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:14:02.668058Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.668115Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:02.678864Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.678989Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.679413Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:02.679467Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.858532Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:14:02.858692Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:14:02.862425Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:14:02.862470Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:02.862771Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:02.862788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.862841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.862871Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.862915Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:14:02.863069Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.863159Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.863290Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.863310Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.863325Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:02.863444Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.863498Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.863557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:02.863602Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:02.864813Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.865108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.865935Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.865985Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:02.866208Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.866369Z node ... 186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:15.523404Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037890 2025-04-07T14:14:15.523537Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:15.523611Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:15.523849Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:15.523909Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:15.523973Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.524039Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:14:15.524085Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-04-07T14:14:15.524283Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:15.524568Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:15.525393Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-07T14:14:15.525421Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.528050Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.528089Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:15.528130Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:15.528188Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.528229Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:15.528308Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.528429Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2025-04-07T14:14:15.528461Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-07T14:14:15.528491Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:15.529497Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:15.529539Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:15.529559Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-07T14:14:15.529598Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.529625Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:15.529670Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.529719Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-04-07T14:14:15.529740Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:14:15.530122Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 1000 txid# 281474976715657} 2025-04-07T14:14:15.530154Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-07T14:14:15.530184Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:14:15.531597Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.531652Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:15.531683Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:15.531963Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:14:15.531986Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T14:14:15.532010Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-07T14:14:15.532045Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.532071Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:15.532115Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:14:15.536044Z node 4 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:15.536507Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:15.536557Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:15.536856Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:15.537157Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:15.537269Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-07T14:14:15.537309Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:14:15.538157Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-07T14:14:15.538191Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-07T14:14:15.545322Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.545392Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:809:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.545449Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.549098Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:14:15.553541Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.553637Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:15.553693Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:15.742282Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.742346Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:15.742378Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:15.744514Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:14:15.851376Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89m57r2tvp7nnt1vwm5yf7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2JjOWJjZTUtYjAyOGE0MjMtZGUwNjM1MGMtYWRlZDdiOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:15.851873Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:995:2764], serverId# [4:996:2765], sessionId# [0:0:0] 2025-04-07T14:14:15.852065Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:15.853340Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035255853254 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.853486Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035255853254 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:15.864479Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:15.864680Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:15.864755Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:15.868845Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1002:2770], serverId# [4:1003:2771], sessionId# [0:0:0] 2025-04-07T14:14:15.874039Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1004:2772], serverId# [4:1005:2773], sessionId# [0:0:0] >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-04-07T14:14:01.922843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:01.923095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:01.923329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bf1/r3tmp/tmpIzAimH/pdisk_1.dat 2025-04-07T14:14:02.308383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.347123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.354188Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2025-04-07T14:14:02.398924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.399089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.410322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.532449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.572607Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:14:02.572834Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.614030Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.614109Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.615458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.616270Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.616344Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.617121Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.641237Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.641409Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.641510Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:14:02.641548Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.641577Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.641606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.641990Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.642079Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:14:02.642378Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:14:02.642432Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.642480Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.642516Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.642552Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.642727Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.642931Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.643013Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.644418Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.654971Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.655061Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.832813Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:14:02.835521Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:02.835569Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.836210Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.836255Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.836340Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:02.836532Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.836627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.837029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.837084Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:02.838931Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.839305Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.840556Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:02.840594Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:14:02.841043Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:02.841083Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:02.841124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.841978Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.842014Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.842055Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:02.842101Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:02.842135Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:02.842187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.844501Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.845886Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:02.845978Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:02.846020Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:02.859467Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.859596Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.859629Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:02.859664Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:02.882714Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:03.245644Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:03.245715Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.245983Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.246031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.246084Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:03.246234Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2025-04-07T14:14:03.246336Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.246480Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.247126Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.261710Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:03.261789Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:03.261849Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.261879Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.261912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.261967Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exe ... 2025-04-07T14:14:15.419259Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:15.419707Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:15.421654Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:15.421717Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.422474Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:15.422556Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:15.422622Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.423701Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.423759Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:15.423832Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:15.423908Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.423971Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:15.424056Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.424491Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:15.426930Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:15.427006Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:15.427620Z node 4 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:15.437475Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:15.437639Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:15.437699Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:15.437742Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:15.462156Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:15.781024Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 517 RawX2: 17179871639 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:15.781115Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.781403Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.781456Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:15.781507Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:15.781719Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-07T14:14:15.781852Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:15.782007Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:15.782645Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:15.797481Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:15.797580Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:15.797675Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.797722Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:15.797769Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.797851Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:15.797920Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-07T14:14:15.798029Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:15.800654Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-07T14:14:15.800821Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:15.809563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:857:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.809668Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:867:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.809748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.816472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:15.822412Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:16.041020Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:16.043668Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:871:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:16.256203Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m5fz05zxb6k8jsqf1s0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzcyYzUzNzYtZjMyNGM3MDktNjY1NzkzYWEtOWRlNDNjNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:16.259529Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2025-04-07T14:14:16.259900Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:14:16.262201Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89m5fz05zxb6k8jsqf1s0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzcyYzUzNzYtZjMyNGM3MDktNjY1NzkzYWEtOWRlNDNjNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:16.264875Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89m5fz05zxb6k8jsqf1s0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzcyYzUzNzYtZjMyNGM3MDktNjY1NzkzYWEtOWRlNDNjNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:16.265266Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:16.266627Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035256266528 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:16.277706Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:16.277820Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-04-07T14:14:16.277922Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:16.277973Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:16.278793Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-04-07T14:14:16.278857Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:16.344528Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89m5ytdbytnbdz17r5vn1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTE3N2RmZmUtZjNkYTdlZDYtYjY5MzRjNTItNWQzOWFlM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:16.344878Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:16.345862Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035256345777 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:16.356869Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:16.357017Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:16.357057Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:16.358719Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2025-04-07T14:14:16.364001Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] >> KqpRanges::IsNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:43.190345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:43.190413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.190444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:43.190471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:43.190526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:43.190569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:43.190642Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.190950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:43.254414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:43.254450Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.263783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:43.263905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:43.264050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:43.272839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:43.272988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:43.273522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.273818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:43.278479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.280157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.280211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.280349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:43.280406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.280443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:43.280563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:43.286682Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.395812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:43.396024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.396219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:43.396411Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:43.396468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.398367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.398499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:43.398648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.398686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:43.398715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:43.398769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:43.400431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.400482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:43.400509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:43.402065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.402102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.402153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.402190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.405078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:43.406496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:43.406667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:43.407383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.407513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 
129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:43.407558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.407780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:43.407822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.407963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:43.408021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:43.409659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.409697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.409822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.409852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:43.410131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.410170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:43.410243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:43.410282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.410319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:43.410358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.410387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:43.410414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:43.410469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:43.410501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:43.410525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
ToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:16.857733Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:14:16.857922Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 217us result status StatusSuccess 2025-04-07T14:14:16.858531Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:16.869392Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:784:2608] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:14:16.869485Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:708:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:14:16.869604Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:784:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035256846180 Step: 5000003 TxId: 
18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035256846180 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035256846180 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:14:16.871466Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:784:2608] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:14:16.871535Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:708:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> KqpExtractPredicateLookup::OverflowLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:33.185730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:33.185818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:33.185850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:33.185877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:33.185925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:33.185968Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:33.186018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:33.186317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:33.254641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:33.254688Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:33.264777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:33.264899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:33.265043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:33.273918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:33.274093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:33.274629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:33.274962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:33.279489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:33.281600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:33.281660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:33.281843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:33.281891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:33.281932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:33.282085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:33.288790Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:33.388142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:33.388350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.388554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:33.388778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:33.388849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.391128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:33.391337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:33.391534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.391590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:33.391652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:33.391710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:33.393950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.394014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:33.394053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:33.396166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.396215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.396302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:33.396353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:33.399783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:33.401386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:33.401552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:33.402293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:33.402416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:33.402470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:33.402695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:33.402743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:33.402889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:33.402971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:33.404836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:33.404870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:33.405011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:33.405041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:33.405320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:33.405362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:33.405437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:33.405484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:33.405525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:33.405569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:33.405601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:33.405627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:33.405688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:33.405721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:33.405747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
chemeshard: 72057594046678944 2025-04-07T14:14:17.713614Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 229us result status StatusSuccess 2025-04-07T14:14:17.714484Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 
4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:17.718893Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:14:17.719085Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 210us result status StatusSuccess 2025-04-07T14:14:17.719754Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpKv::BulkUpsert >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr >> KqpNewEngine::LocksMultiShard >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::LocksSingleShard >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> KqpSort::ReverseOptimized >> KqpAgg::AggWithLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] Test command err: 2025-04-07T14:12:27.182700Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.186191Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.186412Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.186992Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.188130Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.188179Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.189084Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe 
AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.189133Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.189261Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.189535Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.190242Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.190294Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.190426Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.190605Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:19:2063] 2025-04-07T14:12:27.190637Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:19:2063] 2025-04-07T14:12:27.200179Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.200247Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.202139Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202304Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202455Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202591Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202723Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202855Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202969Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.202994Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.203144Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:28:2073] 2025-04-07T14:12:27.203173Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:28:2073] 2025-04-07T14:12:27.203866Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.203954Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.219037Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.219091Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.219134Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.219260Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.219300Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.219327Z node 1 :BS_NODE DEBUG: 
{NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.223962Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.225952Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.226129Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.226274Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.226608Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.226856Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.226894Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.226957Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-07T14:12:27.228790Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-07T14:12:27.228836Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-07T14:12:27.228971Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:49:2089] 2025-04-07T14:12:27.228995Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:49:2089] 2025-04-07T14:12:27.233015Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:49:2089] 2025-04-07T14:12:27.233156Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.233283Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.233338Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.242321Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.243477Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.243608Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.243646Z node 1 :TABLET_RESOLVER DEBUG: 
DropEntry tabletId: 72057594037936129 followers: 0 2025-04-07T14:12:27.243716Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.246073Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:27.246163Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-07T14:12:27.246188Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-07T14:12:27.252337Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:19:2063] 2025-04-07T14:12:27.252878Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:19:2063] 2025-04-07T14:12:27.253270Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.253644Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.257056Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T14:12:27.257096Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.257225Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-07T14:12:27.257536Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T14:12:27.257578Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:45:2088] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.257629Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.038165s 2025-04-07T14:12:27.257830Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.257918Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.257991Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.258014Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-07T14:12:27.258627Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:28:2073] 2025-04-07T14:12:27.258665Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:28:2073] 2025-04-07T14:12:27.258716Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T14:12:27.258748Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.258877Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: ... 
Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-07T14:14:18.508246Z node 18 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-07T14:14:18.508335Z node 18 :TABLET_RESOLVER DEBUG: SelectForward node 18 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [18:312:2292] 2025-04-07T14:14:18.508485Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [18:380:2343] 2025-04-07T14:14:18.508564Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [18:380:2343] 2025-04-07T14:14:18.508744Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [18:380:2343] 2025-04-07T14:14:18.508954Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [18:380:2343] 2025-04-07T14:14:18.509029Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [18:380:2343] 2025-04-07T14:14:18.509383Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [18:384:2346] 2025-04-07T14:14:18.509440Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [18:384:2346] 2025-04-07T14:14:18.509516Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:14:18.509572Z node 18 :TABLET_RESOLVER DEBUG: SelectForward node 18 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [18:263:2256] 2025-04-07T14:14:18.509667Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [18:384:2346] 2025-04-07T14:14:18.509728Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [18:384:2346] 2025-04-07T14:14:18.509803Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [18:384:2346] 2025-04-07T14:14:18.509892Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [18:384:2346] 2025-04-07T14:14:18.510025Z node 18 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [18:384:2346] 2025-04-07T14:14:18.510180Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [18:384:2346] 2025-04-07T14:14:18.510243Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [18:384:2346] 2025-04-07T14:14:18.510289Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [18:384:2346] 2025-04-07T14:14:18.510348Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [18:384:2346] 2025-04-07T14:14:18.510387Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [18:384:2346] 2025-04-07T14:14:18.510485Z node 18 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [18:383:2345] EventType# 268697601 2025-04-07T14:14:18.510742Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-07T14:14:18.510841Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, 
NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:14:18.511256Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{10, redo 442b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:14:18.511332Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:14:18.522442Z node 18 :BS_PROXY_PUT INFO: [f913878b3da83702] bootstrap ActorId# [18:387:2349] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:8:0:0:230:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:14:18.522579Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037927937:2:8:0:0:230:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:14:18.522651Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [72057594037927937:2:8:0:0:230:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:14:18.522742Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG33 2025-04-07T14:14:18.522796Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG32 2025-04-07T14:14:18.523032Z node 18 :BS_PROXY DEBUG: Send to queueActorId# [18:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:8:0:0:230:1] FDS# 230 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:14:18.606714Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037927937:2:8:0:0:230:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 22 } Cost# 81811 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 23 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:14:18.606853Z node 18 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:14:18.606947Z node 18 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:14:18.607129Z node 18 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:14:18.607279Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-07T14:14:18.607475Z node 18 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [18:46:2089], reason = ReasonStop Marker# TSYS29 2025-04-07T14:14:18.607523Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-07T14:14:18.607713Z node 18 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-07T14:14:18.607760Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-07T14:14:18.607901Z node 18 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, 
-0b acc} 2025-04-07T14:14:18.608490Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [18:380:2343] 2025-04-07T14:14:18.608539Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [18:380:2343] 2025-04-07T14:14:18.608647Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [18:49:2089] 2025-04-07T14:14:18.608723Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [18:49:2089] 2025-04-07T14:14:18.608784Z node 18 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [18:46:2089] EventType# 268960257 2025-04-07T14:14:18.608910Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-07T14:14:18.608975Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:14:18.609086Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:14:18.609157Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:14:18.609326Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-07T14:14:18.609411Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:14:18.609495Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:14:18.609550Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:14:18.609834Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [18:389:2351] 2025-04-07T14:14:18.609867Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [18:389:2351] 2025-04-07T14:14:18.609937Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:14:18.609990Z node 18 :TABLET_RESOLVER DEBUG: SelectForward node 18 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [18:312:2292] 2025-04-07T14:14:18.610042Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [18:389:2351] 2025-04-07T14:14:18.610090Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [18:389:2351] 2025-04-07T14:14:18.610172Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [18:389:2351] 2025-04-07T14:14:18.610222Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [18:389:2351] 2025-04-07T14:14:18.610283Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-07T14:14:18.610525Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:14:18.610635Z node 18 :STATESTORAGE 
DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:14:18.610692Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:14:18.610718Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:14:18.610778Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:14:18.610842Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2025-04-07T14:14:18.610923Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-07T14:14:18.611018Z node 18 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::JoinWithParams >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-04-07T14:14:05.647366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:05.647622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:05.647859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ba4/r3tmp/tmpo4ogMP/pdisk_1.dat 2025-04-07T14:14:05.958685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:05.993532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:06.042317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:06.042463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:06.053852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:06.164495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:06.199315Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2025-04-07T14:14:06.199536Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:06.230800Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:06.230939Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:06.232266Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:06.232379Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:06.232440Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:06.232772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:06.258941Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:06.259086Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:06.259187Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2562] 2025-04-07T14:14:06.259220Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:06.259249Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:06.259306Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:06.259697Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:645:2547] 2025-04-07T14:14:06.259920Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:06.268167Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:06.268283Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:14:06.268548Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:06.268599Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:06.268642Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:06.268720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:06.269135Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:635:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2025-04-07T14:14:06.269299Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:06.269588Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:06.269671Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:06.270256Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:06.270358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:06.271387Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:14:06.271439Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:14:06.271493Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:14:06.271746Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:06.271785Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:14:06.271856Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:06.271927Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:678:2566] 2025-04-07T14:14:06.271963Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:14:06.271991Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:14:06.272027Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:06.272820Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:14:06.272890Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:14:06.273216Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2025-04-07T14:14:06.273397Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:06.273447Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:06.273481Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:14:06.273513Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:14:06.273760Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:06.273936Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:06.273997Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:14:06.274768Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:06.274840Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:06.285653Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:06.285768Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:06.286206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:06.286269Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:06.465252Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2025-04-07T14:14:06.465340Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:702:2584], serverId# [1:705:2587], sessionId# [0:0:0] 2025-04-07T14:14:06.468505Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-07T14:14:06.468572Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:06.468962Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:06.468983Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:06.469058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:06.469099Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:06.469147Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-07T14:14:06.469336Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:06.469441Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:06.469577Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:06.469596Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:06.469614Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:06.469710Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:06.469783Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 
(dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:06.469833Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:14:06.469867Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:06.471327Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:06.471657Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:06.472543Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:06.472609Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-07T14:14:06.472853Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:06.473054Z node ... 10694Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 1000 txid# 281474976715657} 2025-04-07T14:14:17.610727Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-07T14:14:17.610774Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:14:17.612295Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:17.612351Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:17.612384Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:17.612748Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:14:17.612786Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T14:14:17.612815Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-07T14:14:17.612851Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:17.612879Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:17.612934Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:14:17.617101Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:17.617530Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:17.617611Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:17.617884Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 
next step 1000 2025-04-07T14:14:17.618190Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:17.618274Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-07T14:14:17.618301Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-07T14:14:17.619189Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-07T14:14:17.619234Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-07T14:14:17.627028Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.627114Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:809:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.627193Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.631002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:14:17.635457Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:17.635539Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:17.635575Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:17.826078Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:17.826195Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:14:17.826251Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-07T14:14:17.829999Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:14:18.134426Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr89m78sde3zsnkmtb1ze06c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWJhZDU3ODgtMmFhMGRhZjItNGI3YzFlNDUtZjkzODBhMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:18.138127Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1007:2772], serverId# [4:1008:2773], sessionId# [0:0:0] 2025-04-07T14:14:18.138544Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:14:18.140773Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89m78sde3zsnkmtb1ze06c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWJhZDU3ODgtMmFhMGRhZjItNGI3YzFlNDUtZjkzODBhMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:18.143776Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89m78sde3zsnkmtb1ze06c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWJhZDU3ODgtMmFhMGRhZjItNGI3YzFlNDUtZjkzODBhMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:18.144193Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:18.145540Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035258145448 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.145713Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035258145448 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.156812Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:18.156984Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-07T14:14:18.157108Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:18.157173Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:18.158144Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-04-07T14:14:18.158215Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:18.239522Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jr89m7sj8ta3v83tjs7c4dz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmVhZThkY2ItYWYxNDMzYjctZWFmMjY0ODktZGZjNjUxYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:18.239899Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:14:18.241034Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744035258240934 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.241197Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1744035258240934 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.241297Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1744035258240934 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.241391Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1744035258240934 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-07T14:14:18.252390Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-07T14:14:18.252590Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-07T14:14:18.252640Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:14:18.256713Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1062:2817], serverId# [4:1063:2818], sessionId# [0:0:0] 2025-04-07T14:14:18.262643Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1064:2819], serverId# [4:1065:2820], sessionId# [0:0:0] >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter >> KqpSqlIn::SimpleKey >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> 
DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-04-07T14:13:57.694109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574309761724356:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.694205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a27/r3tmp/tmpj64Ltw/pdisk_1.dat 2025-04-07T14:13:58.048254Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25369, node 1 2025-04-07T14:13:58.062511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:58.063068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.063234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:13:58.064819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.064992Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.065016Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:13:58.065343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.065369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.065426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:13:58.065512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.112952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.113437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.120605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:13:58.195792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.195898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12335 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.569733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.573785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.575068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.576902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.576925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.577872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.578205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.578226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.579347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.582416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.583940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.584098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.584144Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.584257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.584298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.584337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.585999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.586035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.586043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.586094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.636751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.637002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.637027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.637081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.637145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.637165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.638890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.639043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.639849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.639863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.639936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:12335 2025-04-07T14:13:58.807125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.807306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.807332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.808921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.809085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:13:58.810031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.811826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238855, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.811864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238855, at schemeshard: 72057594046644480 2025-04-07T14:13:58.812094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.812159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.812181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.813537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.813694Z node 1 : ... 
at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:14:08.839521Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.839800Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:08.839826Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.839897Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:14:08.839981Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:14:08.840011Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2025-04-07T14:14:08.841561Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:14:08.841688Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:08.841890Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:08.842233Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:14:08.842261Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:14:08.842274Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:14:08.842362Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:26717 2025-04-07T14:14:09.001933Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.002125Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.002154Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.003780Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:14:09.003926Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:09.004653Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:14:09.006279Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249054, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.006311Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744035249054, at schemeshard: 72057594046644480 2025-04-07T14:14:09.006503Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:14:09.006584Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:14:09.006617Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T14:14:09.007735Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.007913Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.008328Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:14:09.008361Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:14:09.008374Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:14:09.008414Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:14:09.061991Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.062342Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:09.064552Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo 2025-04-07T14:14:09.064750Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.064975Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.065046Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet# 72057594046644480 2025-04-07T14:14:09.065321Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T14:14:09.065937Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.065978Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.065999Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:14:09.066175Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.066199Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.066208Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:14:09.069388Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.069622Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:09.069664Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2025-04-07T14:14:09.070840Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:09.125457Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.127178Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:09.127210Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T14:14:09.128456Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:09.130996Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035249173, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:09.131038Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1744035249173, at tablet: 72057594046644480 2025-04-07T14:14:09.131194Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:14:09.132515Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:09.132856Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:09.132911Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:14:09.132996Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:14:09.133035Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:14:09.133186Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2025-04-07T14:14:09.133984Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.134021Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.134039Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:14:09.134191Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:09.134222Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:09.134234Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:14:09.134265Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2025-04-07T14:14:13.429467Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490574353977229235:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:13.429537Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue >> KqpSqlIn::TableSource >> CdcStreamChangeCollector::OldImage [GOOD] >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_NonExistentKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-04-07T14:14:01.920594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:14:01.920968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:14:01.921205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001bca/r3tmp/tmpAy8G3a/pdisk_1.dat 2025-04-07T14:14:02.308435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.347110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:02.352423Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2025-04-07T14:14:02.398254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:02.398388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:02.409756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:02.532504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:02.572421Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:14:02.572599Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:14:02.614300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:14:02.614365Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:14:02.615389Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:14:02.616222Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:14:02.616286Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:14:02.617066Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:14:02.634288Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:14:02.635264Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:14:02.635365Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:14:02.635398Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.635441Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:14:02.635477Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.636565Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:14:02.636690Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:14:02.637802Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:14:02.637892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.637942Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.637986Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:14:02.638030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.638200Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.639232Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:14:02.639352Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:14:02.640965Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.651514Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:02.652312Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:14:02.831670Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:14:02.835750Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:02.835806Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.836491Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.836533Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:02.836574Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:02.836761Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:14:02.836870Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:02.837371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:02.837432Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:14:02.839117Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:02.839459Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:02.841549Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:02.841606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:14:02.842127Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:02.842189Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:02.842248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.843256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:02.843307Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:02.843364Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:02.843415Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:02.843466Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:02.843550Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:02.846737Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:02.848272Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:02.848345Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:02.848375Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:02.859262Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:02.859381Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:02.859425Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:02.859459Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:02.882189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:03.254855Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:03.254908Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.255121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.255156Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:03.255195Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:03.255326Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2025-04-07T14:14:03.255422Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:03.255539Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:03.256010Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:03.269323Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:03.269377Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:03.269443Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.269475Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:03.269509Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:03.269554Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exe ... 2025-04-07T14:14:20.302720Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:14:20.303153Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:20.305005Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:14:20.305058Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:20.305710Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:14:20.305780Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:14:20.305870Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:20.306985Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:20.307029Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:14:20.307087Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:14:20.307156Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:20.307216Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:14:20.307300Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:20.307777Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:20.310357Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:14:20.310418Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:20.311016Z node 4 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:14:20.320203Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:20.320381Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-07T14:14:20.320435Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-07T14:14:20.320494Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:14:20.346211Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:20.696028Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 517 RawX2: 17179871639 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:14:20.696099Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:20.696319Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:20.696370Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:14:20.696416Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:14:20.696588Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-07T14:14:20.696741Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:14:20.696916Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:14:20.697676Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:14:20.714295Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2025-04-07T14:14:20.714446Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-07T14:14:20.714566Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:20.714623Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:14:20.714672Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:20.714772Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:14:20.714849Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-07T14:14:20.714986Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:20.718050Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-07T14:14:20.718151Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:14:20.726238Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:857:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:20.726343Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:867:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:20.726448Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:20.732372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:20.739328Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:20.966108Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:14:20.969285Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:871:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:21.251514Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89ma9m2md5qc0qx6f74yrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWJlOWUwMTctYmM3MWIwNDItYzQzNGYyODQtZjU5NWFjZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:21.255269Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2025-04-07T14:14:21.255633Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:14:21.258236Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89ma9m2md5qc0qx6f74yrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWJlOWUwMTctYmM3MWIwNDItYzQzNGYyODQtZjU5NWFjZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:21.261269Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89ma9m2md5qc0qx6f74yrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWJlOWUwMTctYmM3MWIwNDItYzQzNGYyODQtZjU5NWFjZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:14:21.261689Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:21.263016Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744035261262913 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:21.274026Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:21.274125Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-04-07T14:14:21.274213Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:21.274274Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:21.274996Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-04-07T14:14:21.275054Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:21.333659Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89matx6ykecd6yamr70agr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDAyZWI4MjctNDFkYzU5MzktNGY1MGRmOTItOGYwMzRiYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:14:21.333979Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:14:21.334799Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744035261334704 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-07T14:14:21.349219Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:14:21.349322Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-07T14:14:21.349356Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:14:21.351051Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2025-04-07T14:14:21.358108Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] >> KqpNewEngine::SimpleUpsertSelect |95.9%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::LocksMultiShard [GOOD] >> KqpNewEngine::LocksMultiShardOk >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNullInValue >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpRanges::UpdateWhereInNoFullScan >> KqpNewEngine::JoinWithParams [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> KqpNewEngine::LeftSemiJoin >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for 
TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:40.286762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:40.286867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:40.286905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:40.286934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:40.286984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:40.287028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:40.287083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:40.287372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:40.333370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:40.333414Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:40.341754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:40.341877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:40.342023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:40.351787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:40.351950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:40.352539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.352883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:40.357705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.359541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:40.359605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.359808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:40.359855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:40.359890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:40.360026Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:40.366131Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:40.484170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:40.484381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.484595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:40.484813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:40.484876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.486867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.486996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:40.487141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.487189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:40.487220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:40.487261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:40.488916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.488961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:40.488997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:40.490409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.490445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.490491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:40.490531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-07T14:13:40.498468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:40.500415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:40.500598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:40.501443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:40.501555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:40.501604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:40.501822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:40.501866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:40.502033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:40.502109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:40.503959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:40.504003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:40.504135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:40.504166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:40.504462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:40.504500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:40.504589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:40.504638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:40.504697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-04-07T14:13:40.504747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:40.504780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:40.504811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:40.504867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:40.504906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:40.504934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... ent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:24.595089Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:14:24.595388Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 341us result status StatusSuccess 2025-04-07T14:14:24.596311Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { 
CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:24.608460Z node 30 
:CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:820:2656] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:14:24.608594Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:759:2656] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:14:24.608798Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:820:2656] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035264578322 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035264578322 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035264578322 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:14:24.612479Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:820:2656] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:14:24.612561Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:759:2656] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:156:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:158:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:160:2057] recipient: [4:159:2180] Leader for TabletID 72057594037927937 is [4:161:2181] sender: [4:162:2057] recipient: [4:159:2180] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:161:2181] Leader for TabletID 72057594037927937 is [4:161:2181] sender: [4:231:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:161:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:163:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:165:2057] recipient: [5:164:2185] Leader for TabletID 72057594037927937 is [5:166:2186] sender: [5:167:2057] recipient: [5:164:2185] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:166:2186] Leader for TabletID 72057594037927937 is [5:166:2186] sender: [5:236:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:161:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:164:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:165:2057] recipient: [6:163:2185] Leader for TabletID 72057594037927937 is [6:166:2186] sender: [6:167:2057] recipient: [6:163:2185] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:166:2186] Leader for TabletID 72057594037927937 is [6:166:2186] sender: [6:236:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:162:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:164:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:166:2057] recipient: [7:165:2185] Leader for TabletID 72057594037927937 is [7:167:2186] sender: [7:168:2057] recipient: [7:165:2185] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:167:2186] Leader for TabletID 72057594037927937 is [7:167:2186] sender: [7:237:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:167:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:170:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:171:2057] recipient: [8:169:2190] Leader for TabletID 72057594037927937 is [8:172:2191] sender: [8:173:2057] recipient: [8:169:2190] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:172:2191] Leader for TabletID 72057594037927937 is [8:172:2191] sender: [8:242:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:167:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:170:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:171:2057] recipient: [9:169:2190] Leader for TabletID 72057594037927937 is [9:172:2191] sender: [9:173:2057] recipient: [9:169:2190] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:172:2191] Leader for TabletID 72057594037927937 is [9:172:2191] sender: [9:242:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:169:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:172:2057] recipient: [10:171:2191] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:173:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:174:2192] sender: [10:175:2057] recipient: [10:171:2191] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:174:2192] Leader for TabletID 72057594037927937 is [10:174:2192] sender: [10:244:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:174:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:177:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:178:2057] recipient: [11:176:2196] Leader for TabletID 72057594037927937 is [11:179:2197] sender: [11:180:2057] recipient: [11:176:2196] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:179:2197] Leader for TabletID 72057594037927937 is [11:179:2197] sender: [11:249:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... sender: [27:181:2057] recipient: [27:179:2198] Leader for TabletID 72057594037927937 is [27:182:2199] sender: [27:183:2057] recipient: [27:179:2198] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:182:2199] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:141:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:144:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:145:2057] recipient: [30:143:2166] Leader for TabletID 72057594037927937 is [30:146:2167] sender: [30:147:2057] recipient: [30:143:2166] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! 
new actor is[30:146:2167] Leader for TabletID 72057594037927937 is [30:146:2167] sender: [30:216:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:141:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:144:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:145:2057] recipient: [31:143:2166] Leader for TabletID 72057594037927937 is [31:146:2167] sender: [31:147:2057] recipient: [31:143:2166] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:146:2167] Leader for TabletID 72057594037927937 is [31:146:2167] sender: [31:216:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:142:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:145:2057] recipient: [32:144:2166] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:146:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:147:2167] sender: [32:148:2057] recipient: [32:144:2166] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:147:2167] Leader for TabletID 72057594037927937 is [32:147:2167] sender: [32:217:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:147:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:150:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:151:2057] recipient: [33:149:2171] Leader for TabletID 72057594037927937 is [33:152:2172] sender: [33:153:2057] recipient: [33:149:2171] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! 
new actor is[33:152:2172] Leader for TabletID 72057594037927937 is [33:152:2172] sender: [33:222:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:147:2057] recipient: [34:97:2132] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:150:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:151:2057] recipient: [34:149:2171] Leader for TabletID 72057594037927937 is [34:152:2172] sender: [34:153:2057] recipient: [34:149:2171] !Reboot 72057594037927937 (actor [34:105:2137]) rebooted! !Reboot 72057594037927937 (actor [34:105:2137]) tablet resolver refreshed! new actor is[34:152:2172] Leader for TabletID 72057594037927937 is [34:152:2172] sender: [34:222:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:148:2057] recipient: [35:97:2132] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:151:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:152:2057] recipient: [35:150:2171] Leader for TabletID 72057594037927937 is [35:153:2172] sender: [35:154:2057] recipient: [35:150:2171] !Reboot 72057594037927937 (actor [35:105:2137]) rebooted! !Reboot 72057594037927937 (actor [35:105:2137]) tablet resolver refreshed! new actor is[35:153:2172] Leader for TabletID 72057594037927937 is [35:153:2172] sender: [35:223:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:153:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:156:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:157:2057] recipient: [36:155:2176] Leader for TabletID 72057594037927937 is [36:158:2177] sender: [36:159:2057] recipient: [36:155:2176] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! 
new actor is[36:158:2177] Leader for TabletID 72057594037927937 is [36:158:2177] sender: [36:228:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:153:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:156:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:157:2057] recipient: [37:155:2176] Leader for TabletID 72057594037927937 is [37:158:2177] sender: [37:159:2057] recipient: [37:155:2176] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! new actor is[37:158:2177] Leader for TabletID 72057594037927937 is [37:158:2177] sender: [37:228:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:154:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:157:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:158:2057] recipient: [38:156:2176] Leader for TabletID 72057594037927937 is [38:159:2177] sender: [38:160:2057] recipient: [38:156:2176] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! 
new actor is[38:159:2177] Leader for TabletID 72057594037927937 is [38:159:2177] sender: [38:229:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::PassLimit >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> KqpNewEngine::Select1 >> KqpRanges::WhereInSubquery >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SimpleKey_Negated >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::TupleParameter >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksEffects >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::MultiEffects >> KqpNewEngine::PruneWritePartitions [GOOD] >> KqpNewEngine::PruneEffectPartitions >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_PgValue >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseFirstKeyOptimized >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers >> KqpRanges::UpdateWhereInNoFullScan [GOOD] >> KqpRanges::UpdateWhereInWithNull >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:162:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:162:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 6:2167] sender: [29:216:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:142:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:146:2057] recipient: [30:144:2166] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:148:2057] recipient: [30:144:2166] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:147:2167] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:217:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:147:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:151:2057] recipient: [31:149:2171] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:153:2057] recipient: [31:149:2171] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! 
new actor is[31:152:2172] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:222:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:147:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:151:2057] recipient: [32:149:2171] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:153:2057] recipient: [32:149:2171] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:152:2172] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:222:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:148:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:152:2057] recipient: [33:150:2171] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:154:2057] recipient: [33:150:2171] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:153:2172] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:223:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:153:2057] recipient: [34:97:2132] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:156:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:157:2057] recipient: [34:155:2176] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:159:2057] recipient: [34:155:2176] !Reboot 72057594037927937 (actor [34:105:2137]) rebooted! !Reboot 72057594037927937 (actor [34:105:2137]) tablet resolver refreshed! 
new actor is[34:158:2177] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:228:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:153:2057] recipient: [35:97:2132] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:156:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:157:2057] recipient: [35:155:2176] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:159:2057] recipient: [35:155:2176] !Reboot 72057594037927937 (actor [35:105:2137]) rebooted! !Reboot 72057594037927937 (actor [35:105:2137]) tablet resolver refreshed! new actor is[35:158:2177] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:228:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:155:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:158:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:159:2057] recipient: [36:157:2177] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:161:2057] recipient: [36:157:2177] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! new actor is[36:160:2178] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:230:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:160:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:163:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:164:2057] recipient: [37:162:2182] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:166:2057] recipient: [37:162:2182] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! 
new actor is[37:165:2183] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:235:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:160:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:163:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:164:2057] recipient: [38:162:2182] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:166:2057] recipient: [38:162:2182] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! new actor is[38:165:2183] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:235:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:161:2057] recipient: [39:97:2132] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:164:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:165:2057] recipient: [39:163:2182] Leader for TabletID 72057594037927937 is [39:166:2183] sender: [39:167:2057] recipient: [39:163:2182] !Reboot 72057594037927937 (actor [39:105:2137]) rebooted! !Reboot 72057594037927937 (actor [39:105:2137]) tablet resolver refreshed! 
new actor is[39:166:2183] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:106:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:139:2057] recipient: [40:14:2061] >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::JoinWithPrecompute >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpSort::PassLimit [GOOD] >> KqpSort::Offset >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns >> KqpRanges::WhereInSubquery [GOOD] >> KqpRanges::ValidatePredicatesDataQuery >> KqpNewEngine::PureTxMixedWithDeferred >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LocksNoMutations >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::SelectNotAllElements >> KqpRanges::IsNotNullInJsonValue2 [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable >> KqpNewEngine::PruneEffectPartitions [GOOD] >> KqpNewEngine::PrimaryView >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNullPartial >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FullScanCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:29.651327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:29.651402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:29.651440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:29.651461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:29.651509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:29.651545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:29.651586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:29.651835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:29.708889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:29.708944Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:29.718461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:29.718566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:29.718731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:29.726151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:29.726303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:29.726749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:29.727022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:29.730966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:29.732437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:29.732496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:29.732611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:29.732645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:29.732704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:29.732797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:29.738107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 
Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:29.835669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:29.835903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.836119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:29.836332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:29.836392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.838479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:29.838610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:29.838789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.838835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:29.838875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:29.838915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:29.840750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.840801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:29.840830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:29.842387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.842417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.842458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:29.842500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:29.845671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:29.847216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to 
tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:29.847375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:29.848042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:29.848149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:29.848187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:29.848353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:29.848384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:29.848511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:29.848578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:29.850744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:29.850795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:29.850976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:29.851014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:29.851362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:29.851401Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:29.851484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:29.851532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:29.851579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:29.851650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:29.851685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:29.851715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:29.851782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-04-07T14:13:29.851825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:29.851857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... " ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 
LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:35.798799Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:14:35.799054Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 264us result status StatusSuccess 2025-04-07T14:14:35.799868Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:14:35.821248Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1074:2822] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:14:35.821359Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:997:2822] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:14:35.821521Z node 34 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1074:2822] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035275765628 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035275765628 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744035275765628 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:14:35.827427Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1074:2822] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-07T14:14:35.827551Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:997:2822] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar >> KqpSort::Offset [GOOD] >> KqpSort::OffsetPk >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::ReadAfterWrite >> TKeyValueTest::TestConcatToLongKey [GOOD] >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::SimpleRange >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:147:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:147:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:152:2173] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:156:2057] recipient: [7:152:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:155:2174] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:225:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:159:2057] recipient: [8:157:2178] Leader for TabletID 72057594037927937 is [8:160:2179] sender: [8:161:2057] recipient: [8:157:2178] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:160:2179] Leader for TabletID 72057594037927937 is [8:160:2179] sender: [8:230:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:159:2057] recipient: [9:157:2178] Leader for TabletID 72057594037927937 is [9:160:2179] sender: [9:161:2057] recipient: [9:157:2178] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:160:2179] Leader for TabletID 72057594037927937 is [9:160:2179] sender: [9:230:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:162:2057] recipient: [10:160:2180] Leader for TabletID 72057594037927937 is [10:163:2181] sender: [10:164:2057] recipient: [10:160:2180] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:163:2181] Leader for TabletID 72057594037927937 is [10:163:2181] sender: [10:233:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:164:2057] recipient: [11:162:2182] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:166:2057] recipient: [11:162:2182] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:165:2183] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:236:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... sender: [33:173:2057] recipient: [33:172:2189] Leader for TabletID 72057594037927937 is [33:174:2190] sender: [33:175:2057] recipient: [33:172:2189] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:174:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:141:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:145:2057] recipient: [36:143:2166] Leader for TabletID 72057594037927937 is [36:146:2167] sender: [36:147:2057] recipient: [36:143:2166] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! 
new actor is[36:146:2167] Leader for TabletID 72057594037927937 is [36:146:2167] sender: [36:216:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:141:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:145:2057] recipient: [37:144:2166] Leader for TabletID 72057594037927937 is [37:146:2167] sender: [37:147:2057] recipient: [37:144:2166] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! new actor is[37:146:2167] Leader for TabletID 72057594037927937 is [37:146:2167] sender: [37:217:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:142:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:145:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:146:2057] recipient: [38:144:2166] Leader for TabletID 72057594037927937 is [38:147:2167] sender: [38:148:2057] recipient: [38:144:2166] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! new actor is[38:147:2167] Leader for TabletID 72057594037927937 is [38:147:2167] sender: [38:217:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:147:2057] recipient: [39:97:2132] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:150:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:151:2057] recipient: [39:149:2171] Leader for TabletID 72057594037927937 is [39:152:2172] sender: [39:153:2057] recipient: [39:149:2171] !Reboot 72057594037927937 (actor [39:105:2137]) rebooted! !Reboot 72057594037927937 (actor [39:105:2137]) tablet resolver refreshed! 
new actor is[39:152:2172] Leader for TabletID 72057594037927937 is [39:152:2172] sender: [39:222:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:106:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:139:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:147:2057] recipient: [40:97:2132] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:149:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:151:2057] recipient: [40:150:2171] Leader for TabletID 72057594037927937 is [40:152:2172] sender: [40:153:2057] recipient: [40:150:2171] !Reboot 72057594037927937 (actor [40:105:2137]) rebooted! !Reboot 72057594037927937 (actor [40:105:2137]) tablet resolver refreshed! new actor is[40:152:2172] Leader for TabletID 72057594037927937 is [40:152:2172] sender: [40:222:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:101:2057] recipient: [41:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:101:2057] recipient: [41:99:2133] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:106:2057] recipient: [41:99:2133] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:139:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:150:2057] recipient: [41:97:2132] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:153:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:154:2057] recipient: [41:152:2173] Leader for TabletID 72057594037927937 is [41:155:2174] sender: [41:156:2057] recipient: [41:152:2173] !Reboot 72057594037927937 (actor [41:105:2137]) rebooted! !Reboot 72057594037927937 (actor [41:105:2137]) tablet resolver refreshed! new actor is[41:155:2174] Leader for TabletID 72057594037927937 is [41:155:2174] sender: [41:225:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:106:2057] recipient: [42:99:2133] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:139:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:155:2057] recipient: [42:97:2132] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:157:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:159:2057] recipient: [42:158:2178] Leader for TabletID 72057594037927937 is [42:160:2179] sender: [42:161:2057] recipient: [42:158:2178] !Reboot 72057594037927937 (actor [42:105:2137]) rebooted! !Reboot 72057594037927937 (actor [42:105:2137]) tablet resolver refreshed! 
new actor is[42:160:2179] Leader for TabletID 72057594037927937 is [42:160:2179] sender: [42:230:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:101:2057] recipient: [43:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:101:2057] recipient: [43:99:2133] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:106:2057] recipient: [43:99:2133] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:139:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:155:2057] recipient: [43:97:2132] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:158:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:159:2057] recipient: [43:157:2178] Leader for TabletID 72057594037927937 is [43:160:2179] sender: [43:161:2057] recipient: [43:157:2178] !Reboot 72057594037927937 (actor [43:105:2137]) rebooted! !Reboot 72057594037927937 (actor [43:105:2137]) tablet resolver refreshed! new actor is[43:160:2179] Leader for TabletID 72057594037927937 is [43:160:2179] sender: [43:230:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:101:2057] recipient: [44:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:101:2057] recipient: [44:99:2133] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:106:2057] recipient: [44:99:2133] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:139:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:156:2057] recipient: [44:97:2132] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:158:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:160:2057] recipient: [44:159:2178] Leader for TabletID 72057594037927937 is [44:161:2179] sender: [44:162:2057] recipient: [44:159:2178] !Reboot 72057594037927937 (actor [44:105:2137]) rebooted! !Reboot 72057594037927937 (actor [44:105:2137]) tablet resolver refreshed! 
new actor is[44:161:2179] Leader for TabletID 72057594037927937 is [44:161:2179] sender: [44:231:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:106:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:139:2057] recipient: [45:14:2061] >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::JoinSameKey >> KqpNewEngine::ContainerRegistryCombiner >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksInRoTx >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpNewEngine::PkRangeSelect1 >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::LookupColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-04-07T14:13:57.712716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574310063896427:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:57.712843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001a24/r3tmp/tmpfFiyLC/pdisk_1.dat 2025-04-07T14:13:58.013728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:58.050816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:58.050911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:58.054446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16254, node 1 2025-04-07T14:13:58.195755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:13:58.195774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:13:58.195908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:13:58.196327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63086 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:13:58.567769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.571806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.572988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.574772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:13:58.574952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:13:58.574973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:13:58.575732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:13:58.576352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:13:58.577489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.580799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.580825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:13:58.581032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:13:58.582323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.582559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:13:58.582670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:13:58.582713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:13:58.582765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:13:58.584584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:13:58.584635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:13:58.584645Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:13:58.584715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:13:58.639040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.639305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.639376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:13:58.639456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:13:58.639471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:13:58.643638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:13:58.643801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.644002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.644434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:13:58.644481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:13:58.644528Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:13:58.644596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:63086 2025-04-07T14:13:58.801412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.801637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.801660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.803083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:13:58.803215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:13:58.804082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:13:58.806398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035238848, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:13:58.806434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035238848, at schemeshard: 72057594046644480 2025-04-07T14:13:58.806693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:13:58.806770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:13:58.806812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:13:58.808185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:13:58.808327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:13:58.808918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:13:58.808957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:13:58.808971Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:13:58.809022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T14:13:58.837112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecordsStreamWithSingleShard, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:13:58.837595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:13:58.839737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user ... 
391722Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:14:36.391736Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:14:36.391828Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:14:36.452310Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.452622Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:36.452649Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.452751Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:14:36.452841Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:14:36.452856Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2025-04-07T14:14:36.455056Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:14:36.455232Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:36.455510Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:36.456454Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:14:36.456491Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:14:36.456508Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:14:36.456610Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:20941 2025-04-07T14:14:36.705979Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.706181Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:36.706208Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.708381Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2025-04-07T14:14:36.708539Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:36.713981Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035276760, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:36.714017Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1744035276760, at schemeshard: 72057594046644480 2025-04-07T14:14:36.714192Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:14:36.714270Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:14:36.714301Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2025-04-07T14:14:36.715809Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:36.715951Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:36.716480Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:14:36.716530Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:14:36.716574Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:14:36.716638Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2025-04-07T14:14:36.718375Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:14:36.760686Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecordsWithBigSeqno, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.761035Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:14:36.763429Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecordsWithBigSeqno 2025-04-07T14:14:36.763653Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:36.763882Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:36.763963Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet# 72057594046644480 2025-04-07T14:14:36.764270Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-07T14:14:36.764697Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 
PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:36.764753Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:36.764789Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:14:36.765044Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:36.765065Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:36.765075Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:14:36.771222Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:36.771491Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:14:36.771534Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2025-04-07T14:14:36.774521Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:14:36.832967Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:36.835865Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2025-04-07T14:14:36.835905Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2025-04-07T14:14:36.841499Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.846520Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035276893, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:14:36.846593Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1744035276893, at tablet: 72057594046644480 2025-04-07T14:14:36.846780Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:14:36.848680Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:14:36.849074Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:14:36.849133Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:14:36.849222Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-04-07T14:14:36.849273Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:14:36.849419Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2025-04-07T14:14:36.851765Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:36.851844Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:36.851861Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:14:36.852114Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:14:36.852163Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:14:36.852175Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:14:36.852215Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 |95.9%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ReadRangeWithParams >> KqpKv::ReadRows_PgKey [GOOD] >> KqpKv::ReadRows_Decimal >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::InWithCast >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> KqpRanges::NullInKey >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpNewEngine::PrimaryView [GOOD] >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpSqlIn::TupleLiteral [GOOD] >> KqpSqlIn::TupleSelect >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-07T14:12:54.078857Z :WriteRAW INFO: Random seed for debugging is 1744035174078828 2025-04-07T14:12:54.425758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574035685364576:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.425839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.467719Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574038142215823:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.638124Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a4/r3tmp/tmpeP0nIJ/pdisk_1.dat 
2025-04-07T14:12:54.649061Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.654789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.865069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:54.902704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.902790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.903253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.903308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.905959Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:54.906086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.909925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16588, node 1 2025-04-07T14:12:55.051303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/0010a4/r3tmp/yandexi1RoXs.tmp 2025-04-07T14:12:55.051335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/0010a4/r3tmp/yandexi1RoXs.tmp 2025-04-07T14:12:55.051472Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/0010a4/r3tmp/yandexi1RoXs.tmp 2025-04-07T14:12:55.051582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:55.216325Z INFO: TTestServer started on Port 2592 GrpcPort 16588 TClient is connected to server localhost:2592 PQClient connected to localhost:16588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:55.532892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:55.555765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-07T14:12:55.625567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-07T14:12:57.371721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574048570267228:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.371830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.371838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574048570267250:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.374664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-07T14:12:57.389545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574048570267257:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-07T14:12:57.580056Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574051027117920:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.580287Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGY4YTlmZTMtM2I3NWNmODUtZjk1NGMxMjUtMjJlMGJiYjU=, ActorId: [2:7490574051027117880:2280], ActorState: ExecuteState, TraceId: 01jr89hs0s1bb9p0zhzmxtbke5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.582241Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.584205Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574048570267365:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.585561Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M2MDA3MWYtM2UwNjI0Y2EtYTZhYmQwMC0xOGRhNDcxMA==, ActorId: [1:7490574048570267225:2302], ActorState: ExecuteState, TraceId: 01jr89hrws36zhddda4c55fz7r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.585931Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.592022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.707239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.821350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16588", true, true, 1000); 2025-04-07T14:12:58.189067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr89hsd177cvyvahvtjznjma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNkOTg0ODYtNmIwODU2MDYtZmYzYmZhNzktZTg5YTEwNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574052865235045:2974] 2025-04-07T14:12:59.423658Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574035685364576:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.424530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:59.467335Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574038142215823:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.467392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:13:04.562318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16588 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T14:13:04.675199Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16588 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 Bur ... ourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:14:40.748752Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:14:40.751623Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:14:40.936509Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T14:14:40.937068Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490574493510840472:2462] connected; active server actors: 1 2025-04-07T14:14:40.937155Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-07T14:14:40.937176Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-07T14:14:40.940411Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490574493510840472:2462] disconnected; active server actors: 1 2025-04-07T14:14:40.940435Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7490574493510840472:2462] disconnected no session 2025-04-07T14:14:41.095529Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:14:41.095585Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:14:41.095610Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574493510840422:2462] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-07T14:14:41.095649Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:14:41.096760Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-04-07T14:14:41.098168Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 2025-04-07T14:14:41.099846Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744035281099 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:14:41.099971Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6238b81b-9625859e-a58bea25-2eb1b9c8_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T14:14:41.096605Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:14:41.096673Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7490574497805807796:2462], now have 1 active actors on pipe 2025-04-07T14:14:41.096958Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:14:41.096987Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:14:41.100172Z :INFO: [] MessageGroupId [src] SessionId [src|6238b81b-9625859e-a58bea25-2eb1b9c8_0] Write session: close. Timeout = 0 ms 2025-04-07T14:14:41.097096Z node 16 :PERSQUEUE INFO: new Cookie src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-07T14:14:41.097199Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-07T14:14:41.100217Z :INFO: [] MessageGroupId [src] SessionId [src|6238b81b-9625859e-a58bea25-2eb1b9c8_0] Write session will now close 2025-04-07T14:14:41.097289Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:14:41.097789Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-07T14:14:41.100263Z :DEBUG: [] MessageGroupId [src] SessionId [src|6238b81b-9625859e-a58bea25-2eb1b9c8_0] Write session: aborting 2025-04-07T14:14:41.097810Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-07T14:14:41.097904Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:14:41.100747Z :INFO: [] MessageGroupId [src] SessionId [src|6238b81b-9625859e-a58bea25-2eb1b9c8_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:14:41.100798Z :DEBUG: [] MessageGroupId [src] SessionId [src|6238b81b-9625859e-a58bea25-2eb1b9c8_0] Write session: destroy 2025-04-07T14:14:41.101579Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 grpc read done: success: 0 data: 2025-04-07T14:14:41.101603Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 grpc read failed 2025-04-07T14:14:41.101632Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 grpc closed 2025-04-07T14:14:41.101654Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6238b81b-9625859e-a58bea25-2eb1b9c8_0 is DEAD 2025-04-07T14:14:41.102680Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:14:41.103179Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:14:41.103244Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7490574497805807796:2462] destroyed 2025-04-07T14:14:41.103287Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:14:41.131069Z :INFO: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Starting read session 2025-04-07T14:14:41.131123Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Starting cluster discovery 2025-04-07T14:14:41.131324Z :INFO: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19531: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19531
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19531. " 2025-04-07T14:14:41.131366Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Restart cluster discovery in 0.009589s 2025-04-07T14:14:41.144123Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Starting cluster discovery 2025-04-07T14:14:41.144498Z :INFO: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19531: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19531
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19531. " 2025-04-07T14:14:41.144572Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Restart cluster discovery in 0.012751s 2025-04-07T14:14:41.157851Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Starting cluster discovery 2025-04-07T14:14:41.158017Z :INFO: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19531: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19531
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19531. " 2025-04-07T14:14:41.158040Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Restart cluster discovery in 0.022553s 2025-04-07T14:14:41.180934Z :DEBUG: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Starting cluster discovery 2025-04-07T14:14:41.181268Z :NOTICE: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19531: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19531
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19531. " } 2025-04-07T14:14:41.181469Z :NOTICE: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19531: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19531
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19531. " } 2025-04-07T14:14:41.181654Z :INFO: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Closing read session. Close timeout: 0.000000s 2025-04-07T14:14:41.181791Z :NOTICE: [/Root] [/Root] [48aa6b04-3114f8f4-fb189876-81d6f30] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:14:41.493980Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-07T14:14:41.525586Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:14:41.578063Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:14:41.636925Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:14:41.752760Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-07T14:14:41.884636Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7490574497805807827:2476], TxId: 281474976715684, task: 1, CA Id [15:7490574497805807825:2476]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 31544, MsgBus: 8140 2025-04-07T14:14:10.998436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574365158754786:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:10.998597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c0/r3tmp/tmpPdV2C4/pdisk_1.dat 2025-04-07T14:14:11.223824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31544, node 1 2025-04-07T14:14:11.295582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:11.295611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:11.295617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:11.295778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:11.326678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:11.326795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:11.328537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8140 TClient is connected to server localhost:8140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:11.747260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.770647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.898789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.019459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.070136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:13.235369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574378043658384:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.235484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.451322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.471304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.488990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.511004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.531387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.567658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.601154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574378043658876:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.601227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.601257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574378043658881:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.604020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:13.610895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574378043658883:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 62552, MsgBus: 10966 2025-04-07T14:14:14.972244Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574381429374794:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:14.972347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c0/r3tmp/tmpwY45b1/pdisk_1.dat 2025-04-07T14:14:15.025470Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62552, node 2 2025-04-07T14:14:15.062487Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.062502Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.062506Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.062561Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:15.073902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.074001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.075663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10966 TClient is connected to server localhost:10966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:15.328963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.334184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.399855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:15.512884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.558943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.193337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574394314278396:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.193430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.210104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.235102Z node 2 :FLAT_TX_SCHEMESHARD ... 7T14:14:32.859285Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574457550856517:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:32.859373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:32.907076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:32.944532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:32.979347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:33.006947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:33.034392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:33.081210Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:33.120294Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574461845824305:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:33.120368Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:33.120427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574461845824310:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:33.123676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:33.134972Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574461845824312:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:34.279809Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574444665952943:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:34.279868Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64335, MsgBus: 8024 2025-04-07T14:14:35.099656Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574471774178970:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:35.109453Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c0/r3tmp/tmp4dnYuA/pdisk_1.dat 2025-04-07T14:14:35.206725Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:35.222802Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:35.222905Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:35.224553Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64335, node 6 2025-04-07T14:14:35.271022Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:35.271043Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:35.271068Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:35.271165Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8024 TClient is connected to server localhost:8024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:35.745453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:35.756562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:35.813613Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:35.985672Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:36.067625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.957334Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574484659082544:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.957454Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.988488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.027185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.063658Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.098764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.137717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.176884Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.270572Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574488954050339:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.270684Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.271135Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574488954050344:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.276995Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:39.289399Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574488954050346:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:40.095053Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574471774178970:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:40.095141Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:40.503998Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:40.584123Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:40.621233Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpNewEngine::FullScanCount [GOOD] >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::OnlineRO_Consistent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] Test command err: Trying to start YDB, gRPC: 27985, MsgBus: 6148 2025-04-07T14:14:11.334725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574368353894039:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:11.334820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b6/r3tmp/tmp0yuffl/pdisk_1.dat 2025-04-07T14:14:11.660325Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27985, node 1 2025-04-07T14:14:11.727615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:11.727639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:11.727652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:11.727762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:11.727808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:11.727912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:11.729536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6148 TClient is connected to server localhost:6148 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:12.109986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.129101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.232794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.335246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.381120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:13.751575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574376943830348:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.751706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.997400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.018316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.044583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.082250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.107101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.133244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.171622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574381238798136:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.171719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.171791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574381238798141:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:14.175085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:14.184330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574381238798143:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 4389, MsgBus: 23851 2025-04-07T14:14:15.765552Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574386384246454:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.765650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b6/r3tmp/tmpcIWLOB/pdisk_1.dat 2025-04-07T14:14:15.876643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.876807Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.878478Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:15.878740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4389, node 2 2025-04-07T14:14:15.935525Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.935548Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.935556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.935643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23851 TClient is connected to server localhost:23851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:16.354225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.370456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.417290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:16.561119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.614914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:18.269710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574399269150037:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.269812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.296709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.324259Z node 2 :FLAT_TX_SCHEMESHARD W ... rd: 72057594046644480 waiting... 2025-04-07T14:14:32.392069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:32.462605Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:34.873161Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574467316384797:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.873256Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.919452Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.950326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.983613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.011498Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.043949Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.115252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.168934Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574471611352592:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.169053Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.169057Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574471611352597:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.172924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:35.183909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574471611352599:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:36.417266Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574454431481228:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:36.417438Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7332, MsgBus: 15017 2025-04-07T14:14:37.496971Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574480402918641:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:37.497057Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b6/r3tmp/tmpig8Y5t/pdisk_1.dat 2025-04-07T14:14:37.647593Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:37.686557Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:37.686674Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:37.688507Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7332, node 6 2025-04-07T14:14:37.745252Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:37.745279Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:37.745291Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:37.745420Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15017 TClient is connected to server localhost:15017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:38.299977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:38.305974Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:38.314837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.379636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.603505Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.683443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.288604Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574497582789508:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.288734Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.344131Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.394467Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.429042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.464444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.505013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.560874Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.610115Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574497582790004:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.610222Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.610318Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574497582790009:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.614081Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:41.622890Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574497582790011:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:42.497381Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574480402918641:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.497462Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::JoinSameKey [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LiteralKeys >> KqpNewEngine::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 16917, MsgBus: 9968 2025-04-07T14:14:05.853707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574342004299733:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:05.853909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c2/r3tmp/tmpUU9G6q/pdisk_1.dat 2025-04-07T14:14:06.191853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16917, node 1 2025-04-07T14:14:06.251454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:06.251553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:06.253170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:06.391467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:06.391490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:06.391497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:06.391585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9968 TClient is connected to server localhost:9968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:07.009918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:07.033190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:07.143128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:07.237987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:07.281903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:08.095363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574354889203176:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:08.095491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:08.528929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.551020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.571177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.591025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.610059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.675023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:08.727153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574354889203685:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:08.727229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:08.727445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574354889203690:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:08.731937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:08.739145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574354889203692:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7783, MsgBus: 8236 2025-04-07T14:14:10.835655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574362219209439:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:10.835710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c2/r3tmp/tmpJJULrS/pdisk_1.dat 2025-04-07T14:14:10.921372Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7783, node 2 2025-04-07T14:14:10.940819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:10.940905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:10.942163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:10.970481Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:10.970502Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:10.970508Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:10.970588Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8236 TClient is connected to server localhost:8236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:11.325886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.346880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.421084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:11.560826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.632391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:13.430267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574375104113032:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.430343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.458228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.479830Z node 2 :FLAT_TX_SCHEMESHARD WARN ... abaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.528467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.558351Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.589466Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.620929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.653172Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.722644Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.800552Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574467705140473:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.800646Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.800730Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574467705140478:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.804356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:34.821375Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574467705140480:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:35.940640Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574450525269084:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:35.940722Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31796, MsgBus: 29474 2025-04-07T14:14:37.278121Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574478967226974:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:37.278220Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012c2/r3tmp/tmpb8t0A7/pdisk_1.dat 2025-04-07T14:14:37.360090Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:37.408966Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:37.409038Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:37.410473Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31796, node 6 2025-04-07T14:14:37.452297Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:37.452322Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:37.452333Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:37.452475Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29474 TClient is connected to server localhost:29474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:38.021566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.028153Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:38.037551Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.102463Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.313383Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.393875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.148586Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574496147097853:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.155582Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.166793Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.239218Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.318489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.354202Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.392765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.485976Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.547420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574496147098348:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.547519Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.547949Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574496147098353:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.553049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:41.564468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574496147098355:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:42.278224Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574478967226974:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.278308Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 20022, MsgBus: 1835 2025-04-07T14:14:21.331451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574412565867562:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:21.331528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129e/r3tmp/tmpv9N0ne/pdisk_1.dat 2025-04-07T14:14:21.600886Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20022, node 1 2025-04-07T14:14:21.661099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:21.661120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:21.661126Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:21.661206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:21.703378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:21.703481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:21.704989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1835 TClient is connected to server localhost:1835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:22.077112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:23.666404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574421155802775:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.666499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.880762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574421155802804:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.880860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.893835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3315, MsgBus: 31183 2025-04-07T14:14:24.585151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574423409467328:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:24.585240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129e/r3tmp/tmpmW4fDI/pdisk_1.dat 2025-04-07T14:14:24.716811Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:24.721856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:24.721934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3315, node 2 2025-04-07T14:14:24.725211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:24.761423Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:24.761446Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:24.761465Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:24.761542Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31183 TClient is connected to server localhost:31183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:25.145563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.169038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:25.245749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.388133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.451381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.078939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574436294370908:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.079021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.107249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.132407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.155539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.176067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.197926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.269769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.299720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574436294371405:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.299790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.299830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574436294371410:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.302008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:27.307607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574436294371412:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:28.252062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29219, MsgBus: 18572 2025-04-07T14:14:28.927626Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574440326555570:2051];s ... ch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:31.684595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16863, MsgBus: 28483 2025-04-07T14:14:32.597654Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574459736352766:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:32.597735Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129e/r3tmp/tmpEZjLZj/pdisk_1.dat 2025-04-07T14:14:32.694468Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:32.701274Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:32.701356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:32.702978Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16863, node 4 2025-04-07T14:14:32.747761Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:32.747796Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:32.747804Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:32.747899Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28483 TClient is connected to server localhost:28483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:33.153569Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:33.158784Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:35.777253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574472621255283:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.777348Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.884981Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574472621255312:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.885080Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.904912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28554, MsgBus: 13063 2025-04-07T14:14:36.874300Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574473780060799:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:36.874351Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129e/r3tmp/tmpY8DGqY/pdisk_1.dat 2025-04-07T14:14:37.032563Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:37.058727Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:37.058814Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:37.061283Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28554, node 5 2025-04-07T14:14:37.108238Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:37.108272Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:37.108282Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:37.108427Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13063 TClient is connected to server localhost:13063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:37.608318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:40.142802Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574490959930600:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:40.142909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:40.153370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30287, MsgBus: 5329 2025-04-07T14:14:40.970070Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574490962446968:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:40.971009Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129e/r3tmp/tmpnHyNyr/pdisk_1.dat 2025-04-07T14:14:41.071913Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:41.101955Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:41.102044Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:41.104094Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30287, node 6 2025-04-07T14:14:41.169288Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:41.169314Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:41.169323Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:41.169427Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5329 TClient is connected to server localhost:5329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:41.659742Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:41.664558Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:44.487561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> KqpKv::ReadRows_Decimal [GOOD] >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 10279, MsgBus: 15186 2025-04-07T14:14:11.177669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574368833357881:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:11.177752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ba/r3tmp/tmpjxBLV4/pdisk_1.dat 2025-04-07T14:14:11.439427Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10279, node 1 2025-04-07T14:14:11.500016Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:11.500044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:11.500053Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:11.500170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:11.519880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:11.520046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:11.521590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15186 TClient is connected to server localhost:15186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:11.938159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:11.954998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.044734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.166004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:12.213464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:13.445246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574377423294187:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.445362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.657172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.677683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.698056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.720742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.745669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.814120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.847362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574377423294684:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.847404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.847451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574377423294689:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.850059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:13.857502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574377423294691:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:14.701411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.828980Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 24498, MsgBus: 6132 2025-04-07T14:14:15.395933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574384203079596:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.396003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ba/r3tmp/tmpIcyk5F/pdisk_1.dat 2025-04-07T14:14:15.466748Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24498, node 2 2025-04-07T14:14:15.497488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.497582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.499492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:15.520055Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.520080Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.520089Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.520169Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6132 TClient is connected to server localhost:6132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:15.890525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.900948Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:15.909026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.982203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.119746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.199677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.873721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574392793015883:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.873796Z node 2 :KQP_WORKLOAD_SE ... : 72057594046644480 waiting... 2025-04-07T14:14:32.408179Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:32.469849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:34.648624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574466208448830:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.648720Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.680551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.710997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.743274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.772247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.801537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.833063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.870686Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574466208449319:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.870760Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.870765Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574466208449325:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.873656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:34.882202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574466208449327:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:36.369787Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574453323545228:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:36.369851Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20020, MsgBus: 18619 2025-04-07T14:14:38.980869Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574482186398587:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:38.980938Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ba/r3tmp/tmptdQ3Br/pdisk_1.dat 2025-04-07T14:14:39.098201Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:39.128637Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:39.128774Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:39.130653Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20020, node 6 2025-04-07T14:14:39.188444Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:39.188466Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:39.188476Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:39.188618Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18619 TClient is connected to server localhost:18619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:39.779455Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:39.789654Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:39.797583Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:39.880517Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:40.069252Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:40.152522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.907917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574499366269479:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.908026Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.928498Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.973242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.015831Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.050351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.090878Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.162413Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.264589Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574503661237279:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.264687Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574503661237284:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.264715Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.268657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:43.283141Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574503661237286:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:43.980121Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574482186398587:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:43.980215Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::NullInKeySuffix >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ScalarFunctions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 1459, MsgBus: 7127 2025-04-07T14:14:18.794631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574397146721862:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:18.794991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a9/r3tmp/tmp6X5gkp/pdisk_1.dat 2025-04-07T14:14:19.047514Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1459, node 1 2025-04-07T14:14:19.108987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:19.109018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:19.109032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:19.109136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:19.142273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:19.142387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:19.144056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7127 TClient is connected to server localhost:7127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:19.526377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:21.084247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410031624242:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.084326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.298478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.425309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410031624349:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.425377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.425542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410031624354:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.429366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:21.436521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574410031624356:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 21777, MsgBus: 7933 2025-04-07T14:14:22.251332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574417190172845:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:22.251944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a9/r3tmp/tmpzCR4zc/pdisk_1.dat 2025-04-07T14:14:22.348126Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21777, node 2 2025-04-07T14:14:22.395365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:22.395437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:22.396533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:22.402202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:22.402222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:22.402232Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:22.402316Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7933 TClient is connected to server localhost:7933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:22.763079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.041185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574430075075353:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:25.043024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:25.049269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2025-04-07T14:14:25.170523Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 16950, MsgBus: 14765 2025-04-07T14:14:26.003413Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574432843962617:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:26.003824Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a9/r3tmp/tmpwYwnaR/pdisk_1.dat 2025-04-07T14:14:26.090282Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16950, node 3 2025-04-07T14:14:26.131937Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:26.132001Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:26.133529Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:26.139673Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:26.139691Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:26.139711Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:26.139798Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14765 TClient is connected to server localhost:14765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:26.485385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
ode 4, TabletId: 72075186224037930 not found 2025-04-07T14:14:35.825869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.902325Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037931 not found 2025-04-07T14:14:35.906137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.996077Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037932 not found 2025-04-07T14:14:36.012009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715748:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.107867Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037933 not found 2025-04-07T14:14:36.112461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.205213Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037934 not found 2025-04-07T14:14:36.207402Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.286466Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037935 not found 2025-04-07T14:14:36.291269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.418442Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037936 not found 2025-04-07T14:14:36.421729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.498813Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037937 not found 2025-04-07T14:14:36.504333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715758:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.582198Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037938 not found 2025-04-07T14:14:36.585367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.718137Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 17606, MsgBus: 12602 2025-04-07T14:14:37.537071Z node 5 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574481129746647:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:37.537135Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a9/r3tmp/tmpEFHd5X/pdisk_1.dat 2025-04-07T14:14:37.655524Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:37.655618Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:37.656815Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:37.669995Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17606, node 5 2025-04-07T14:14:37.766585Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:37.766614Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:37.766624Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:37.766759Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12602 TClient is connected to server localhost:12602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:38.263497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:38.270357Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:40.992029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.100254Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2025-04-07T14:14:41.104908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.198691Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found 2025-04-07T14:14:41.205514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.331716Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found Trying to start YDB, gRPC: 22186, MsgBus: 23199 2025-04-07T14:14:42.189368Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574503348915575:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.189522Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a9/r3tmp/tmpok1RNw/pdisk_1.dat 2025-04-07T14:14:42.290637Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:42.316495Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:42.316589Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:42.318308Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22186, node 6 2025-04-07T14:14:42.366252Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:42.366384Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:42.366396Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:42.366492Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23199 TClient is connected to server localhost:23199 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:42.883766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.891990Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:45.696308Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574516233818094:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.696503Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.706566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.818877Z node 6 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-04-07T14:14:45.828432Z node 6 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpNewEngine::LookupColumns [GOOD] >> KqpNewEngine::StreamLookupWithView >> KqpNotNullColumns::ReplaceNotNullPk >> KqpNewEngine::MultiSelect >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> KqpReturning::ReturningTwice >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::DeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] Test command err: Trying to start YDB, gRPC: 25737, MsgBus: 21108 2025-04-07T14:14:10.977907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574362129572912:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:10.977993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012bf/r3tmp/tmpRDHkzb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25737, node 1 2025-04-07T14:14:11.223431Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:11.225509Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:11.235643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:11.258171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:11.258212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:11.258239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:11.258338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:11.317347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:11.317465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:11.319142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21108 TClient is connected to server localhost:21108 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:11.665121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:11.689855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:11.789133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.902792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:11.973368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:13.204096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574375014476508:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.204183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.369025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.387094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.404312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.426939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.449256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.473168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:13.515519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574375014476999:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.515582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.515586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574375014477004:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:13.517986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:13.524959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574375014477006:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:14.432210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.619335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.722201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:14.814754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.025477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9524, MsgBus: 30315 2025-04-07T14:14:15.958454Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574386016347930:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.958508Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012bf/r3tmp/tmpUgxm08/pdisk_1.dat 2025-04-07T14:14:16.043714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:16.065384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:16.065487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:16.066502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9524, node 2 2025-04-07T14:14:16.114857Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:16.114877Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:16.114886Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:16.114975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30315 TClient is connected to server localhost:30315 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:16.467551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.482966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.529224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:1 ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:35.873537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:35.964010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.238751Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574482513572669:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.238863Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.288048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.360440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.428486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.465012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.500040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.577702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.624598Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574482513573170:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.624722Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.625063Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574482513573175:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.629360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:38.640000Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574482513573177:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:39.932957Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574465333701808:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:39.933035Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1398, MsgBus: 18944 2025-04-07T14:14:41.038376Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574497331014726:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:41.038445Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012bf/r3tmp/tmpN3iPkg/pdisk_1.dat 2025-04-07T14:14:41.165542Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:41.194314Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:41.194409Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:41.196064Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1398, node 6 2025-04-07T14:14:41.289246Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:41.289272Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:41.289282Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:41.289397Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18944 TClient is connected to server localhost:18944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:41.866503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:14:41.885933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.962329Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.145248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.221670Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.904118Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574510215918310:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.904227Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.957778Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.997718Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.042857Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.074811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.111591Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.156582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.225064Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574514510886102:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.225178Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.225233Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574514510886107:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.230479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:45.243968Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574514510886109:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:46.052420Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574497331014726:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:46.052486Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD] Test command err: Trying to start YDB, gRPC: 9129, MsgBus: 1808 2025-04-07T14:14:15.125723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574385216083343:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.125844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b1/r3tmp/tmp9c2a4Q/pdisk_1.dat 2025-04-07T14:14:15.341430Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9129, node 1 2025-04-07T14:14:15.391574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.391595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.391604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.391687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:15.452888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.452987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.454536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1808 TClient is connected to server localhost:1808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:15.821586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:15.837442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.934201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.039933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.114736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.476611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393806019643:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.476712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.695576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.715035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.736232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.759484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.824257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.847886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.881641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393806020136:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.881717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.881849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393806020141:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.885088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:17.892521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574393806020143:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9167, MsgBus: 9031 2025-04-07T14:14:19.356701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574402184901067:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:19.356785Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b1/r3tmp/tmpNbUTsj/pdisk_1.dat 2025-04-07T14:14:19.422452Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9167, node 2 2025-04-07T14:14:19.457515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:19.457592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:19.459535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:19.492395Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:19.492417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:19.492424Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:19.492521Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9031 TClient is connected to server localhost:9031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:19.882472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.888734Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:19.893373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.946842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:20.075920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:20.153716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.055325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574415069804653:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.055405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.089358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:36.140347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:36.211651Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.678003Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574484566647796:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.678101Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.722151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.762911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.839712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.910342Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.980233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.014822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.062149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574488861615589:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.062247Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.062324Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574488861615594:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:39.066261Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:39.078122Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574488861615596:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:40.153414Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574471681744213:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:40.153515Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1181, MsgBus: 11787 2025-04-07T14:14:41.449622Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574496997662487:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:41.449694Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b1/r3tmp/tmpFgftps/pdisk_1.dat 2025-04-07T14:14:41.579540Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:41.579639Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:41.580082Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:41.594224Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1181, node 6 2025-04-07T14:14:41.651715Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:41.651741Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:41.651753Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:41.651901Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11787 TClient is connected to server localhost:11787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:42.217146Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.225997Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:42.298887Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.475914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.557257Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:45.314534Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574514177533371:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.314654Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.342175Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.383380Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.419112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.453042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.503180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.543101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.605174Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574514177533866:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.605287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.605706Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574514177533871:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:45.609473Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:45.622312Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574514177533873:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:46.454516Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574496997662487:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:46.454581Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::InWithCast [GOOD] >> KqpRanges::NullInKey [GOOD] >> KqpRanges::LiteralOrCompisite >> KqpNotNullColumns::UpsertNotNullPk >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 31678, MsgBus: 2272 2025-04-07T14:14:13.365271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574378873902499:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:13.365384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b5/r3tmp/tmpGzM8PT/pdisk_1.dat 2025-04-07T14:14:13.588278Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31678, node 1 2025-04-07T14:14:13.644512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:13.644545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:13.644553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:13.644655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:13.698787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:13.698883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:13.700497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2272 TClient is connected to server localhost:2272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:14.059863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:14.088945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:14.214419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:14.345894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:14.418689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.658078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574387463838818:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.658270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:15.857918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.886774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.918764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.939935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.967900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:15.999134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:16.038651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574391758806606:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:16.038741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:16.038822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574391758806611:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:16.041706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:16.049603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574391758806613:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:16.812368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:16.833996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:16.861259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 2025-04-07T14:14:18.365663Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574378873902499:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:18.365721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 30828, MsgBus: 9323 2025-04-07T14:14:20.086325Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574408245735067:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:20.086401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b5/r3tmp/tmpShQLWv/pdisk_1.dat 2025-04-07T14:14:20.164915Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30828, node 2 2025-04-07T14:14:20.189632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:20.189710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:20.190948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:20.219812Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:20.219839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:20.219846Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:20.219948Z node 2 :NET_CLASSIFIER ERROR: got bad dist ... in>: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:37.607921Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:37.641982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:37.679106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:37.715043Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:37.748569Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:37.780689Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.006308Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574482414312255:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.006403Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.006424Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574482414312260:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:38.010379Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:38.029915Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574482414312262:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:39.223549Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.306305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:39.345856Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574465234440837:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:39.345919Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:39.347966Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14843, MsgBus: 16311 2025-04-07T14:14:42.692497Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574499684411677:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.692997Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b5/r3tmp/tmpxMys4o/pdisk_1.dat 2025-04-07T14:14:42.787643Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:42.825618Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:42.825697Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14843, node 5 2025-04-07T14:14:42.827017Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:42.869271Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:42.869300Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:42.869312Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:42.869440Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16311 TClient is connected to server localhost:16311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:43.386638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.402846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:43.484929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:43.665841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:43.778206Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.465427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574516864282566:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.465542Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.516485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.553566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.589295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.621343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.657345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.702368Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.793120Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574516864283067:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.793194Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574516864283072:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.793223Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.796920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:46.809681Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574516864283074:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:47.693042Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574499684411677:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:47.780351Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::JoinIdxLookup >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::Like >> KqpSort::TopSortParameter >> KqpNewEngine::Delete [GOOD] >> KqpNewEngine::DeleteOn >> KqpSort::OffsetTopSort [GOOD] >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> KqpNewEngine::LiteralKeys [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 19091, MsgBus: 11937 2025-04-07T14:14:15.294554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574385781367725:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.294718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b0/r3tmp/tmpFG9eCI/pdisk_1.dat 2025-04-07T14:14:15.526216Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19091, node 1 2025-04-07T14:14:15.583182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.583213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.583222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.583363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:15.610489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.610608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.612286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11937 TClient is connected to server localhost:11937 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:16.046678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.055624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:16.060700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.166313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.293993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.368683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.843272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574394371304046:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.843372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.098747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.122694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.142965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.181929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.205326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.231015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.261359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574398666271834:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.261433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.261480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574398666271839:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.264440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:18.272090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574398666271841:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 13243, MsgBus: 61147 2025-04-07T14:14:20.426927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574405200152566:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:20.427000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b0/r3tmp/tmpiUrNcX/pdisk_1.dat 2025-04-07T14:14:20.528710Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13243, node 2 2025-04-07T14:14:20.566856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:20.566955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:20.568589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:20.588953Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:20.588974Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:20.588988Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:20.589141Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61147 TClient is connected to server localhost:61147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:20.972072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.987751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.037428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:21.182593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.236386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:23.173421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574418085056163:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.173531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.215651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:38.720981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.786769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.334653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574498214887106:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.334752Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.385765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.422264Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.455024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.522278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.558661Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.617069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.670480Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574498214887604:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.670563Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.670752Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574498214887609:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.674458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:41.683830Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574498214887611:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:42.794097Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574481035016239:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.794156Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5240, MsgBus: 29594 2025-04-07T14:14:44.550046Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574509071103343:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:44.550153Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b0/r3tmp/tmpdJHLr1/pdisk_1.dat 2025-04-07T14:14:44.676145Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:44.691663Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:44.691755Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:44.693297Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5240, node 6 2025-04-07T14:14:44.760750Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:44.760772Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:44.760782Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:44.760905Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29594 TClient is connected to server localhost:29594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:45.333179Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:45.350246Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:45.413460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:45.646949Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:45.722126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.315962Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574526250974212:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.316069Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.378705Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.408981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.447894Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.482271Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.563370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.635777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.681183Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574526250974715:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.681267Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.681330Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574526250974720:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.684379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:48.694222Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574526250974722:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:49.550553Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574509071103343:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.550661Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit [GOOD] >> KqpNewEngine::DependentSelect >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::UpdateWhereInFullScan >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNull >> KqpSort::ReverseEightShardOptimized [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 25101, MsgBus: 63857 2025-04-07T14:14:19.223507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574400620278430:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:19.223577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a8/r3tmp/tmpI0KOxV/pdisk_1.dat 2025-04-07T14:14:19.497584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25101, node 1 2025-04-07T14:14:19.543863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:19.543884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:19.543895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:19.543970Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:19.577285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:19.577392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:19.579027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63857 TClient is connected to server localhost:63857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:19.997516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.018200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:20.026985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.146739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.247617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.293045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.595690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574409210214726:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.595784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.813197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.834706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.855339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.876217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.900964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.921749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.955544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574409210215217:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.955641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.955719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574409210215222:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.959217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:21.967938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574409210215224:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7573, MsgBus: 17792 2025-04-07T14:14:23.670974Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574419564199671:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:23.671053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a8/r3tmp/tmpAcizEz/pdisk_1.dat 2025-04-07T14:14:23.726133Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7573, node 2 2025-04-07T14:14:23.772574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:23.772684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:23.774305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:23.775815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:23.775837Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:23.775843Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:23.775938Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17792 TClient is connected to server localhost:17792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:24.208735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:24.215155Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:24.220556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:24.293645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:14:24.441097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.517940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:26.388238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574432449103253:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:26.388323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... ard: 72057594046644480 waiting... 2025-04-07T14:14:40.310594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:40.389992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:42.560969Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574500780682815:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.561056Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.607684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.636773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.675804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.707854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.750660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.803220Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.858156Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574500780683307:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.858267Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.858581Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574500780683312:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.862384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:42.879851Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574500780683314:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:44.248974Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574487895779390:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:44.249053Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23913, MsgBus: 9962 2025-04-07T14:14:45.960276Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574515969953656:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:45.960318Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a8/r3tmp/tmpBXXtVr/pdisk_1.dat 2025-04-07T14:14:46.046497Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:46.068754Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:46.068856Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:46.070249Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23913, node 6 2025-04-07T14:14:46.111517Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:46.111546Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:46.111562Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:46.111672Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9962 TClient is connected to server localhost:9962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:46.648587Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:46.655578Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:46.666631Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.746178Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.949253Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:47.043285Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:49.306135Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574533149824540:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.306250Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.349915Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.420243Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.465045Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.510084Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.580114Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.617533Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.705370Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574533149825041:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.705479Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.705569Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574533149825046:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.709628Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:49.721293Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574533149825048:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:50.963722Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574515969953656:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:50.963834Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] Test command err: Trying to start YDB, gRPC: 31525, MsgBus: 13221 2025-04-07T14:14:15.239867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574386689335145:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:15.239984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ae/r3tmp/tmpqEn4zG/pdisk_1.dat 2025-04-07T14:14:15.515834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31525, node 1 2025-04-07T14:14:15.576235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.576260Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.576272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.576373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:15.599874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.599972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.601685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13221 TClient is connected to server localhost:13221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:16.031642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:16.046797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:16.058757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.170344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.328395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:16.381275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.896765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574395279271452:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.896900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.169280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.191341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.215407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.281636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.301818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.344590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.374768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574399574239246:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.374833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.374837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574399574239251:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:18.377202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:18.384749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574399574239253:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:19.168447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:19.744883Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035259764, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 24355, MsgBus: 11155 2025-04-07T14:14:20.339523Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574408394109964:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:20.339625Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ae/r3tmp/tmpW1X9u9/pdisk_1.dat 2025-04-07T14:14:20.445910Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:20.469530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:20.469621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:20.471102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24355, node 2 2025-04-07T14:14:20.513194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:20.513215Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:20.513223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:20.513313Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11155 TClient is connected to server localhost:11155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:20.931635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.938934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:20.989980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.135408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.207476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.934669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574416984046258:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you do ... DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.348343Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.404036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.478176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.524906Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.565534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.633851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.669472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.766479Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574495848148713:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.766557Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.766592Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574495848148718:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.770228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:41.778549Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574495848148720:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:42.656821Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574478668277324:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:42.656895Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:43.062603Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.040198Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035284075, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 24914, MsgBus: 23488 2025-04-07T14:14:44.994165Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574510056588466:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:44.994239Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ae/r3tmp/tmp2ytc7U/pdisk_1.dat 2025-04-07T14:14:45.090593Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:45.112184Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:45.112261Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:45.114272Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24914, node 6 2025-04-07T14:14:45.158844Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:45.158866Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:45.158875Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:45.158986Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23488 TClient is connected to server localhost:23488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:45.768765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:45.780286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:45.840789Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.018763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.098563Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.703485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574527236459360:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.703552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.725909Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.755665Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.820994Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.850753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.882027Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.954917Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.020066Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574531531427159:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.020161Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.020182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574531531427164:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.025008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:49.036724Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574531531427166:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:49.998981Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574510056588466:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.999046Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:50.215021Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.345709Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035291376, txId: 281474976715673] shutting down >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiStatement >> KqpKv::ReadRows_SpecificKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::ReverseEightShardOptimized [GOOD] Test command err: Trying to start YDB, gRPC: 62635, MsgBus: 24154 2025-04-07T14:14:19.608594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574403733645513:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:19.608692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a5/r3tmp/tmp9GboDs/pdisk_1.dat 2025-04-07T14:14:19.865644Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62635, node 1 2025-04-07T14:14:19.932138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:19.932261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:19.933964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:19.947486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:19.947512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:19.947519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:19.947636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24154 TClient is connected to server localhost:24154 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:20.407314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.425824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:20.439173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.549544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.669285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.729998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.031120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574416618549114:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.031259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.251456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.274569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.296277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.359619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.424549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.448751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.488575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574416618549612:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.488639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.488683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574416618549617:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.491810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:22.499311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574416618549619:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 21771, MsgBus: 11911 2025-04-07T14:14:24.374249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574425635065221:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:24.374515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a5/r3tmp/tmpyJvjyy/pdisk_1.dat 2025-04-07T14:14:24.473325Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21771, node 2 2025-04-07T14:14:24.503659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:24.503770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:24.504484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:24.549142Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:24.549183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:24.549192Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:24.549336Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11911 TClient is connected to server localhost:11911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:24.998165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.020618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.100701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:25.272200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.347734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.388496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574438519968830:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.388583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.417546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.873462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.262688Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574507928480887:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.262762Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.304740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.340098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.381858Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.413980Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.444695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.480598Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:44.567474Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574507928481384:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.567575Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.568368Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574507928481389:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:44.575599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:44.593120Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T14:14:44.593407Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574507928481391:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:45.832746Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574490748609990:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:45.832815Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65364, MsgBus: 5651 2025-04-07T14:14:46.984027Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574519734600733:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:46.984098Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a5/r3tmp/tmpig8RzR/pdisk_1.dat 2025-04-07T14:14:47.114584Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:47.143458Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:47.143550Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:47.145432Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65364, node 6 2025-04-07T14:14:47.194327Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:47.194349Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:47.194358Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:47.194472Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5651 TClient is connected to server localhost:5651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:47.754130Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:47.761759Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:47.771301Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:47.853092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.043928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.114862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:50.778453Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574536914471625:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.778548Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.832792Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.863713Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.899121Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.933281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.972855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.045024Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.087720Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574541209439421:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.087813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.087874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574541209439426:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.091380Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:51.101036Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574541209439428:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:51.985442Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574519734600733:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:51.985512Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::DecimalColumn >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpNotNullColumns::UpdateNotNullPk >> KqpNewEngine::Aggregate >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReturningSerial >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] >> KqpNotNullColumns::ReplaceNotNull >> KqpSort::TopSortParameter [GOOD] >> KqpSort::TopSortExprPk >> KqpNewEngine::DeleteOn [GOOD] >> KqpNewEngine::DeleteWithBuiltin >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] Test command err: Trying to start YDB, gRPC: 19569, MsgBus: 17904 2025-04-07T14:14:14.739591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574380300852311:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:14.739660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b4/r3tmp/tmp20jiOm/pdisk_1.dat 2025-04-07T14:14:14.959863Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19569, node 1 2025-04-07T14:14:15.021131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:15.021162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:15.021169Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:15.021280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:15.060058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:15.060135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:15.061760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17904 TClient is connected to server localhost:17904 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:15.361475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.382170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.474219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.584335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:15.655808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:17.014556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393185755928:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.014654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.278563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.301958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.320208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.354607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.375327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.397791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:17.434880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393185756420:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.434927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574393185756425:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.434977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:17.437613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:17.444765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574393185756427:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:18.246389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.272083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:18.296245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:19.739862Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574380300852311:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:19.763297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:66: Warning: At function: Filter, At function: Coalesce
:5:78: Warning: At function: SqlIn
:5:78: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
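(Editor's note, not part of the test output: the warning above, code 1108, recommends enabling 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;'. A minimal YQL sketch of a query adjusted per that recommendation follows; the table and column names are hypothetical illustrations, not taken from the test source — only the PRAGMA itself comes from the warning text.)

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- With the pragma enabled, IN over nullable arguments follows ANSI SQL semantics,
-- so a NULL key no longer produces the "unexpected result" the warning refers to.
SELECT Key, Fk, Value
FROM `/Root/SecondaryKeys`   -- hypothetical table name
WHERE Fk IN (1, 2, 3)
ORDER BY Key;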
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:66: Warning: At function: Filter, At function: Coalesce
:5:78: Warning: At function: SqlIn
:5:78: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 29815, MsgBus: 64037 2025-04-07T14:14:21.039935Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574409342543425:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:21.040020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b4/r3tmp/tmpA63Xnp/pdisk_1.dat 2025-04-07T14:14:21.122447Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:21.153257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:21.153314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:21.154360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29815, node 2 2025-04-07T14:14:21.192722Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:21.192755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:21.192764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:21.192880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64037 TClient is connected to server localhost:64037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitR ... -07T14:14:40.405184Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:40.416198Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574491718719410:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:41.546026Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574474538848022:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:41.546110Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:41.655341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.735205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:41.785559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:60: Warning: At function: Filter, At function: Coalesce
:7:33: Warning: At function: SqlIn
:7:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:60: Warning: At function: Filter, At function: Coalesce
:7:33: Warning: At function: SqlIn
:7:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14289, MsgBus: 5796 2025-04-07T14:14:45.887979Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574515123963522:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:45.888049Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012b4/r3tmp/tmpYakdNW/pdisk_1.dat 2025-04-07T14:14:45.989917Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14289, node 5 2025-04-07T14:14:46.027069Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:46.027174Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:46.046814Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:46.101354Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:46.101381Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:46.101390Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:46.101536Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5796 TClient is connected to server localhost:5796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:46.640426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.647423Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:46.656578Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:46.737419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:46.915089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.990570Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:49.514082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574532303834405:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.514202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.561312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.626183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.668470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.705234Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.744104Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.795253Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.851794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574532303834900:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.851945Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.852212Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574532303834905:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.855945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:49.870529Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574532303834907:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:50.920089Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574515123963522:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:50.920220Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:51.060249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.138069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.200949Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> KqpNewEngine::DuplicatedResults >> CompressExecutor::TestExecutorMemUsage [GOOD] >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey+EnableKqpDataQueryStreamLookup >> KqpRanges::Like [GOOD] >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-07T14:12:54.077830Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744035174077798 2025-04-07T14:12:54.389812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574037204871297:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.389958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.416029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574036182396309:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.617362Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.617385Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001097/r3tmp/tmp0mRVSL/pdisk_1.dat 2025-04-07T14:12:54.643253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.858460Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:54.895647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.895732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.898360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.898420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.907204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:54.907354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.909631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3551, node 1 2025-04-07T14:12:55.049882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001097/r3tmp/yandexPIw5QQ.tmp 2025-04-07T14:12:55.049944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001097/r3tmp/yandexPIw5QQ.tmp 2025-04-07T14:12:55.050096Z node 1 :NET_CLASSIFIER WARN: successfully initialized from 
file: /home/runner/.ya/build/build_root/u6pr/001097/r3tmp/yandexPIw5QQ.tmp 2025-04-07T14:12:55.050218Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:55.215440Z INFO: TTestServer started on Port 4063 GrpcPort 3551 TClient is connected to server localhost:4063 PQClient connected to localhost:3551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:55.495564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-07T14:12:57.342841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574049067298367:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.342890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574049067298359:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.342976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.348411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:12:57.363545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574049067298374:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:12:57.581458Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574050089774102:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.581474Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574049067298417:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.581691Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU0YjgxOTgtZThiZTBiNmUtMjFmNTE2NGUtN2VkZGJiNDA=, ActorId: [2:7490574049067298343:2280], ActorState: ExecuteState, TraceId: 01jr89hrvxcje7091aezsdxf6x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.582692Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZThmOTI1OTYtYTc3ZDQzMmQtYjMzYjBlYjQtYzFkNTllYQ==, ActorId: [1:7490574050089774053:2303], ActorState: ExecuteState, TraceId: 01jr89hs0e71s8yt05e6rc83mp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.583214Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.583217Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.593090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.722296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.837829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:3551", true, true, 1000); 2025-04-07T14:12:58.189061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr89hsde8sqw66ymcx3bep6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBjNGUxZTEtMmNkMDY2N2ItYmJiYWM0NzYtOWViZjkwNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490574054384741787:2956] 2025-04-07T14:12:59.392833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574037204871297:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.392921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:12:59.416853Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574036182396309:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:59.416943Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-07T14:13:04.539178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:3551 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-07T14:13:04.664963Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3551 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Cod ... 1 } 2025-04-07T14:14:53.624732Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2025-04-07T14:14:53.634297Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:8635" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2025-04-07T14:14:53.634369Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Start write session. 
Will connect to endpoint: localhost:8635 2025-04-07T14:14:53.640127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-07T14:14:53.644711Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:14:53.644749Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-07T14:14:53.645115Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-07T14:14:53.645288Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:48686 2025-04-07T14:14:53.645315Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48686 proto=v1 topic=test-topic durationSec=0 2025-04-07T14:14:53.645326Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:14:53.646704Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-07T14:14:53.646848Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:14:53.646870Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:14:53.646883Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:14:53.646902Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574549874837270:2539] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:14:53.649429Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7490574549874837270:2539] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:14:53.838980Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710697. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:14:53.839093Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490574549874837282:2541] TxId: 281474976710697. Ctx: { TraceId: 01jr89naeh7tcy2847q77aqcwb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MjI1MmQ0ZTMtNzFiZGFmMjctNWM0Y2I4NTMtOGY2MzRhNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:14:53.839383Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MjI1MmQ0ZTMtNzFiZGFmMjctNWM0Y2I4NTMtOGY2MzRhNWM=, ActorId: [15:7490574549874837271:2541], ActorState: ExecuteState, TraceId: 01jr89naeh7tcy2847q77aqcwb, Create QueryResponse for error on request, msg: Test retry state: get retry delay 2025-04-07T14:14:53.842941Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MjI1MmQ0ZTMtNzFiZGFmMjctNWM0Y2I4NTMtOGY2MzRhNWM=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr89naej836t7ys3pr8fs39r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-07T14:14:53.842969Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session will restart in 2.000000s 2025-04-07T14:14:53.843085Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: Do CDS request 2025-04-07T14:14:53.843112Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Do schedule cds request after 2000 ms 2025-04-07T14:14:53.841084Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7490574549874837270:2539] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MjI1MmQ0ZTMtNzFiZGFmMjctNWM0Y2I4NTMtOGY2MzRhNWM=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr89naej836t7ys3pr8fs39r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-07T14:14:53.841193Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MjI1MmQ0ZTMtNzFiZGFmMjctNWM0Y2I4NTMtOGY2MzRhNWM=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr89naej836t7ys3pr8fs39r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-07T14:14:53.841551Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD 2025-04-07T14:14:54.080146Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.080256Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490574549874837346:2551] TxId: 281474976710699. Ctx: { TraceId: 01jr89napbf66ebce1q85q5er5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MmMxZWJiNDMtNDY0MTk1YjEtMzQxNmExY2UtZGNjZWQxZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.080569Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MmMxZWJiNDMtNDY0MTk1YjEtMzQxNmExY2UtZGNjZWQxZjI=, ActorId: [15:7490574549874837343:2551], ActorState: ExecuteState, TraceId: 01jr89napbf66ebce1q85q5er5, Create QueryResponse for error on request, msg: 2025-04-07T14:14:54.081410Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr89napbf66ebce1qb3bgpw0" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T14:14:54.210194Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720683. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.210335Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7490574553388620262:2465] TxId: 281474976720683. Ctx: { TraceId: 01jr89naby3frtyfxfcxedtwf2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NWYxNWY4MzUtZmUyZmIyMDItMjE3NmYwMDktMWNlMjY3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.210763Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=NWYxNWY4MzUtZmUyZmIyMDItMjE3NmYwMDktMWNlMjY3ZWE=, ActorId: [16:7490574549093652949:2465], ActorState: ExecuteState, TraceId: 01jr89naby3frtyfxfcxedtwf2, Create QueryResponse for error on request, msg: 2025-04-07T14:14:54.211839Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr89natecfpy3k19s8mcb34h" } } YdbStatus: UNAVAILABLE ConsumedRu: 302 } 2025-04-07T14:14:54.310273Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.310409Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7490574554169804695:2543] TxId: 281474976710701. Ctx: { TraceId: 01jr89nafh37ajkj9wn7cw49a9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NDE4ZGUzZGEtMTdlODA3ZTYtZGE1NmI1MWYtZDhhNWRmN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-07T14:14:54.310788Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NDE4ZGUzZGEtMTdlODA3ZTYtZGE1NmI1MWYtZDhhNWRmN2M=, ActorId: [15:7490574549874837290:2543], ActorState: ExecuteState, TraceId: 01jr89nafh37ajkj9wn7cw49a9, Create QueryResponse for error on request, msg: 2025-04-07T14:14:54.311829Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr89naxy8pha8hh6hp9ky399" } } YdbStatus: UNAVAILABLE ConsumedRu: 304 } 2025-04-07T14:14:54.630857Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: close. Timeout = 0 ms 2025-04-07T14:14:54.630919Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session will now close 2025-04-07T14:14:54.630980Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: aborting 2025-04-07T14:14:54.631909Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-07T14:14:54.631956Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a149115-95feda30-3c358b57-c7e29f28_0] Write session: destroy >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable >> KqpNewEngine::Update >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 31459, MsgBus: 16848 2025-04-07T14:14:23.203776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574419286367540:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:23.205101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001298/r3tmp/tmpdzP6vu/pdisk_1.dat 2025-04-07T14:14:23.495816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31459, node 1 2025-04-07T14:14:23.561253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:23.561277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:23.561286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:23.561386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:23.576777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:14:23.576928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:23.579270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16848 TClient is connected to server localhost:16848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:23.989039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.749569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574427876302687:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:25.749680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:25.998097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:26.107229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574432171270085:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:26.107318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:26.107356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574432171270090:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:26.109905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:26.116569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574432171270092:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 29303, MsgBus: 32082 2025-04-07T14:14:27.022630Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574435514573000:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:27.022736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001298/r3tmp/tmp3waikz/pdisk_1.dat 2025-04-07T14:14:27.096467Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29303, node 2 2025-04-07T14:14:27.125834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:27.125931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:27.127821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:27.150868Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:27.150903Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:27.150909Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:27.150989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32082 TClient is connected to server localhost:32082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:27.483986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.502357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.583052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:27.711573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.772390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:29.481595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574444104509291:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:29.481681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:29.516561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.550632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.579204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.603889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.628202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.691473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.754353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574444104509790:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:29.754409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:29.754510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574444104509795:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:29.757445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, ... : 72057594046644480 waiting... 2025-04-07T14:14:45.364497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:45.459149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.150651Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574527787733820:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.150742Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.195383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.228617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.256284Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.285368Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.313218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.389361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.465557Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574527787734324:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.465666Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.465686Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574527787734329:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.469411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:48.481166Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574527787734331:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:49.391520Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574510607863083:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.391787Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28261, MsgBus: 18076 2025-04-07T14:14:50.674099Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574537203331473:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:50.674174Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001298/r3tmp/tmpdYyNBm/pdisk_1.dat 2025-04-07T14:14:50.775182Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:50.805640Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:50.805715Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:50.807060Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28261, node 6 2025-04-07T14:14:50.857372Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:50.857400Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:50.857410Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:50.857563Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18076 TClient is connected to server localhost:18076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:51.399499Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:51.406670Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:51.415256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.489703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.653652Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.748371Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.324260Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574554383202382:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.324406Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.346277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.376763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.409525Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.445278Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.515458Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.550619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.592374Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574554383202877:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.592489Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.592777Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574554383202882:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.597151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:54.606484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574554383202884:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:55.676767Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574537203331473:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:55.676846Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: Trying to start YDB, gRPC: 7664, MsgBus: 10330 2025-04-07T14:14:18.085504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574398268714337:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:18.085560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ab/r3tmp/tmp12F3Hm/pdisk_1.dat 2025-04-07T14:14:18.304971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7664, node 1 2025-04-07T14:14:18.366804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:18.366827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:18.366834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:18.366907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:18.408688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:18.408843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:18.410544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10330 TClient is connected to server localhost:10330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:18.747029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:18.767591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:18.895951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.006082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.058391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.876272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574406858650642:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:20.876397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.132150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.195739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.217740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.239259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.266445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.299047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.333601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574411153618436:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.333680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.333690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574411153618441:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.336164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:21.342746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574411153618443:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:22.191256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.347762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.465334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.567871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.818931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.085721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574398268714337:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:23.085817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29245, MsgBus: 16862 2025-04-07T14:14:23.762096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574418364719593:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:23.762175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ab/r3tmp/tmpuoE1lD/pdisk_1.dat 2025-04-07T14:14:23.852299Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:23.875773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:23.875862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:23.877447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29245, node 2 2025-04-07T14:14:23.923269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:23.923308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:23.923321Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:23.923430Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16862 TClient is connected to server localhost:16862 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:24.318169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:24.327908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:24.405166Z ... N: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.869109Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.902520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.937127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.984189Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.050523Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574522373414211:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.050605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.050701Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574522373414216:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.054601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:47.065208Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574522373414218:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:47.967917Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574500898575524:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:47.968021Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:48.239102Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.502758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.729438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.878213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.277543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25446, MsgBus: 20022 2025-04-07T14:14:50.960771Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574535434328915:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:50.960829Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012ab/r3tmp/tmpEEKGBB/pdisk_1.dat 2025-04-07T14:14:51.063241Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:51.075543Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:51.075645Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:51.078098Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25446, node 6 2025-04-07T14:14:51.123199Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:51.123248Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:51.123261Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:51.123438Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20022 TClient is connected to server localhost:20022 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:51.718052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.726501Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.740698Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.825564Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.034245Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.114204Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.719453Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574552614199818:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.719553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.773546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.807706Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.844338Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.877965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.907970Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.949577Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.997719Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574552614200312:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.997814Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.997982Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574552614200317:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.001527Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:55.012292Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574552614200319:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:55.961270Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574535434328915:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:55.961337Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:56.231016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> KqpRanges::UpdateWhereInFullScan [GOOD] >> KqpRanges::ValidatePredicates >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple >> KqpNotNullColumns::InsertNotNullPk >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> KqpNewEngine::DeleteWithBuiltin [GOOD] >> KqpNewEngine::DeleteON >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:27.561565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:27.561641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.561672Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:27.561701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:27.561754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:27.561802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:27.561871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.562193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:27.629662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:27.629713Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:27.644584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:27.644703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:27.644879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:27.654429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:27.654590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:27.655158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.655492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:27.661751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.663520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.663583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.663748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:27.663794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.663831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:27.663963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:27.669759Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:27.775972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:27.776164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.776344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:27.776504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:27.776568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.778478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.778601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:27.778755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.778798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:27.778832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:27.778869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:27.780614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.780687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:27.780724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:27.782139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.782176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.782226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.782273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.785447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:27.787006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:27.787201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: 
[1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:27.788042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.788197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:27.788250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.788489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:27.788535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.788721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:27.788807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:27.790713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.790754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.790951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.790996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:27.791306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.791348Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:27.791448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:27.791513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.791560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:27.791623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.791659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:27.791691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:27.791765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:27.791801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:27.791831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
6 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T14:15:00.152482Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:15:00.152914Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 451us result status StatusSuccess 2025-04-07T14:15:00.153676Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:15:00.164843Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1002:2774] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:15:00.164927Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1003:2774] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:15:00.164996Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:922:2774] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-07T14:15:00.165059Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:922:2774] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-07T14:15:00.165162Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1002:2774] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035300137440 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035300137440 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:15:00.165535Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1003:2774] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1744035300137440 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:15:00.169116Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1002:2774] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-07T14:15:00.169229Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:922:2774] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-07T14:15:00.170008Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1003:2774] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:15:00.170091Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:922:2774] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: Trying to start YDB, gRPC: 20124, MsgBus: 9194 2025-04-07T14:14:20.375650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574406550351911:2175];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:20.376124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129f/r3tmp/tmpM3Xr80/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20124, node 1 2025-04-07T14:14:20.664102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:20.665454Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:20.665465Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:20.717160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:20.717179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:20.717185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:20.717272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:20.751037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:20.751205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:20.753995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9194 TClient is connected to server localhost:9194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:21.167396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.190991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.317539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.446478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.495359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:23.231454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574419435255398:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.231596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.451703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.480016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.502861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.524142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.546665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.573393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.610001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574419435255893:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.610069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.610235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574419435255898:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:23.613391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:23.625204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574419435255900:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:24.498588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.530151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.574676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:25.369284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574406550351911:2175];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:25.369358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
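The SqlIn warning above (repeated below for each query run) names its own remedy: enabling ANSI IN semantics via a pragma. A minimal YQL sketch of that suggestion, assuming a hypothetical table SecondaryKeys with a nullable Fk column — the pragma name is quoted verbatim from the warning, the table and column names are illustrative only:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- With the pragma enabled, IN over an empty or nullable right-hand collection follows
-- ANSI semantics, avoiding the "unexpected result" the warning (code 1108) refers to.
SELECT Key, Fk
FROM SecondaryKeys      -- hypothetical table, not taken from this log
WHERE Fk IN (1, 2, NULL);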
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 13807, MsgBus: 18398 2025-04-07T14:14:27.903988Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574436622692537:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:27.904060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129f/r3tmp/tmp5BaiKx/pdisk_1.dat 2025-04-07T14:14:27.980159Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:28.004688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:28.004773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:28.006816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13807, node 2 2025-04-07T14:14:28.055492Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:28.055515Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:28.055523Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:28.055697Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localh ... ode 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.765909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.958278Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574518585510343:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.958394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.959663Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574518585510348:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.963599Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:46.976647Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574518585510350:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:47.762233Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574501405639119:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:47.762302Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:48.071730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.108245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.147132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17255, MsgBus: 11495 2025-04-07T14:14:51.902333Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574541941624304:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:51.902401Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129f/r3tmp/tmpKp0WQW/pdisk_1.dat 2025-04-07T14:14:52.038025Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:52.047131Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:52.047227Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:52.048749Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17255, node 5 2025-04-07T14:14:52.094709Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:52.094733Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:52.094741Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:52.094874Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11495 TClient is connected to server localhost:11495 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:52.651601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.660630Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:52.672223Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.753914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.912587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.978635Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:55.560208Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574559121495195:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.560298Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.593154Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.663523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.695548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.765034Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.795710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.869564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.965298Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574559121495702:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.965399Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.965871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574559121495707:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.971478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:55.982617Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574559121495709:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:56.905343Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574541941624304:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:56.905415Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:57.137777Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:57.176184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:57.219536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
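The repeated "Resource pool default not found" warnings above show the workload service creating the default pool itself (TPoolCreatorActor issuing ESchemeOpCreateResourcePool, then double-checking the transaction). As a rough sketch only — assuming current YQL resource-pool syntax, with illustrative limits not taken from this log — an explicitly defined pool could look like:

CREATE RESOURCE POOL example_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- illustrative value (assumed setting name)
    QUEUE_SIZE = 100              -- illustrative value (assumed setting name)
);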
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::JoinPure >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::OrderedScalarContext >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage >> KqpNewEngine::Update [GOOD] >> KqpNewEngine::UpdateFromParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 16512, MsgBus: 20252 2025-04-07T14:14:21.733916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574412225806500:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:21.734280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129a/r3tmp/tmpYdXB8P/pdisk_1.dat 2025-04-07T14:14:21.991602Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16512, node 1 2025-04-07T14:14:22.057220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:22.057244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:22.057249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:22.057351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:22.071164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:22.071292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:22.073138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20252 TClient is connected to server localhost:20252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:22.464912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:22.495335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.600068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.721982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.807084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:24.327938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574425110710093:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:24.328034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:24.533166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.563066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.586985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.615444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.645015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.681907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:24.734744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574425110710587:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:24.734836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:24.735130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574425110710592:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:24.739436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:24.746983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574425110710594:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:25.707274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:25.778745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:25.851194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:26.733952Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574412225806500:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:26.734020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 13334, MsgBus: 26368 2025-04-07T14:14:28.428743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574441395962421:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:28.428830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129a/r3tmp/tmphF8PVF/pdisk_1.dat 2025-04-07T14:14:28.513986Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:28.533431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:28.533528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:28.535116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13334, node 2 2025-04-07T14:14:28.572303Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:28.572324Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:28.572332Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:28.572441Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26368 TClient is connected to server localhost:26368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... vice] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.849802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574523974401556:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.852994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:47.860639Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574523974401558:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:49.021121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.064291Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574511089497455:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.064415Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:49.100759Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.137016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 28191, MsgBus: 20083 2025-04-07T14:14:52.533920Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574544111181856:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.533988Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00129a/r3tmp/tmpdb9HDW/pdisk_1.dat 2025-04-07T14:14:52.656707Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:52.684583Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:52.684694Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:52.686437Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28191, node 5 2025-04-07T14:14:52.741145Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:52.741164Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:52.741171Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:52.741260Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20083 TClient is connected to server localhost:20083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:53.249491Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.266881Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.337950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:53.509294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.595680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.152266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574561291052768:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.152356Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.200356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.232038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.299900Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.339784Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.379274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.421349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.466871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574561291053266:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.466971Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.467210Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574561291053271:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.471049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:56.482359Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574561291053273:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:57.534280Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574544111181856:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:57.534342Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:57.634552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:57.673430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:57.755250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNotNullColumns::SelectNotNullColumns >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpRanges::DateKeyPredicate >> KqpSort::ReverseRangeOptimized >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 >> KqpSqlIn::SecondaryIndex_PgKey+EnableKqpDataQueryStreamLookup [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey-EnableKqpDataQueryStreamLookup >> KqpReturning::ReturningSerial [GOOD] >> KqpReturning::ReturningColumnsOrder >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::UpdateMulti >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::StaleRO_Immediate >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortTableExprOffset >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::UpdateNotNull >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UnionAllPure >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> KqpNotNullColumns::InsertNotNullPkPg [GOOD] >> KqpNotNullColumns::InsertNotNull >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 16237, MsgBus: 26385 2025-04-07T14:14:33.114860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574462339630458:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:33.115511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128b/r3tmp/tmpAzjh22/pdisk_1.dat 2025-04-07T14:14:33.367968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16237, node 1 2025-04-07T14:14:33.444055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:14:33.444136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:33.445243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:33.445275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:33.445283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:33.445379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:33.445855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26385 TClient is connected to server localhost:26385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:33.906855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:33.928105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:34.034887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:34.161250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:34.234892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:35.747814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574470929566759:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.747922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:35.990213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.065610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.136239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.166656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.193547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.257744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:36.320141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574475224534557:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:36.320212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:36.320574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574475224534562:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:36.323755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:36.334283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574475224534564:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 14183, MsgBus: 7134 2025-04-07T14:14:38.151144Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574483188718912:2174];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:38.193510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128b/r3tmp/tmpT7IPEl/pdisk_1.dat 2025-04-07T14:14:38.279031Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:38.293641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:38.293735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:38.296254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14183, node 2 2025-04-07T14:14:38.341417Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:38.341440Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:38.341460Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:38.341586Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7134 TClient is connected to server localhost:7134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:38.724955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:38.732515Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:38.740509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:38.784063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:38.936205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:39.001047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.298910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574496073622352:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.298974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.338172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... X_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:55.864632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:55.940627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.480193Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574572225594505:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.480294Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.527038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.575772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.606728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.638746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.667100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.701751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.776381Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574572225595003:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.776466Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.776491Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574572225595008:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.779754Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:58.788858Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574572225595010:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:00.041709Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574555045723617:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:00.041798Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15874, MsgBus: 9428 2025-04-07T14:15:01.524350Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574582971712098:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:01.524408Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128b/r3tmp/tmpwaKQQi/pdisk_1.dat 2025-04-07T14:15:01.637403Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:01.662515Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:01.662612Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:01.666385Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15874, node 6 2025-04-07T14:15:01.717123Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:01.717159Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:01.717173Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:01.717349Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9428 TClient is connected to server localhost:9428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:02.227937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.250560Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:02.310833Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.471112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.547919Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.971309Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574595856615668:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.971426Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.020168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.089398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.124003Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.156495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.191438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.246218Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.291207Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574600151583461:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.291306Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.291649Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574600151583466:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.297041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:05.307177Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574600151583468:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:06.524491Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574582971712098:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:06.524572Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::DqSourceLiteralRange >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ScalarMultiUsage [GOOD] Test command err: Trying to start YDB, gRPC: 24496, MsgBus: 22639 2025-04-07T14:14:27.720762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574438532751535:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:27.726491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128d/r3tmp/tmpJkEWVx/pdisk_1.dat 2025-04-07T14:14:27.980810Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24496, node 1 2025-04-07T14:14:28.066769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:28.066792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:28.066799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:28.066886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:28.093243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:28.093462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:28.096122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22639 TClient is connected to server localhost:22639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:28.502479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:30.176337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574451417654045:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.176356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574451417654053:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.176473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.180719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:14:30.190178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574451417654059:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 9198, MsgBus: 19292 2025-04-07T14:14:31.151057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574455009156974:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:31.151618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128d/r3tmp/tmprHICkN/pdisk_1.dat 2025-04-07T14:14:31.249866Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:31.266617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:31.266671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:31.267567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9198, node 2 2025-04-07T14:14:31.311530Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:31.311545Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:31.311549Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:31.311609Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19292 TClient is connected to server localhost:19292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:31.705911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:31.714922Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:31.730844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:31.859719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:31.999845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:32.076134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:33.976760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574463599093121:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:33.976829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.011852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.040426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.077190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.099553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.162323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.194881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:34.232075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574467894060913:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.232142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574467894060918:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.232174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:34.235620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:34.244874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574467894060920:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:14:35.609314Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574472189028635:2487], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:3:26: Error: At function: KiWriteTable! ... adService] [TPoolFetcherActor] ActorId: [5:7490574541677379891:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.499950Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.500128Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574541677379896:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.503860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:51.512577Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574541677379898:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:52.569524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.599061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.630902Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.668263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.794820Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574524497508498:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.794905Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:57.492068Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035297333, txId: 281474976710692] shutting down 2025-04-07T14:14:57.998887Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035297879, txId: 281474976710695] shutting down 2025-04-07T14:14:58.505069Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035298355, txId: 281474976710698] shutting down 2025-04-07T14:14:59.309053Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035299146, txId: 281474976710701] shutting down 2025-04-07T14:14:59.840284Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035299727, txId: 281474976710704] shutting down 2025-04-07T14:15:00.440428Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035300301, txId: 281474976710707] shutting down 2025-04-07T14:15:01.223653Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035301036, txId: 281474976710710] shutting down 2025-04-07T14:15:01.678283Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035301708, txId: 281474976710713] shutting down 2025-04-07T14:15:02.042336Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035302079, txId: 281474976710715] shutting down Trying to start YDB, gRPC: 18067, MsgBus: 30021 2025-04-07T14:15:03.048834Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574591902271043:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:03.048900Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128d/r3tmp/tmpN0pLFx/pdisk_1.dat 
2025-04-07T14:15:03.171879Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:03.190870Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:03.190981Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:03.193066Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18067, node 6 2025-04-07T14:15:03.255437Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:03.255466Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:03.255499Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:03.255622Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30021 TClient is connected to server localhost:30021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:03.831987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.837460Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:03.847850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.928111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.129920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.204416Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.908587Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574604787174613:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.908701Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.956525Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.989857Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.019858Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.048579Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.078927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.148092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.194442Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574609082142409:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.194546Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.194667Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574609082142414:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.198849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:07.208918Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574609082142416:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:08.050717Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574591902271043:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:08.050808Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> KqpRanges::UpdateMulti [GOOD] >> KqpRanges::UpdateWhereInBigLiteralList >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> KqpSort::UnionAllSortLimit >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::ReturningTypes >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> KqpSqlIn::SecondaryIndex_PgKey-EnableKqpDataQueryStreamLookup [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::TopSortResults >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateOnNotNull >> KqpNewEngine::ComplexLookupLimit [GOOD] >> KqpNewEngine::DeleteByKey [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> KqpRm::SingleTask ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:27.387032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:27.387108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.387148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2025-04-07T14:13:27.387188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:27.387236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:27.387289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:27.387347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:27.387664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:27.456985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:27.457032Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:27.467234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:27.467347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:27.467484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:27.476938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:27.477084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:27.477645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.477961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:27.485031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.486852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.486896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.487011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:27.487047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.487070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:27.487161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:27.493163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:27.588849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-04-07T14:13:27.589034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.589205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:27.589378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:27.589444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.591274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.591399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:27.591551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.591605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:27.591664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:27.591708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:27.593412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.593462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:27.593493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:27.594901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.594938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.594982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.595024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.598206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:27.599653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:27.599846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:27.600580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:27.600703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:27.600747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.600996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:27.601052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:27.601200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:27.601264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:27.605500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:27.605545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:27.605707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:27.605744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:27.606078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:27.606116Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:27.606213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:27.606260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.606302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:27.606335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:27.606380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:27.606416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:27.606482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:27.606518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:27.606549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
emeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.878922Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:15:13.879412Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.879483Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.879511Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:15:13.884857Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-04-07T14:15:13.884914Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:15:13.885106Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T14:15:13.885193Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-07T14:15:13.885217Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-07T14:15:13.885248Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-07T14:15:13.885589Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.885642Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.885665Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-07T14:15:13.885689Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-07T14:15:13.885713Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:15:13.885766Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-07T14:15:13.888808Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-07T14:15:13.888867Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:15:13.889095Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:15:13.889209Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1003:0 progress is 3/3 2025-04-07T14:15:13.889242Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-07T14:15:13.889284Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-07T14:15:13.889321Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-07T14:15:13.889364Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-07T14:15:13.889397Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-07T14:15:13.889489Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:15:13.889527Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-07T14:15:13.889553Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-07T14:15:13.889584Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-07T14:15:13.889611Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-07T14:15:13.889634Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-07T14:15:13.889676Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-07T14:15:13.890138Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.890228Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.892498Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.892546Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.892589Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.894809Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-07T14:15:13.899217Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 341 RawX2: 489626274070 } TabletId: 72075186233409546 State: 4 2025-04-07T14:15:13.899288Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-07T14:15:13.900845Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:15:13.901264Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-07T14:15:13.901472Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-04-07T14:15:13.901705Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-07T14:15:13.902029Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:15:13.902066Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T14:15:13.902129Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:15:13.902167Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:15:13.902205Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:15:13.907024Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-07T14:15:13.907087Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-07T14:15:13.907548Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-07T14:15:13.907775Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-07T14:15:13.907820Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-07T14:15:13.908604Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-07T14:15:13.908687Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-07T14:15:13.908714Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:611:2540] 2025-04-07T14:15:13.912686Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 489626274074 } TabletId: 72075186233409547 State: 4 2025-04-07T14:15:13.912760Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-07T14:15:13.914335Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-07T14:15:13.914753Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-04-07T14:15:13.914944Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-07T14:15:13.915166Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-04-07T14:15:13.917813Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:15:13.917859Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:15:13.917926Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:15:13.920677Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-07T14:15:13.920741Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-07T14:15:13.921235Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-07T14:15:13.921603Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-07T14:15:13.921673Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopSortExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 28927, MsgBus: 25172 2025-04-07T14:14:39.255653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574490106818671:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:39.255694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001286/r3tmp/tmpFT3BnU/pdisk_1.dat 2025-04-07T14:14:39.571946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28927, node 1 2025-04-07T14:14:39.618373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:39.618484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:39.620762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:39.647917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:39.647939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:39.647948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:39.648034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25172 TClient is connected to server localhost:25172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:40.132986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:40.153418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:41.929662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574498696753763:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:41.929743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.114365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:42.234508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574502991721161:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.234555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.234700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574502991721166:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:42.238169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:42.252199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574502991721168:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:14:43.311367Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 7094, MsgBus: 11622 2025-04-07T14:14:43.967230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574504733597947:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:43.967340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001286/r3tmp/tmpGv9foP/pdisk_1.dat 2025-04-07T14:14:44.074157Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:44.094449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:44.094516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:44.095802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7094, node 2 2025-04-07T14:14:44.148246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:44.148271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:44.148275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:44.148347Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11622 TClient is connected to server localhost:11622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:44.566674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.573639Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:44.600048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:44.677823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.834468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.917014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.923230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574517618501540:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.923379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.942456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.972763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.008411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.031128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.055864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.093106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.146128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574521913469330:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.146205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:47.146525Z node 2 :KQP_WORKLOAD_SE ... 976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.315935Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574595619029444:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.316020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.381668Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.413478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.445132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.482218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.553127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.620393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.663519Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574595619029943:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.663693Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.663953Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574595619029948:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.666929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:04.675492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574595619029950:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:05.782228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:05.799351Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574578439158536:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:05.799395Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4835, MsgBus: 6795 2025-04-07T14:15:06.889561Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574605221780125:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001286/r3tmp/tmp4vzvPY/pdisk_1.dat 2025-04-07T14:15:06.937261Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:15:06.979508Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:07.007692Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:07.007795Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:07.009217Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4835, node 6 2025-04-07T14:15:07.054763Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:07.054790Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:07.054799Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:07.054924Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6795 TClient is connected to server localhost:6795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:07.593865Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:07.600561Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:07.613326Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.693398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:15:07.864269Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.962281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.611701Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574622401650864:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.611802Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.666627Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.704101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.744018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.774266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.808906Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.846278Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.909792Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574622401651359:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.909878Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.910072Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574622401651364:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.914414Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:10.924677Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574622401651366:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:11.887096Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574605221780125:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:11.887174Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:12.269912Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> KqpRm::SingleTask [GOOD] >> KqpRm::SingleSnapshotByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PagingNoPredicateExtract [GOOD] Test command err: Trying to start YDB, gRPC: 8483, MsgBus: 30250 2025-04-07T14:14:40.718085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574494612522871:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:40.718142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001285/r3tmp/tmpqaGgyq/pdisk_1.dat 2025-04-07T14:14:41.028472Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8483, node 1 2025-04-07T14:14:41.113244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:41.113344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:41.114417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:41.130349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:41.130373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:41.130390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:41.130491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30250 TClient is connected to server localhost:30250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:41.606470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.638759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.759162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.903866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:41.961150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:43.369014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574507497426466:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.369108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.555506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.579365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.605197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.630607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.668135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.697871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:43.774317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574507497426963:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.774401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.774533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574507497426968:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:43.777821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:43.785763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574507497426970:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11345, MsgBus: 13681 2025-04-07T14:14:45.467959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574516145173532:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:45.468023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001285/r3tmp/tmpNSrs4x/pdisk_1.dat 2025-04-07T14:14:45.603909Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11345, node 2 2025-04-07T14:14:45.641855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:45.641916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:45.643533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:45.671576Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:45.671622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:45.671631Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:45.671741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13681 TClient is connected to server localhost:13681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:46.075846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.094591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:14:46.168437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.324170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.389642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.521190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574529030077108:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.521321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.561576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.589157Z node 2 :FLAT_TX_SCHEMESHAR ... , suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.182700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.265000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.428435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.506903Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.972975Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574600836432772:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.973136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.998452Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.033393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.064395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.134893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.169047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.238814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.306467Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574605131400573:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.306561Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574605131400578:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.306573Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.310456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:06.324964Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574605131400580:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 64939, MsgBus: 22577 2025-04-07T14:15:08.560296Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574611584232374:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:08.560356Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001285/r3tmp/tmpmRxCAB/pdisk_1.dat 2025-04-07T14:15:08.682268Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:08.699297Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:08.699396Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:08.700983Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64939, node 6 2025-04-07T14:15:08.754326Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:08.754354Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:08.754364Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:08.754498Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22577 TClient is connected to server localhost:22577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:09.270354Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.287418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.363755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:09.503345Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.570221Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:12.261689Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574628764103249:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:12.261790Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:12.314513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.368015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.403913Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.438951Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.471465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.509409Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:12.603426Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574628764103751:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:12.603534Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:12.603738Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574628764103756:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:12.607585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:12.622065Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574628764103758:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:13.567750Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574611584232374:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:13.567917Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-04-07T14:15:15.688163Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:15.688637Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e89/r3tmp/tmpgHFVIW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:15.689290Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e89/r3tmp/tmpgHFVIW/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e89/r3tmp/tmpgHFVIW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5245475323940499179 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:15.747558Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:15.747957Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:15.765823Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:15.766004Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:15.766179Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:15.766250Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:15.766459Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver 
subscription request to config dispatcher 2025-04-07T14:15:15.766505Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:15.766540Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:15.766561Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:15.767482Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:15.777703Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035315 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:15.777907Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:15.777978Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035315 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:15.778338Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:15.778448Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:15.778594Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:15.778637Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:15.778745Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035315 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:15.779784Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:15.779816Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:15.779904Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035315 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:15.781397Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:15.781601Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:15.782306Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:15.782422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:15.782545Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, 
serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:15.782709Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:15.782802Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:15.782983Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:15.783128Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:15.783184Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:15.785741Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:15.785819Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:15.785870Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:15.785915Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:15.785953Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:450:2332])) 2025-04-07T14:15:15.791537Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:15.791825Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:15.791898Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:450:2332])) 2025-04-07T14:15:15.791950Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
>> KqpRm::ResourceBrokerNotEnoughResources >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpNewEngine::MultipleBroadcastJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 5919, MsgBus: 8244 2025-04-07T14:14:48.328196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574526527649965:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:48.328262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001273/r3tmp/tmpwRrGhX/pdisk_1.dat 2025-04-07T14:14:48.593002Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5919, node 1 2025-04-07T14:14:48.658555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:48.658580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:48.658586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:48.658688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:48.678465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:48.678568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:48.680291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8244 TClient is connected to server localhost:8244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:49.101974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:50.876936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574535117585187:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.877038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.153920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.258210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574539412552584:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.258294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.258553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574539412552589:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.261744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:51.270030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574539412552591:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:14:51.489172Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574539412552683:2328], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-04-07T14:14:51.490530Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFhMTVkMjMtNjg1MTMzYTgtNzljZjJhZjctNWVjNDdlYjY=, ActorId: [1:7490574535117585169:2300], ActorState: ExecuteState, TraceId: 01jr89n8aefd6ntcbdg65nnysq, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-07T14:14:51.505759Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574539412552692:2332], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:14:51.506999Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFhMTVkMjMtNjg1MTMzYTgtNzljZjJhZjctNWVjNDdlYjY=, ActorId: [1:7490574535117585169:2300], ActorState: ExecuteState, TraceId: 01jr89n8b9fx2fzd93sqc0qx21, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 10917, MsgBus: 64805 2025-04-07T14:14:52.259289Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574545989522220:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.259385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001273/r3tmp/tmpYjzsDt/pdisk_1.dat 2025-04-07T14:14:52.366072Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:52.366528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:52.366829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:52.383578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10917, node 2 2025-04-07T14:14:52.419404Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:52.419426Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:52.419431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:52.419518Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64805 TClient is connected to server localhost:64805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:52.781771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.861865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574554579457425:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.861937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.876469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.907965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574554579457523:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.908050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.908399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574554579457528:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.912194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:54.920425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574554579457530:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:14:55.099611Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574558874424916:2327], status: PRECONDITION_FAILED, issues:
: Error: Typ ... -04-07T14:15:06.941795Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.987058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.019076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.049778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.077574Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.112306Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.155474Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.209073Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574607696258188:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.209168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.209217Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574607696258193:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.212311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:07.220987Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574607696258195:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:08.236484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:08.962084Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574590516386810:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:08.964120Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12197, MsgBus: 8277 2025-04-07T14:15:10.021201Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574620744544700:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:10.021253Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001273/r3tmp/tmpQVA4WP/pdisk_1.dat 2025-04-07T14:15:10.166218Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:10.183782Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:10.183878Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:10.185643Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12197, node 6 2025-04-07T14:15:10.227062Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:10.227080Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:10.227087Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:10.227215Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8277 TClient is connected to server localhost:8277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:10.765028Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:10.775510Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.849868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:11.031275Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:11.111150Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.669797Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574633629448306:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.670082Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.688937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.715599Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.745389Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.775278Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.810835Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.844697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.906746Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574633629448793:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.906862Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.907100Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574633629448798:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.910764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:13.922627Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574633629448800:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:15.026717Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574620744544700:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:15.026783Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:15.093414Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.532772Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035315568, txId: 281474976715673] shutting down 2025-04-07T14:15:15.738721Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035315778, txId: 281474976715675] shutting down 2025-04-07T14:15:15.879225Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035315918, txId: 281474976715677] shutting down >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpRm::Reduce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-04-07T14:15:17.401986Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:17.402486Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e83/r3tmp/tmpMd25Xw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:17.409235Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e83/r3tmp/tmpMd25Xw/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e83/r3tmp/tmpMd25Xw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4012279632029040693 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:17.459459Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:17.459759Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:17.476179Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:17.476316Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:17.476474Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:17.476543Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:17.476690Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:17.476736Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:17.476763Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:17.476784Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:15:17.476942Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.484988Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.485148Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.485202Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-07T14:15:17.485457Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:17.485534Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:17.485630Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:17.485661Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.485717Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.485905Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:17.485920Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.485954Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-07T14:15:17.486232Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:17.486360Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.486805Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.486894Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.486965Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.487089Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.487138Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:17.487270Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info 
from node: 1 2025-04-07T14:15:17.487378Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:17.487403Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:17.489591Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:450:2332]) priority=0 resources={0, 1000} 2025-04-07T14:15:17.489640Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.489673Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:17.489698Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.489725Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:450:2332])) 2025-04-07T14:15:17.489873Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2025-04-07T14:15:17.489920Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:450:2332]) priority=0 resources={0, 100000} 2025-04-07T14:15:17.489947Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.489980Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:450:2332]) 2025-04-07T14:15:17.490004Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:450:2332]) 2025-04-07T14:15:17.490055Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx largest failed memory allocation: 0B, tx total execution units: 0, started at: 2025-04-07T14:15:17.489534Z } >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> KqpRm::SingleSnapshotByExchanger [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::DqSourceSequentialLimit >> KqpRm::SnapshotSharingByExchanger >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::ScanKeyPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-04-07T14:15:17.089108Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:17.089635Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e84/r3tmp/tmp9olrnO/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:17.090272Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e84/r3tmp/tmp9olrnO/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e84/r3tmp/tmp9olrnO/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5895684313149305761 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:17.127665Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:17.128010Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:17.140690Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:17.140845Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:17.140972Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:17.141024Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:17.141131Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:17.141177Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:17.141198Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:17.141212Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:15:17.141334Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.152184Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.152388Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.152454Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.152791Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:17.152900Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:17.153025Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:17.153061Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.153148Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.153333Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:17.153357Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.153406Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035317 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:17.153719Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:17.153876Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.154411Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.154510Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.154630Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.154831Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:17.154934Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:17.155059Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-07T14:15:17.155219Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:17.155266Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:17.157945Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:17.158019Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.158065Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:17.158109Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.158155Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:450:2332])) 2025-04-07T14:15:17.158312Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:17.158379Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:17.158416Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.158462Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:17.158506Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:17.158543Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:450:2332])) 2025-04-07T14:15:17.158599Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:17.158806Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:17.158921Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035317 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-07T14:15:17.159130Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:18.154285Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:18.154438Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:18.154496Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:450:2332])) 2025-04-07T14:15:18.154538Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2025-04-07T14:15:18.154601Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2025-04-07T14:15:18.154652Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:18.154694Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:450:2332])) 2025-04-07T14:15:18.154735Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-07T14:15:18.154932Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:18.155162Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035318 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:18.155472Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:18.431499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:24.363024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:24.363118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:24.363156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:24.363198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:24.363260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:24.363313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:24.363382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:24.363748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:24.441303Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-07T14:13:24.441377Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:24.453135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:24.453281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:24.453456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:24.464178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:24.464370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:24.465033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:24.465409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:24.470312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:24.471915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:24.471979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:24.472150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:24.472192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:24.472230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:24.472343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:24.478570Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:24.600775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:24.601020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.601254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:24.601490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:24.601556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T14:13:24.604165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:24.604304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:24.604484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.604534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:24.604579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:24.604627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:24.609585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.609655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:24.609692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:24.611664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.611714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.611766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:24.611813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:24.616034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:24.629530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:24.629756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:24.630670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:24.630831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:24.630905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-07T14:13:24.631169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:24.631221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:24.631400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:24.631472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:24.634102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:24.634157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:24.634335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:24.634379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:24.634738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:24.634797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:24.634885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:24.634957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:24.635010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:24.635059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:24.635115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:24.635146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:24.635244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:24.635285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:24.635317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
ompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto 
VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:15:18.017506Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:15:18.017758Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 289us result status StatusSuccess 2025-04-07T14:15:18.018599Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 
0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:15:18.039985Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1180:2923] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:15:18.040086Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1117:2923] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:15:18.040225Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1180:2923] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035317978223 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035317978223 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744035317978223 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:15:18.042390Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1180:2923] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-07T14:15:18.042490Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1117:2923] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpRm::Reduce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-04-07T14:15:18.850992Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:18.851351Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e6f/r3tmp/tmpyi4Gc9/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:18.851822Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e6f/r3tmp/tmpyi4Gc9/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e6f/r3tmp/tmpyi4Gc9/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8701140547869361072 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:18.889643Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:18.889960Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:18.903182Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:18.903301Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:18.903438Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:18.903484Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:18.903564Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:18.903594Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:18.903611Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:18.903623Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:15:18.903729Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:18.912949Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035318 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:18.913137Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:18.913203Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035318 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:18.913463Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:18.913562Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:18.913709Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:18.913747Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:18.913837Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035318 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:18.913998Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:18.914012Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:18.914061Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035318 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:18.914373Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:18.914499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:18.914945Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:18.915025Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:18.915128Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:18.915237Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:18.915296Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:18.915437Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-07T14:15:18.915517Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:18.915549Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:18.917907Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:18.917985Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:18.918047Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:18.918087Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:18.918131Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:18.918312Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:18.918461Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:450:2332]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2025-04-07T14:15:18.918495Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:18.918524Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:18.918553Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
>> KqpReturning::ReturningTypes [GOOD] >> KqpRm::NotEnoughMemory >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopParameter >> KqpRm::NotEnoughExecutionUnits >> KqpNewEngine::DeleteWithInputMultiConsumption [GOOD] >> KqpRm::ManyTasks >> KqpRm::NotEnoughMemory [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 19396, MsgBus: 19498 2025-04-07T14:14:27.675190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574438528407584:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:27.675284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128c/r3tmp/tmpBVUNJ2/pdisk_1.dat 2025-04-07T14:14:28.017913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19396, node 1 2025-04-07T14:14:28.081136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:28.081159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:28.081168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:28.081244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:28.086687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:28.086821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:28.089040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19498 TClient is connected to server localhost:19498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:28.461048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:28.481632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:28.574399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:28.698655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:28.770382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:30.387388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574451413311179:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.387504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.635098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.655639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.683718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.701586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.762915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.826363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:30.855174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574451413311679:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.855228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.855232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574451413311684:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:30.857634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:30.864305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574451413311686:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:13: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 18081, MsgBus: 3518 2025-04-07T14:14:32.844542Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574460481449274:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:32.844627Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128c/r3tmp/tmprXfGKY/pdisk_1.dat 2025-04-07T14:14:32.937492Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:32.949839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:32.949900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18081, node 2 2025-04-07T14:14:32.951491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:32.986430Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:32.986457Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:32.986465Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:32.986550Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3518 TClient is connected to server localhost:3518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:33.369890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:33.389529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:33.464513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:33.596113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:33.670689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:35.520767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574473366352855:2377], DatabaseId: /Root, PoolId: default, Failed ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.499949Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.142342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574617820636261:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:09.142463Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:09.198196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.227483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.268596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.306359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.342891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.377956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:09.431359Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574617820636757:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:09.431454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:09.431479Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574617820636762:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:09.435226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:09.443182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574617820636764:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:10.364799Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574600640765375:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:10.364870Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:10.757203Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 10780, MsgBus: 2746 2025-04-07T14:15:12.756977Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574630451695932:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:12.757054Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128c/r3tmp/tmptflVA1/pdisk_1.dat 2025-04-07T14:15:12.863655Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:12.898592Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:12.898670Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:12.900089Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10780, node 6 2025-04-07T14:15:12.953268Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:12.953295Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:12.953309Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:12.953441Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2746 TClient is connected to server localhost:2746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:13.508742Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:13.518406Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.593019Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.764394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.851946Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:16.635751Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574647631566820:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.635966Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.680391Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.714082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.749038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.788396Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.821867Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.894420Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.965137Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574647631567321:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.965236Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.965513Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574647631567326:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.970501Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:16.979805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574647631567328:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:17.757062Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574630451695932:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:17.757155Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> KqpRm::DisonnectNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-04-07T14:15:20.572485Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:20.572994Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e53/r3tmp/tmptUwAQE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:20.573591Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e53/r3tmp/tmptUwAQE/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e53/r3tmp/tmptUwAQE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6460301572791218893 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:20.610073Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:20.610403Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:20.624204Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:20.624342Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:20.624511Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:20.624580Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:20.624713Z node 2 
:KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:20.624759Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:20.624784Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:20.624803Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:20.624942Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:20.636500Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035320 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:20.636709Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:20.636806Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035320 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:20.637145Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:20.637271Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:20.637419Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:20.637463Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:20.637574Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035320 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:20.637810Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:20.637837Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:20.637899Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035320 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:20.638348Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:20.638538Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:20.639241Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:20.639354Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:20.639493Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:20.639664Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:20.639784Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:20.639978Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:20.640122Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:20.640182Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpNotNullColumns::InsertNotNullPg [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> KqpRm::ManyTasks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteWithInputMultiConsumption [GOOD] Test command err: Trying to start YDB, gRPC: 24073, MsgBus: 17215 2025-04-07T14:14:46.322674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574520204852323:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:46.322756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001280/r3tmp/tmpnfJmkF/pdisk_1.dat 2025-04-07T14:14:46.629197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24073, node 1 2025-04-07T14:14:46.691888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:46.691996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:46.697885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:46.729335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:46.729365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:46.729373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:46.729462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17215 TClient is connected to server localhost:17215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:47.239362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:47.261016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:47.390336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:47.535656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:47.597315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.985268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574528794788610:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:48.985354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.198494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.228652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.253830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.277889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.304824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.331074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.376375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574533089756401:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.376439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.376497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574533089756406:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.379564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:49.388383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574533089756408:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 25700, MsgBus: 20833 2025-04-07T14:14:51.273740Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574539854199854:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:51.273807Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001280/r3tmp/tmp1mezGu/pdisk_1.dat 2025-04-07T14:14:51.359712Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25700, node 2 2025-04-07T14:14:51.400295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:51.400360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:51.405344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:51.434054Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:51.434072Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:51.434087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:51.434172Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20833 TClient is connected to server localhost:20833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:51.845789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.852852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:51.874338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.935691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:14:52.088507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.167379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.028786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574552739103433:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.028865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.066138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:08.010547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:08.109533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.563792Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574622570303646:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.563891Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.614827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.691895Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.726383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.757967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.826355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.861819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.898895Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574622570304144:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.898960Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574622570304149:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.898968Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.902489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:10.912216Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574622570304151:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:12.092769Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574609685400052:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:12.092848Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17583, MsgBus: 31710 2025-04-07T14:15:14.163445Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574639873405270:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:14.163511Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001280/r3tmp/tmph1zs8C/pdisk_1.dat 2025-04-07T14:15:14.259740Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:14.274454Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:14.274560Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:14.275964Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17583, node 6 2025-04-07T14:15:14.332392Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:14.332419Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:14.332430Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:14.332552Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31710 TClient is connected to server localhost:31710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:14.869387Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.883646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:15:14.949707Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.128746Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:15.208376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.970250Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574652758308884:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.970376Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:18.010898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.043321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.072932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.103626Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.136405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.189130Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.237079Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574657053276679:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:18.237178Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:18.237424Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574657053276685:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:18.242357Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:18.251652Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574657053276687:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:19.177257Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574639873405270:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:19.177324Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2025-04-07T14:13:05.036552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574082996323752:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:13:05.036635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001892/r3tmp/tmpUSX65i/pdisk_1.dat 2025-04-07T14:13:05.641088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:13:05.643374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:13:05.643454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:13:05.661638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1595 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-07T14:13:06.012890Z node 1 :TX_PROXY DEBUG: actor# [1:7490574082996323982:2100] Handle TEvNavigate describe path dc-1 2025-04-07T14:13:06.012932Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574087291291552:2253] HANDLE EvNavigateScheme dc-1 2025-04-07T14:13:06.013052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7490574082996324021:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:13:06.013092Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7490574082996324021:2119], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-07T14:13:06.016797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-07T14:13:06.025930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356404:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574087291291557:2254] 2025-04-07T14:13:06.025997Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574078701356404:2049] Subscribe: subscriber# [1:7490574087291291557:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.026053Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356407:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574087291291558:2254] 2025-04-07T14:13:06.026067Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574078701356407:2052] Subscribe: subscriber# 
[1:7490574087291291558:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.026087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356410:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7490574087291291559:2254] 2025-04-07T14:13:06.026100Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7490574078701356410:2055] Subscribe: subscriber# [1:7490574087291291559:2254], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-07T14:13:06.026143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291557:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574078701356404:2049] 2025-04-07T14:13:06.026164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291558:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574078701356407:2052] 2025-04-07T14:13:06.026181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291559:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574078701356410:2055] 2025-04-07T14:13:06.026242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574087291291554:2254] 2025-04-07T14:13:06.026274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574087291291555:2254] 2025-04-07T14:13:06.026337Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7490574087291291553:2254][/dc-1] Set up state: owner# [1:7490574082996324021:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.026429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7490574087291291556:2254] 2025-04-07T14:13:06.026473Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7490574087291291553:2254][/dc-1] Path was already updated: owner# [1:7490574082996324021:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-07T14:13:06.026520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291557:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291554:2254], cookie# 1 2025-04-07T14:13:06.026536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291558:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291555:2254], 
cookie# 1 2025-04-07T14:13:06.026548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291559:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291556:2254], cookie# 1 2025-04-07T14:13:06.026573Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356404:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574087291291557:2254] 2025-04-07T14:13:06.026593Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356404:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291557:2254], cookie# 1 2025-04-07T14:13:06.026609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356407:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574087291291558:2254] 2025-04-07T14:13:06.026623Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356407:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291558:2254], cookie# 1 2025-04-07T14:13:06.026634Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356410:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7490574087291291559:2254] 2025-04-07T14:13:06.026653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7490574078701356410:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7490574087291291559:2254], cookie# 1 2025-04-07T14:13:06.037108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291557:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574078701356404:2049], cookie# 1 2025-04-07T14:13:06.037152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291558:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574078701356407:2052], cookie# 1 2025-04-07T14:13:06.037178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7490574087291291559:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574078701356410:2055], cookie# 1 2025-04-07T14:13:06.037217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574087291291554:2254], cookie# 1 2025-04-07T14:13:06.037252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-07T14:13:06.037270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574087291291555:2254], cookie# 1 2025-04-07T14:13:06.037296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-07T14:13:06.037318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7490574087291291556:2254], cookie# 1 2025-04-07T14:13:06.037336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7490574087291291553:2254][/dc-1] Unexpected sync response: sender# [1:7490574087291291556:2254], cookie# 1 2025-04-07T14:13:06.095328Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7490574082996324021:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-07T14:13:06.095654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7490574082996324021:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 
18984051:2110], cacheItem# { Subscriber: { Subscriber: [3:7490574126998853656:2350] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:19.205084Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574659574799132:2722], recipient# [3:7490574659574799131:2679], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.023878Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490574106408919338:2200], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.024046Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490574106408919338:2200], cacheItem# { Subscriber: { Subscriber: [2:7490574110703887124:2572] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.024162Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490574664754670601:3621], recipient# [2:7490574664754670600:2659], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.079281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574109818984051:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.079439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574109818984051:2110], cacheItem# { Subscriber: { 
Subscriber: [3:7490574114113951740:2344] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.079544Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574663869766430:2723], recipient# [3:7490574663869766429:2680], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.100234Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574109818984051:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.100375Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574109818984051:2110], cacheItem# { Subscriber: { Subscriber: [3:7490574114113951740:2344] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.100483Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574663869766432:2724], recipient# [3:7490574663869766431:2681], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.153475Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490574106408919338:2200], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.153737Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490574106408919338:2200], cacheItem# { Subscriber: { Subscriber: [2:7490574110703887124:2572] DomainOwnerId: 72057594046644480 Type: 
2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.153853Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7490574106408919338:2200], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.153905Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490574664754670607:3622], recipient# [2:7490574664754670605:2660], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.153929Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7490574106408919338:2200], cacheItem# { Subscriber: { Subscriber: [2:7490574119293821960:2763] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.153989Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7490574664754670608:3623], recipient# [2:7490574664754670606:2661], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.205727Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7490574109818984051:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-07T14:15:20.205871Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7490574109818984051:2110], cacheItem# { Subscriber: { Subscriber: [3:7490574126998853656:2350] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-07T14:15:20.205955Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7490574663869766434:2725], recipient# [3:7490574663869766433:2682], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpNewEngine::DqSourceLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-04-07T14:15:21.482942Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:21.483471Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e38/r3tmp/tmp3Ssc68/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:21.485946Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e38/r3tmp/tmp3Ssc68/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e38/r3tmp/tmp3Ssc68/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8429553359608151506 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:21.520091Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.520405Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.535585Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:21.535716Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:21.535878Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:21.535929Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:21.536013Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.536039Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:21.536056Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.536068Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:15:21.536167Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.544605Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.544814Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.544883Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.545178Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.545279Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.545408Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.545453Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.545560Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.545793Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.545819Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.545874Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.546293Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:21.546472Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.547086Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.547200Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.547307Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.547460Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.547569Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:21.547731Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-07T14:15:21.547894Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.547949Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-04-07T14:15:21.563304Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:21.563870Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e48/r3tmp/tmpXcgnr4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:21.564478Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e48/r3tmp/tmpXcgnr4/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e48/r3tmp/tmpXcgnr4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3480411508454588236 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:21.601193Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.601490Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.614727Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:21.614855Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:21.615007Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:21.615070Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:21.615177Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.615217Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config 
dispatcher. 2025-04-07T14:15:21.615244Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.615262Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:21.615393Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.624175Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.624360Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.624435Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.624803Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.624912Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.625055Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.625093Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.625190Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.625409Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.625431Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.625490Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.625882Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:21.626063Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.626686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.626795Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.626918Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.627093Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from 
subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.627178Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:21.627513Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.627665Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.627727Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:21.630382Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.630465Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.630508Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.630545Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.630583Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:21.630743Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.630920Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.630952Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.630978Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.631007Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631035Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:450:2332])) 2025-04-07T14:15:21.631058Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.631139Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.631173Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631200Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.631219Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631252Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:450:2332])) 2025-04-07T14:15:21.631273Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.631355Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.631385Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631416Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.631445Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631469Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:450:2332])) 2025-04-07T14:15:21.631488Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.631568Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.631586Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631609Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.631643Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631663Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:450:2332])) 2025-04-07T14:15:21.631683Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.631758Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.631793Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631829Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.631849Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.631882Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:450:2332])) 2025-04-07T14:15:21.631923Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.632004Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.632025Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632046Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.632078Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632098Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:450:2332])) 2025-04-07T14:15:21.632124Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.632203Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.632229Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632255Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.632274Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632293Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:450:2332])) 2025-04-07T14:15:21.632311Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.632409Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:21.632430Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632451Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.632470Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.632489Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:450:2332])) 2025-04-07T14:15:21.632515Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.632589Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:21.632644Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:21.632700Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |96.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> SystemView::CollectPreparedQueries >> BackupRestoreS3::RestoreTablePartitioningSettings >> KqpSort::TopSortResults [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> KqpRm::DisonnectNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLimit [GOOD] Test command err: Trying to start YDB, gRPC: 29102, MsgBus: 26313 2025-04-07T14:14:47.153073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574523678546516:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:47.153304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127f/r3tmp/tmpQQ1O0x/pdisk_1.dat 2025-04-07T14:14:47.455677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29102, node 1 2025-04-07T14:14:47.529742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:47.529761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:47.529767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:47.529898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:47.539004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:47.539140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:47.545510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26313 TClient is connected to server localhost:26313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:48.025106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:48.043462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.150175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.283921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.356434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:49.970791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574532268482671:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:49.970880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.208416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.234141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.264248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.286401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.319482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.388618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.439329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574536563450468:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.439392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.439584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574536563450473:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.442694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:50.450732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574536563450475:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 22738, MsgBus: 14187 2025-04-07T14:14:52.602675Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574542430917520:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.602743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127f/r3tmp/tmpDBiRnu/pdisk_1.dat 2025-04-07T14:14:52.696750Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22738, node 2 2025-04-07T14:14:52.734025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:52.734132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:52.737489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:52.762968Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:52.762994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:52.763007Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:52.763111Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14187 TClient is connected to server localhost:14187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:53.141834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.155210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.201019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:53.344385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.420238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:55.490682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574555315821121:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.490751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.524016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.550511Z node 2 :FLAT_TX_SCHEMESH ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.547160Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.615205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.442692Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574633470244975:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.442777Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.474121Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.502757Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.532726Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.564682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.598420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.634483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.714059Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574633470245474:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.714171Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.714261Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574633470245479:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.717361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:13.725757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574633470245481:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:14.580765Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574616290374100:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:14.580836Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24155, MsgBus: 27654 2025-04-07T14:15:16.077802Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574647714078329:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:16.077864Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127f/r3tmp/tmpJSho0P/pdisk_1.dat 2025-04-07T14:15:16.176807Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:16.190474Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:16.190580Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:16.192681Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24155, node 6 2025-04-07T14:15:16.238421Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:16.238453Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:16.238464Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:16.238565Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27654 TClient is connected to server localhost:27654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:16.774879Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:16.788249Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:16.867783Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.027132Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.106504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:19.924510Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574660598981937:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:19.924622Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:19.976301Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.007688Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.042238Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.070462Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.107350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.144106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.202203Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574664893949728:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.202306Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.202481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574664893949733:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.206591Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:20.218038Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574664893949735:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:21.078228Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574647714078329:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:21.078295Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan >> KqpMergeCn::SortBy_Int32 [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage >> Sharding::XXUsage [GOOD] >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::ComplexKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-04-07T14:15:21.914045Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:21.914580Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001de9/r3tmp/tmp1wwDLi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:21.915115Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001de9/r3tmp/tmp1wwDLi/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001de9/r3tmp/tmp1wwDLi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2649267778067727081 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:21.949438Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.949676Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:21.963478Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:21.963654Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:21.963798Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:21.963892Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:21.964021Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.964065Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:21.964147Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:21.964172Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:15:21.964313Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.976868Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.977075Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.977155Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.977523Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.977664Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:21.977803Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.977854Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.977951Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.978189Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:21.978216Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.978284Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035321 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.978741Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:21.978940Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.979593Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.979729Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.979849Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.980034Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:21.980173Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:21.980338Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-07T14:15:21.980515Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.980562Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:22.930505Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:22.930602Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:22.930803Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-07T14:15:22.930948Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-07T14:15:22.931500Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-07T14:15:22.931906Z node 2 :TX_PROXY WARN: actor# [2:141:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-07T14:15:22.932038Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-07T14:15:22.932232Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2071] ServerId# [1:346:2266] TabletId# 72057594037932033 PipeClientId# [2:75:2071] 2025-04-07T14:15:22.932414Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:22.932450Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:455:2101], reason: tenant updated 2025-04-07T14:15:22.932608Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:22.934758Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:22.934888Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:23.240453Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 11613487667212403472 7212226610915941473 15968839847660419085 7236134593000599515 13145430341776602742 11831624487883095348 5825058294684683015 344736243182460676 8522399419932122958 14202731975910647996 7258072237907732274 5192785612051081617 8055163334277481150 18381938475196163809 14143713552957716591 7752540946284457821 2215691323566585544 14249086062349091987 4279329532751381157 11868008111975342418 9940663889374234286 2125223115335016043 2061221457062257334 14373703861948700108 10380598872615810396 14342301752949937276 18184658526525824907 5611598268096064447 10154398661159590829 13421847070131922632 10992629554354664670 18118892741378560933 4701750536817736521 7423632527461939749 16856315411079233564 7363091295631754573 12729821029034116370 7340278752194042354 1583454436974167190 15886133561478063916 17099506110185566985 4121290740197983060 11548832867833153762 10973687362131329962 13550602526582530368 836805572989745269 9724798731430574977 7651027280358069471 2982184711617266753 6097570526445761639 15744549514892775363 14876688792313761939 14265471151832606246 18345913892515183696 12575231762280956496 15945569676987145378 9379459363245754582 10206914935263148174 5730161105789774465 13518776182901799524 18134719151608698138 3647164121188937286 14017939087453884258 1516698054498664865 3888724543603907886 
14492644186862489695 11316217352990749347 702612686933208074 15158271565907928737 13032600662817810927 765293966243412708 9323970140367294055 4975399698819586833 3450045851260199484 9885826544986663315 3891227068453855272 15368802308526829738 181734115647453944 8014314461159160447 8684047190260745835 17344451199678322129 6700589354200866241 2476914794515322250 17196233709816285049 11691536283657165644 2729922283974485148 11706165882762468062 13636293406444559057 1356896364622759335 10848837453134323423 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopSortResults [GOOD] Test command err: Trying to start YDB, gRPC: 24371, MsgBus: 24117 2025-04-07T14:14:51.143067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574538306744843:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:51.143156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126c/r3tmp/tmpDcvVPj/pdisk_1.dat 2025-04-07T14:14:51.404330Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24371, node 1 2025-04-07T14:14:51.479157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:51.479294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:51.480361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:51.485024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:51.485076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:51.485088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:51.485182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24117 TClient is connected to server localhost:24117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:51.939878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:51.953643Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:51.971014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.089986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.231984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.305036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.818988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574546896681128:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:53.819096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.040557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.067280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.090710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.114000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.138226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.165351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.197016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574551191648914:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.197072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.197093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574551191648919:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.199484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:54.206806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574551191648921:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 19612, MsgBus: 1757 2025-04-07T14:14:56.084491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574562490763004:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:56.084566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126c/r3tmp/tmpDBhE1R/pdisk_1.dat 2025-04-07T14:14:56.235795Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:56.260607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:56.260750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:56.262486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19612, node 2 2025-04-07T14:14:56.313041Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:56.313061Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:56.313067Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:56.313134Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1757 TClient is connected to server localhost:1757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:56.706559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.715183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.779624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:56.913714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.980220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.917677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574571080699300:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.917785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.975957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... e 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:08.360330Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.560415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574619810716679:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.560495Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.599107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.629302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.660410Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.694777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.731361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.763694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.804198Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574619810717173:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.804300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.804487Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574619810717178:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.807995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:10.818560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574619810717180:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:12.349730Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574606925813088:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:12.349806Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18451, MsgBus: 6461 2025-04-07T14:15:13.824554Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574634932385097:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:13.824648Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126c/r3tmp/tmp4482hy/pdisk_1.dat 2025-04-07T14:15:13.911036Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:13.931985Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:13.932058Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:13.933049Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18451, node 5 2025-04-07T14:15:13.977292Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:13.977317Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:13.977328Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:13.977449Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6461 TClient is connected to server localhost:6461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:14.457762Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.479226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:14.537799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.670617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.763220Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.222230Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574652112255969:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.222323Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.260541Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.330914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.373556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.406567Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.475912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.545307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.593197Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574652112256474:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.593266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.593291Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574652112256479:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.596122Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:17.605810Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574652112256481:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:18.794615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.853308Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574634932385097:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:18.853370Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 18654, MsgBus: 25985 2025-04-07T14:14:55.264011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574555765768048:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:55.264121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125f/r3tmp/tmpy9PW7P/pdisk_1.dat 2025-04-07T14:14:55.593032Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18654, node 1 2025-04-07T14:14:55.663316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:55.663327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:55.663344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:55.663356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:55.663416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:55.663440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:55.665706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25985 TClient is connected to server localhost:25985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:56.140004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.152869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:57.700254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574564355703271:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:57.700355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:57.930810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.052164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574568650670668:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.052227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.052360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574568650670673:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.056278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:58.064221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574568650670675:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:14:58.394280Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574568650670790:2334], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-04-07T14:14:58.394604Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2VmMDZiZjItZDQ5MTViNmQtY2U4ZWI0ZDEtYWFmNGQ5ZjI=, ActorId: [1:7490574564355703253:2300], ActorState: ExecuteState, TraceId: 01jr89nf26839gy2scyztp3s59, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:14:58.413387Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574568650670799:2338], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-04-07T14:14:58.414711Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2VmMDZiZjItZDQ5MTViNmQtY2U4ZWI0ZDEtYWFmNGQ5ZjI=, ActorId: [1:7490574564355703253:2300], ActorState: ExecuteState, TraceId: 01jr89nf35fre5s8amqk5gqh28, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 26280, MsgBus: 24081 2025-04-07T14:14:59.194525Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574576038263751:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:59.194587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125f/r3tmp/tmp9AFtT7/pdisk_1.dat 2025-04-07T14:14:59.271734Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26280, node 2 2025-04-07T14:14:59.323084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:59.323169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:59.324889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:59.333159Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:59.333181Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:59.333189Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:59.333265Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24081 TClient is connected to server localhost:24081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:59.689417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.693787Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:02.228563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574588923166268:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.228657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.244165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.283359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574588923166365:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.283426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.283472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574588923166370:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.287299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:02.296864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574588923166372:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:02.634098Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574588923166485:2334], status: GEN ... cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574635539530047:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:13.913601Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125f/r3tmp/tmpw03zlo/pdisk_1.dat 2025-04-07T14:15:14.033032Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:14.042996Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:14.043090Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:14.044257Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5324, node 5 2025-04-07T14:15:14.093546Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:14.093568Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:14.093576Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:14.093676Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19950 TClient is connected to server localhost:19950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:14.548344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:16.979689Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574648424432553:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.979813Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:16.996654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.054417Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574652719399947:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.054505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.054537Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574652719399952:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.057786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:17.066555Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574652719399954:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:17.478308Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7490574652719400076:2337], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:15:17.480260Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=Mjg4NTY3NDMtYzEwYWE0NzAtZjU2NTRlYjAtNjhlNzc1Y2Y=, ActorId: [5:7490574648424432550:2301], ActorState: ExecuteState, TraceId: 01jr89p1pp76e50pszyve4zdam, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 5341, MsgBus: 13357 2025-04-07T14:15:18.342603Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574657398585881:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:18.342667Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125f/r3tmp/tmpFDcA6X/pdisk_1.dat 2025-04-07T14:15:18.439754Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:18.467992Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:18.468103Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:18.469526Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5341, node 6 2025-04-07T14:15:18.517322Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:18.517348Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:18.517357Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:18.517488Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13357 TClient is connected to server localhost:13357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:19.047152Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:19.056085Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:21.898229Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574670283488395:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.898312Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.914702Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.997428Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574670283488497:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.997514Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.997688Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574670283488502:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.001803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:22.011484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574670283488504:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:22.612464Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:22.637455Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574674578455924:2338], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-07T14:15:22.637676Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGI2MzM3MDMtMTg5N2IxY2EtNTA2OWU5OWQtNzMyMTBkZjU=, ActorId: [6:7490574670283488377:2301], ActorState: ExecuteState, TraceId: 01jr89p6jm4hchfrfdpw2zwghj, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> KqpRm::SnapshotSharingByExchanger [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 3819, MsgBus: 31706 2025-04-07T14:14:54.412506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574553999569936:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:54.412612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001261/r3tmp/tmp5FhHZ1/pdisk_1.dat 2025-04-07T14:14:54.709314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3819, node 1 2025-04-07T14:14:54.771821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:54.771847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:54.771854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:54.771972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:54.783131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:54.783270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:54.784889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31706 TClient is connected to server localhost:31706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
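[editor's note] The two KQP_COMPILE_ACTOR failures above appear to be the expected negative outcomes of the KqpNotNullColumns tests: on node 5 the NULL is rejected at type annotation ("Failed to convert 'Value': Null to String", code 2031), while on node 6 the same condition surfaces from the literal executer as BAD_REQUEST ("Tried to insert NULL value into NOT NULL column: Value"). A minimal YQL sketch that provokes this class of error is shown below; the table name and column types are illustrative and are not taken from the test sources.

    -- Illustrative only: a table with a NOT NULL data column, as in the failing inserts above.
    CREATE TABLE `/Root/TestNotNull` (
        Key   Uint64 NOT NULL,
        Value String NOT NULL,
        PRIMARY KEY (Key)
    );

    -- Rejected during query compilation: the Null literal cannot be converted to the
    -- non-optional String column, matching "Failed to convert 'Value': Null to String"
    -- and "Failed to convert input columns types to scheme types, code: 2031" in the log.
    UPSERT INTO `/Root/TestNotNull` (Key, Value) VALUES (1, NULL);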
2025-04-07T14:14:55.206500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.989748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574562589505145:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.989864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:57.209795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 24311, MsgBus: 13650 2025-04-07T14:14:58.051064Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574569901956656:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:58.051156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001261/r3tmp/tmptejPDV/pdisk_1.dat 2025-04-07T14:14:58.137114Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24311, node 2 2025-04-07T14:14:58.174905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:58.174990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:58.176693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:58.181585Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:58.181606Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:58.181615Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:58.181696Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13650 TClient is connected to server localhost:13650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:58.523572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:00.647165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574578491891881:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:00.647241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:00.657442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:00.699873Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 17780, MsgBus: 12121 2025-04-07T14:15:01.355892Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574583877283868:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:01.355957Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001261/r3tmp/tmpaGJ6fR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17780, node 3 2025-04-07T14:15:01.454486Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:01.454560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:01.456374Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:01.466058Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:01.488846Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:01.488868Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:01.488877Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:01.488970Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12121 TClient is connected to server localhost:12121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:01.858304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:01.862900Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:04.163635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 1137, MsgBus: 22085 2025-04-07T14:15:04.885500Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574596146862276:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:04.885602Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001261/r3tmp/tmpXIXpUj/pdisk_1.dat 2025-04-07T14:15:04.973835Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1137, node 4 2025-04-07T14:15:05.021642Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:05.021736Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0, ... , DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.277019Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.336555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.371992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.407769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.440656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.515152Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.585507Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.671766Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574640717151702:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.671812Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574640717151707:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.671861Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.675082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:14.683457Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574640717151709:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:15.697830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.930185Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574623537280330:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:15.930269Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:16.374924Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035316415, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 2671, MsgBus: 28474 2025-04-07T14:15:17.242175Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574651646515178:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:17.242236Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001261/r3tmp/tmpdA6JOj/pdisk_1.dat 2025-04-07T14:15:17.334448Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:17.362522Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:17.362624Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:17.364451Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2671, node 6 2025-04-07T14:15:17.422616Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:17.422643Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:17.422654Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:17.422770Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28474 TClient is connected to server localhost:28474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
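[editor's note] The recurring "Resource pool default not found or you don't have access permissions" warnings, followed by an ESchemeOpCreateResourcePool suboperation and a "Transaction ... completed, doublechecking" retry, are the workload service lazily creating the default resource pool on first query during test bootstrap, so they read as expected startup noise rather than test failures. For comparison, a pool can also be created explicitly; the sketch below assumes the documented YQL workload-management syntax, and the pool name and limits are examples rather than values used by these tests.

    -- Illustrative sketch, assuming the documented CREATE RESOURCE POOL syntax;
    -- the pool name and limits here are hypothetical.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );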
2025-04-07T14:15:17.905130Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.921614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:15:17.994868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.125061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:18.198582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.870304Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574664531418772:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.870377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.989150Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.022831Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.059212Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.091067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.133518Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.178124Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.233215Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574668826386565:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.233300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.233371Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574668826386570:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.241927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:21.257553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574668826386572:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:22.243102Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574651646515178:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:22.243163Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:22.349686Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.104029Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035323142, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-04-07T14:15:19.781007Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:15:19.781519Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e6d/r3tmp/tmpYXUynI/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:15:19.782045Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e6d/r3tmp/tmpYXUynI/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e6d/r3tmp/tmpYXUynI/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8616834890726680366 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:15:19.819899Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:19.820205Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:15:19.833716Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:15:19.833834Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start 
KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:15:19.833964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:15:19.834039Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:15:19.834155Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:19.834198Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:19.834226Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:15:19.834243Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:15:19.834371Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:19.842775Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035319 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:19.842962Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:19.843025Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035319 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:19.843348Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:19.843458Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:15:19.843586Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:19.843626Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:19.843737Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035319 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:19.843960Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:15:19.843985Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:19.844042Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035319 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:19.844428Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:15:19.844555Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:19.845216Z node 
2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:19.845322Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:19.845454Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:19.845650Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:15:19.845732Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:19.845866Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:19.846072Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:19.846123Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:20.792027Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:20.792136Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:20.792253Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:20.792308Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:20.792354Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:20.792385Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:20.792422Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:20.792594Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:20.792684Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:450:2332]) priority=0 resources={0, 100} 2025-04-07T14:15:20.792718Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:20.792768Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:450:2332]) from queue queue_kqp_resource_manager 2025-04-07T14:15:20.792804Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:450:2332]) to queue queue_kqp_resource_manager 2025-04-07T14:15:20.792837Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:450:2332])) 2025-04-07T14:15:20.792900Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:20.792953Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:20.793083Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035320 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-07T14:15:20.793395Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.056386Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:21.056516Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:451:2098]) priority=0 resources={0, 100} 2025-04-07T14:15:21.056556Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:451:2098]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.056594Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:451:2098]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.056639Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:451:2098]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.056705Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:451:2098])) 2025-04-07T14:15:21.056828Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.056890Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:451:2098]) priority=0 resources={0, 100} 2025-04-07T14:15:21.056929Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:451:2098]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.056973Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:451:2098]) from queue queue_kqp_resource_manager 2025-04-07T14:15:21.057033Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:451:2098]) to queue queue_kqp_resource_manager 2025-04-07T14:15:21.057067Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:451:2098])) 2025-04-07T14:15:21.057145Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-07T14:15:21.057272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.057479Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035321 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-07T14:15:21.057801Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:21.322133Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:21.322246Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:21.322293Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350100 (remove task kqp-1-1-1 (1 by [1:450:2332])) 2025-04-07T14:15:21.322327Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200200 2025-04-07T14:15:21.322398Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-07T14:15:21.322467Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:450:2332]) (release resources {0, 100}) 2025-04-07T14:15:21.322521Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350100 to 0.200200 (remove task kqp-2-1-2 (2 by [1:450:2332])) 2025-04-07T14:15:21.322558Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-07T14:15:21.322608Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.322753Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035322 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.323031Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:15:21.609501Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:15:21.609629Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:451:2098]) (release resources {0, 100}) 2025-04-07T14:15:21.609682Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:451:2098])) 2025-04-07T14:15:21.609713Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-07T14:15:21.609764Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-07T14:15:21.609814Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:451:2098]) (release resources {0, 100}) 2025-04-07T14:15:21.609848Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:451:2098])) 2025-04-07T14:15:21.609878Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-07T14:15:21.609930Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:15:21.610066Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035323 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:15:21.610310Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:15:24.229260Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request |96.1%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> KqpRanges::ScanKeyPrefix [GOOD] >> KqpNewEngine::MultipleBroadcastJoin [GOOD] >> KqpNotNullColumns::FailedMultiEffects [GOOD] >> KqpPg::InsertNoTargetColumns_Simple >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::CreateTableBulkUpsertAndRead >> KqpSort::TopParameter [GOOD] >> KqpSort::TopParameterFilter >> KqpPg::ValuesInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63657, MsgBus: 19228 2025-04-07T14:14:48.211339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574528744159769:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:48.211405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127d/r3tmp/tmpezXoOw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63657, node 1 2025-04-07T14:14:48.497742Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:48.499770Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:48.502215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:48.545236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:48.545272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:48.545281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:48.545385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:48.558631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:48.558741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:48.560489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19228 TClient is connected to server localhost:19228 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:48.961825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.973691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:50.849676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574537334094990:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.849805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.108271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.212168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.239566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.266110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.289814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.309790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.339815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.367403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.399798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.425236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.449504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.471000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.496585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.533735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.557670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.611055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2025-04-07T14:14:51.634651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.672734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574541629063594:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.672817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.762300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574541629063888:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.762366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.762368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574541629063893:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.764553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-07T14:14:51.771015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574541629063895:2422], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-07T14:14:53.211731Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574528744159769:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:53.211803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] Trying to start YDB, gRPC: 28015, MsgBus: 14246 2025-04-07T14:14:54.927143Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574553939471107:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:54.927208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127d/r3tmp/tmpEz3a7o/pdisk_1.dat 2025-04-07T14:14:55.025034Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:55.037970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:55.038056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:55.039527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28015, node 2 2025-04-07T14:14:55.077834Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:55.077861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:55.077868Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:55.077971Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14246 TClient is connected to server localhost:14246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 Sta ... ard: 72057594046644480 waiting... 2025-04-07T14:15:12.368727Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:12.440134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.914591Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574639702577176:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.914678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.967093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.002625Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.042787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.070585Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.101854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.134758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.191046Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574643997544969:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.191128Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.191165Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574643997544974:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.194969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:15.206785Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574643997544976:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:16.404261Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574626817673596:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:16.404357Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18544, MsgBus: 5791 2025-04-07T14:15:18.313427Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574656693768195:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:18.340065Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127d/r3tmp/tmptbL5bK/pdisk_1.dat 2025-04-07T14:15:18.424547Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18544, node 6 2025-04-07T14:15:18.446131Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:18.446236Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:18.455270Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:18.498991Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:18.499013Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:18.499019Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:18.499112Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5791 TClient is connected to server localhost:5791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:15:19.035605Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:19.041766Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:19.052211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:19.107477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:19.291190Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:19.389687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:22.122496Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574673873639012:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.122591Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.170820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.200849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.246114Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.284400Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.327837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.400238Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.447400Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574673873639512:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.447493Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.447560Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574673873639517:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.451344Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:22.460000Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574673873639519:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:23.311289Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574656693768195:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:23.311372Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::InsertFromSelect_Simple >> KqpPg::ReadPgArray >> KqpPg::CopyTableSerialColumns >> KqpPg::NoTableQuery >> KqpPg::TypeCoercionBulkUpsert >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 2792, MsgBus: 25733 2025-04-07T14:14:47.372096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574523144474549:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:47.373959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127e/r3tmp/tmp7fXz3M/pdisk_1.dat 2025-04-07T14:14:47.697414Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2792, node 1 2025-04-07T14:14:47.762939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:47.763085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:47.764543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:47.766994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:47.767025Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:47.767040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:47.767143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25733 TClient is connected to server localhost:25733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:48.176537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.193223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.299019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.420054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:48.470079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:50.016107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574536029378169:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.016195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.273744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.301398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.322450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.350332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.373484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.466764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:50.539560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574536029378676:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.539630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.539772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574536029378681:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:50.542740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:50.550924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574536029378683:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:51.351963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.541917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.693406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.833269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.134839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.373355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574523144474549:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.373430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16684, MsgBus: 2076 2025-04-07T14:14:53.043665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574548086027764:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:53.043744Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127e/r3tmp/tmplX4eWL/pdisk_1.dat 2025-04-07T14:14:53.110281Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16684, node 2 2025-04-07T14:14:53.146179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:53.146281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:53.148457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:53.178999Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:53.179023Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:53.179032Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:53.179129Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2076 TClient is connected to server localhost:2076 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:53.571321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.607036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.676757Z no ... N: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.411710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.443251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.511075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.539011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.567754Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.606650Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574643662420290:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.606743Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.606828Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574643662420295:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.610613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:15.619912Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574643662420297:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:16.478648Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574626482548898:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:16.478756Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:16.669466Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:16.964901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.151740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.319780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.677777Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63671, MsgBus: 29585 2025-04-07T14:15:19.607489Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574659099901791:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:19.607540Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127e/r3tmp/tmpcGux6C/pdisk_1.dat 2025-04-07T14:15:19.709307Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:19.729427Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:19.729524Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:19.730802Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63671, node 6 2025-04-07T14:15:19.783688Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:19.783719Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:19.783727Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:19.783881Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29585 TClient is connected to server localhost:29585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:20.331651Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.337460Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:20.343190Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.425342Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.576372Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.676469Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:23.190502Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574676279772686:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.190599Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.236937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.307523Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.342373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.376231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.403075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.438082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.519814Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574676279773189:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.520123Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.520691Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574676279773194:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.525180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:23.540683Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574676279773196:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:24.611175Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574659099901791:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:24.615409Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23975, MsgBus: 4673 2025-04-07T14:14:48.916603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574527592272112:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:48.916720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001271/r3tmp/tmpTEUIjP/pdisk_1.dat 2025-04-07T14:14:49.197783Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23975, node 1 2025-04-07T14:14:49.276157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:49.276173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:49.276182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:49.276239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:49.281790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:49.282033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:49.283797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4673 TClient is connected to server localhost:4673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:49.803447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:49.827606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:49.839545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:49.997050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:50.159906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:50.220808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.649013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574540477175713:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.649124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:51.906429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.946616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:51.974526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.006729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.036509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.070491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.120554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574544772143506:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.120617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.120751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574544772143511:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.123862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:52.132492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574544772143513:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 32561, MsgBus: 26610 2025-04-07T14:14:54.096970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574554242000732:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:54.097034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001271/r3tmp/tmpMxonDp/pdisk_1.dat 2025-04-07T14:14:54.194374Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:54.211932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:54.212019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:54.213397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32561, node 2 2025-04-07T14:14:54.257871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:54.257895Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:54.257907Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:54.258006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26610 TClient is connected to server localhost:26610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:54.635114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.650553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.725393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:54.862423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:54.920105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.829077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574562831937018:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.829159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.856390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.566037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.612183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.647560Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.712874Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.742054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.773750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.844061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.910531Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574638286682219:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.910668Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.911254Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574638286682224:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.916885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:14.928925Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574638286682226:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:15.950438Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574621106810804:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:15.950509Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32722, MsgBus: 18235 2025-04-07T14:15:17.869982Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574651281926794:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:17.870044Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001271/r3tmp/tmpJB3mD8/pdisk_1.dat 2025-04-07T14:15:17.969226Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:17.992827Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:17.992936Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:17.994545Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32722, node 6 2025-04-07T14:15:18.047041Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:18.047064Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:18.047070Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:18.047161Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18235 TClient is connected to server localhost:18235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:18.532533Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:18.539055Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:18.545455Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:18.618322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:18.786603Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:18.864625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:21.662824Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574668461797680:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.663026Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:21.695113Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.732701Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.766346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.833410Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.865900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:21.938920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.003382Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574672756765483:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.003484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.003728Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574672756765488:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.007953Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:22.018234Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574672756765490:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:22.928187Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574651281926794:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:22.928531Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:23.282565Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.315509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.354606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [] >> KqpNewEngine::JoinMultiConsumer [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> KqpPg::EmptyQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 21917, MsgBus: 25912 2025-04-07T14:15:00.553246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574576955629668:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:00.553690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124c/r3tmp/tmpBMfeHv/pdisk_1.dat 2025-04-07T14:15:00.874314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21917, node 1 2025-04-07T14:15:00.912865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:00.912964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:00.914899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:00.953737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:00.953761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:00.953770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:00.953891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25912 TClient is connected to server localhost:25912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:01.422833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:01.437709Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:03.090743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574589840532184:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.090848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.274307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.423390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574589840532284:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.423474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.423595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574589840532289:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.426462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:03.433339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574589840532291:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:15:03.760626Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574589840532408:2334], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-04-07T14:15:03.760863Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2MwNzM3NjktNzkzZmFjMTktMmNlNzU4YTQtYzk2OTQ4YjU=, ActorId: [1:7490574589840532166:2300], ActorState: ExecuteState, TraceId: 01jr89nma02neq2kcksn391f7v, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-07T14:15:03.774888Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574589840532417:2338], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:15:03.775105Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2MwNzM3NjktNzkzZmFjMTktMmNlNzU4YTQtYzk2OTQ4YjU=, ActorId: [1:7490574589840532166:2300], ActorState: ExecuteState, TraceId: 01jr89nmaqezm3bn3mm7pm6nen, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 5076, MsgBus: 10639 2025-04-07T14:15:04.370912Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574595882840487:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:04.371018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124c/r3tmp/tmpaiEcZp/pdisk_1.dat 2025-04-07T14:15:04.443021Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5076, node 2 2025-04-07T14:15:04.474443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:04.474529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:04.481916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:04.505496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:04.505524Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:04.505531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:04.505622Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10639 TClient is connected to server localhost:10639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:04.861184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.209294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574608767742998:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.209393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.245415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.286886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574608767743096:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.286961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574608767743101:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.286969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.290598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:07.299666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574608767743103:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07 ... )) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:17.327522Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:17.329304Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13860, node 5 2025-04-07T14:15:17.403375Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:17.403394Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:17.403400Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:17.403490Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15323 TClient is connected to server localhost:15323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:17.875062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.729233Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574664474678217:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.729326Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.745823Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:20.787261Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574664474678315:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.787359Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.787499Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574664474678320:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:20.791497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:20.800706Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574664474678322:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:21.185489Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7490574668769645733:2334], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-07T14:15:21.186084Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OGY5YTNlYTAtMjE3NzlhNzEtOWI5ZTU3ZTEtNjI0NDgxYmE=, ActorId: [5:7490574664474678199:2301], ActorState: ExecuteState, TraceId: 01jr89p5ak7w8y3f401m2j88sr, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:21.408844Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:21.412280Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7490574668769645742:2338], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-07T14:15:21.412861Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OGY5YTNlYTAtMjE3NzlhNzEtOWI5ZTU3ZTEtNjI0NDgxYmE=, ActorId: [5:7490574664474678199:2301], ActorState: ExecuteState, TraceId: 01jr89p5baaynh6hc667x52sv5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 4185, MsgBus: 62336 2025-04-07T14:15:22.199071Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574672538928387:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:22.199159Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124c/r3tmp/tmpRrPUHL/pdisk_1.dat 2025-04-07T14:15:22.319005Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:22.331270Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:22.331392Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:22.333237Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4185, node 6 2025-04-07T14:15:22.392877Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:22.392895Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:22.392916Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:22.393013Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62336 TClient is connected to server localhost:62336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:22.881166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:25.257291Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574685423830897:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.257407Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.266519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:25.342359Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574685423830998:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.342445Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.342508Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574685423831003:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.346399Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:25.357663Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574685423831005:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:25.567235Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574685423831095:2328], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:15:25.567473Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDhlNWZmYzYtNmFjNzlkNDctY2Y0YzZmMmUtNDBiZjdiMWU=, ActorId: [6:7490574685423830894:2301], ActorState: ExecuteState, TraceId: 01jr89p9kbbr91e4a15a9wr9b7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:15:25.584131Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinMultiConsumer [GOOD] Test command err: Trying to start YDB, gRPC: 27888, MsgBus: 1820 2025-04-07T14:14:50.924656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574537419972721:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:50.933590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126e/r3tmp/tmpsI6IYO/pdisk_1.dat 2025-04-07T14:14:51.217968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27888, node 1 2025-04-07T14:14:51.276760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:51.276811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:51.276820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:51.276909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:51.330268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:51.330369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:51.332083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1820 TClient is connected to server localhost:1820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
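The KQP_COMPILE_ACTOR failures in the KqpNotNullColumns::FailedMultiEffects output above are compile-time rejections of writes that omit a key column (code 2029), pass NULL where a non-nullable type is expected (code 2031), or leave a NOT NULL column uninitialized (code 2032). A minimal YQL sketch that would trigger the same diagnostics; the DDL is assumed from the column names and types quoted in the errors (Key:Uint64, Value:String for /Root/TestInsertNotNullPk), so treat it as illustrative rather than the test's actual schema:

CREATE TABLE `/Root/TestInsertNotNullPk` (
    Key Uint64 NOT NULL,
    Value String,
    PRIMARY KEY (Key)
);

-- Omitting the key column: "Missing key column in input: Key" (code 2029)
UPSERT INTO `/Root/TestInsertNotNullPk` (Value) VALUES ("a");

-- Passing NULL for the key: "Failed to convert 'Key': Null to Uint64" (code 2031)
UPSERT INTO `/Root/TestInsertNotNullPk` (Key, Value) VALUES (NULL, "a");

The later steps of the same test use a NOT NULL data column; omitting it or writing NULL there produces "Missing not null column in input: Value" (code 2032) at compile time or "Tried to insert NULL value into NOT NULL column: Value" (code 2031) from the TKqpEnsure check, as shown for nodes 5 and 6 above.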
2025-04-07T14:14:51.738654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.752131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:51.761159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:51.861276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:52.018708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.092918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.639636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574550304876344:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:53.639745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:53.875879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:53.901314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:53.925569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:53.998420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.026238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.094298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:54.130654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574554599844144:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.130744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.131002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574554599844149:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:54.134206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:54.142861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574554599844151:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7989, MsgBus: 13312 2025-04-07T14:14:56.279251Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574560275277056:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:56.279305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126e/r3tmp/tmpoKiAo3/pdisk_1.dat 2025-04-07T14:14:56.407912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:56.433823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:56.433902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:56.435347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7989, node 2 2025-04-07T14:14:56.475561Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:56.475582Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:56.475598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:56.475691Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13312 TClient is connected to server localhost:13312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:14:56.864364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.881215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.954194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:57.104304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.185153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.353099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574573160180630:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.353174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.390037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.743811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.818108Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.522302Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574651680821022:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.522412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.566792Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.599360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.628383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.657909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.686518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.760593Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.817325Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574651680821520:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.817415Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574651680821525:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.817417Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.820910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:17.829226Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574651680821527:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:18.869119Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574634500950125:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:18.869185Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25381, MsgBus: 12640 2025-04-07T14:15:20.368862Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574664767417369:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:20.368944Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126e/r3tmp/tmpieUZ49/pdisk_1.dat 2025-04-07T14:15:20.497612Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:20.528361Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:20.528474Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:20.533919Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25381, node 6 2025-04-07T14:15:20.598075Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:20.598102Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:20.598110Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:20.598232Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12640 TClient is connected to server localhost:12640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:21.136406Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:21.159220Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:21.250215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:21.425799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:21.501844Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:24.365160Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574681947288251:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.365259Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.414759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.448050Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.485057Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.519043Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.554691Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.590603Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.629923Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574681947288748:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.629987Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574681947288753:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.630022Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.633825Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:24.643667Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574681947288755:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:25.393769Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574664767417369:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.394053Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 29988, MsgBus: 12769 2025-04-07T14:14:49.633181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574530481031743:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.633247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126f/r3tmp/tmpIRL2YI/pdisk_1.dat 2025-04-07T14:14:49.965686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29988, node 1 2025-04-07T14:14:50.013928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:50.014001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:50.015631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:50.045238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:50.045266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:50.045275Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:50.045418Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12769 TClient is connected to server localhost:12769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:50.511131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:52.208446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574543365934253:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.208520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.452492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:52.574983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574543365934353:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.575056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.575070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574543365934358:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:52.578649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:52.585549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574543365934360:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:14:52.768129Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574543365934452:2328], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-04-07T14:14:52.768417Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTc1YTMzOWEtNjU0NDBkMTQtYWVkNzNhZDctMjQzN2U1MzU=, ActorId: [1:7490574543365934250:2300], ActorState: ExecuteState, TraceId: 01jr89n9jfard0ykx8yp1efmz6, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-07T14:14:52.785134Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574543365934461:2332], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-07T14:14:52.786353Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTc1YTMzOWEtNjU0NDBkMTQtYWVkNzNhZDctMjQzN2U1MzU=, ActorId: [1:7490574543365934250:2300], ActorState: ExecuteState, TraceId: 01jr89n9k958a1c13xqn1v8xt9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 61482, MsgBus: 4925 2025-04-07T14:14:53.521811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574550636935557:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:53.521896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126f/r3tmp/tmpVSm5bK/pdisk_1.dat 2025-04-07T14:14:53.605621Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61482, node 2 2025-04-07T14:14:53.639740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:53.639822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:53.641632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:53.668347Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:53.668372Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:53.668384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:53.668479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4925 TClient is connected to server localhost:4925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:54.040086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:54.044706Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:56.288422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574563521838074:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.288507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.306610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.358797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574563521838172:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.358880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.358963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574563521838177:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:56.361905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:14:56.369636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574563521838179:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T ... Id: [4:7490574592016240467:2358], ActorState: ExecuteState, TraceId: 01jr89npy84ctv5tsak94z4hjf, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 25830, MsgBus: 25288 2025-04-07T14:15:08.192349Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574611291317682:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:08.192436Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126f/r3tmp/tmpouE5vF/pdisk_1.dat 2025-04-07T14:15:08.307567Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25830, node 5 2025-04-07T14:15:08.335561Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:08.335672Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:08.337150Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:08.381336Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:08.381365Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:08.381374Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:08.381491Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25288 TClient is connected to server localhost:25288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:08.895782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:11.338549Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574624176220203:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:11.340729Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574624176220191:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:11.340869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:11.341613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:11.351704Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574624176220205:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:11.453612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:15:13.192178Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574611291317682:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:13.216703Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:18.645332Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr89p17t9fj6dz8afb9zt1ry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NmJiYzVlZjgtNWIyOTg5MTgtZDgxNmEzODktY2VkMmQ0NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:18.645505Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NmJiYzVlZjgtNWIyOTg5MTgtZDgxNmEzODktY2VkMmQ0NjQ=, ActorId: [5:7490574645651057629:2503], ActorState: ExecuteState, TraceId: 01jr89p17t9fj6dz8afb9zt1ry, Create QueryResponse for error on request, msg: 2025-04-07T14:15:20.165998Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr89p2vw1h4r72zc70jb7aqj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjNhNTUyOS00MjhmNDAxNi1lZTY1MjRhOC1hYTY2NTZhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:20.166239Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MjNhNTUyOS00MjhmNDAxNi1lZTY1MjRhOC1hYTY2NTZhYQ==, ActorId: [5:7490574654240992328:2531], ActorState: ExecuteState, TraceId: 01jr89p2vw1h4r72zc70jb7aqj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14601, MsgBus: 62668 2025-04-07T14:15:20.892252Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574662794105628:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:20.892334Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126f/r3tmp/tmpyAKflc/pdisk_1.dat 2025-04-07T14:15:21.042652Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:21.059297Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:21.059421Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:21.061198Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14601, node 6 2025-04-07T14:15:21.107360Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:21.107381Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:21.107388Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:21.107491Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:62668 TClient is connected to server localhost:62668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:21.690180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:24.456286Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574679973975442:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.456377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.475572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.531979Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574679973975590:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.532057Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574679973975595:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.532061Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.536525Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:24.550075Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574679973975597:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:25.901184Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574662794105628:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.901232Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpSqlIn::PhasesCount [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpRanges::DeleteNotFullScan [GOOD] >> KqpRanges::CastKeyBounds >> KqpPg::NoTableQuery [GOOD] >> KqpPg::PgCreateTable >> KqpPg::EmptyQuery [GOOD] >> KqpPg::DuplicatedColumns |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 16324, MsgBus: 11976 2025-04-07T14:14:52.529275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574545081825338:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:52.529334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126a/r3tmp/tmpNewD6j/pdisk_1.dat 2025-04-07T14:14:52.789261Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16324, node 1 2025-04-07T14:14:52.856764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:52.856789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:52.856795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:52.856885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:52.874294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:52.874438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:52.876132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11976 TClient is connected to server localhost:11976 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:53.319294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.343055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.464451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.610355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:53.687088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:55.050702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574557966728927:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.050797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.260549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.282931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.303335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.327468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.352914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.385209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:55.431343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574557966729418:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.431403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.431437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574557966729423:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:55.434372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:55.441874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574557966729425:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
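The repeated "Resource pool default not found or you don't have access permissions" warnings above are emitted while the workload service bootstraps the default pool (the ESchemeOpCreateResourcePool suboperation and the TPoolCreatorActor retry that follow), so they resolve on their own during test startup. For a cluster where a pool has to be created explicitly, a minimal hedged sketch of the YQL involved; the pool name and the setting names below are assumptions for illustration, not values taken from this run:

CREATE RESOURCE POOL test_pool WITH (   -- "test_pool" is a hypothetical name
    CONCURRENT_QUERY_LIMIT = 10,        -- assumed setting name; check the YDB workload manager docs for the exact option set
    QUEUE_SIZE = 100                    -- assumed setting name
);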
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 11461, MsgBus: 15294 2025-04-07T14:14:57.492122Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574566198201332:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:57.492174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126a/r3tmp/tmpfr05Pw/pdisk_1.dat 2025-04-07T14:14:57.567173Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:57.595063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:57.595173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11461, node 2 2025-04-07T14:14:57.596535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:57.628224Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:57.628246Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:57.628253Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:57.628355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15294 TClient is connected to server localhost:15294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:58.015490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.021438Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:14:58.028365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
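The SqlIn warning above (code 1108) recommends opting into ANSI IN semantics for nullable items. A minimal sketch of what that looks like in a YQL query, assuming an illustrative table and column (not taken from this run):

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT Key, Value
FROM `/Root/TestTable`            -- hypothetical table, for illustration only
WHERE Value IN ("a", "b", NULL);  -- with the pragma, NULL items follow ANSI three-valued logic and warning 1108 is not expected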
2025-04-07T14:14:58.109863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.264417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.338876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.306963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574579083104928:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, ... 7T14:15:17.205166Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574651272241275:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.205239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.260981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.300924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.343495Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.385838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.426178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.458797Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.573757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574651272241769:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.573843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.574079Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574651272241774:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:17.577724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:17.585672Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574651272241776:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:18.663093Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.712498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:18.742671Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574634092370389:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:18.742738Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:18.758939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26892, MsgBus: 2269 2025-04-07T14:15:22.725033Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574671427088706:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:22.725123Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00126a/r3tmp/tmpxCAH6m/pdisk_1.dat 2025-04-07T14:15:22.864764Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:22.883549Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:22.883651Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:22.885440Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26892, node 5 2025-04-07T14:15:22.953900Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:22.953929Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:22.953937Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:22.954073Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2269 TClient is connected to server localhost:2269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:23.466136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:23.481301Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:23.565924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:23.733273Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:23.812930Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.461434Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574688606959595:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.461524Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.504000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.529533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.555443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.584241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.608361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.642456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.715804Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574688606960091:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.715893Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.716125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574688606960096:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.721562Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:26.737001Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574688606960098:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:27.728871Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574671427088706:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:27.728938Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::InsertNoTargetColumns_Simple [GOOD] >> KqpPg::InsertNoTargetColumns_Serial >> KqpNotNullColumns::AlterAddIndex [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::PointJoin+EnableKqpDataQueryStreamLookup >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> BasicStatistics::TwoServerlessTwoSharedDbs |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> KqpNewEngine::DqSourceLocksEffects [GOOD] >> KqpPg::CopyTableSerialColumns [GOOD] >> KqpPg::CreateIndex >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 6982, MsgBus: 19522 2025-04-07T14:14:58.118611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574571594092333:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:58.118680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125a/r3tmp/tmpvKFQUx/pdisk_1.dat 2025-04-07T14:14:58.416882Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6982, node 1 2025-04-07T14:14:58.488815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:58.488841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:58.488848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:58.488981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:58.489933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:58.490057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:58.491741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19522 TClient is connected to server localhost:19522 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:58.879033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:58.901727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.039096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.180360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.243744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.702633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574580184028614:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:00.702732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:00.928953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:00.957973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.011512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.047988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.115886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.175441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.208454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574584478996407:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:01.208534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:01.208556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574584478996412:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:01.211739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:01.219743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574584478996414:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 29851, MsgBus: 18729 2025-04-07T14:15:03.068057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574591396810263:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:03.068099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125a/r3tmp/tmp27QCOE/pdisk_1.dat 2025-04-07T14:15:03.183226Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:03.204996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:03.205072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:03.207555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29851, node 2 2025-04-07T14:15:03.274220Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:03.274252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:03.274260Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:03.274391Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18729 TClient is connected to server localhost:18729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:03.658685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.665669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:03.672879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:15:03.747327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:03.898516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.972100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.813031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574599986746551:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.813103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:05.853594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... nd or you don't have access permissions } 2025-04-07T14:15:22.959725Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.005720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.039616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.106073Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.139036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.166913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.214799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.298508Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574679428117916:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.298610Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.298685Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574679428117921:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:23.302991Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:23.311253Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574679428117923:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:24.440053Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574662248246523:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:24.440116Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:24.467857Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1063, MsgBus: 11430 2025-04-07T14:15:25.278458Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574684497123773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.278501Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125a/r3tmp/tmpKOnsTz/pdisk_1.dat 2025-04-07T14:15:25.372426Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:25.388780Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:25.388872Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:25.390420Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1063, node 6 2025-04-07T14:15:25.426343Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:25.426369Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:25.426380Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:25.426493Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11430 TClient is connected to server localhost:11430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:25.927392Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:25.934717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:25.985244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.162600Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.228491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:28.688379Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697382027353:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.688481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.732307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.765440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.795488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.837696Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.869849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.910122Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.972725Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697382027853:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.972806Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.972961Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697382027858:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.976554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:28.985981Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574697382027860:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:30.066385Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.132616Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:15:30.176216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.278913Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574684497123773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.279015Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard >> ColumnStatistics::CountMinSketchServerlessStatistics >> HttpRequest::AnalyzeServerless >> KqpSort::TopParameterFilter [GOOD] >> SystemView::CollectScanQueries [GOOD] >> HttpRequest::Status >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 20565, MsgBus: 64525 2025-04-07T14:14:56.586953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574561155264772:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:56.587042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125c/r3tmp/tmpR7hZu1/pdisk_1.dat 2025-04-07T14:14:56.910697Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20565, node 1 2025-04-07T14:14:56.918869Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:14:56.952353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:56.952374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:56.952391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:56.952501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:56.977707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:56.977867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:56.979508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64525 TClient is connected to server localhost:64525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:57.383557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.400364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.515299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.646117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.698498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:59.056857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574574040168376:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.056947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.245437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.271249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.295389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.335588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.358525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.381400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:59.423396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574574040168867:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.423508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.423648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574574040168872:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:59.426702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:59.433950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574574040168874:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 4962, MsgBus: 1029 2025-04-07T14:15:01.322026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574581813795623:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:01.322244Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125c/r3tmp/tmpP1Kw3g/pdisk_1.dat 2025-04-07T14:15:01.446506Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:01.471636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:01.471725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:01.473036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4962, node 2 2025-04-07T14:15:01.527717Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:01.527745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:01.527762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:01.527888Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1029 TClient is connected to server localhost:1029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:01.941624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:01.958455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.040969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:02.200131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.273620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.237707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574594698699216:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.237794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:04.281399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at sc ... 0660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:20.279498Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:22.575535Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674039485421:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.575635Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.615470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.683889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.755455Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.793295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.829216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.905980Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.985136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674039485929:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.985222Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.985286Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674039485934:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.989115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:22.998897Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574674039485936:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:24.290226Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574661154581842:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:24.290320Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6133, MsgBus: 30116 2025-04-07T14:15:25.352880Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574686416981890:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.352933Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125c/r3tmp/tmp1FRh89/pdisk_1.dat 2025-04-07T14:15:25.430561Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:25.456188Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:25.456256Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:25.457544Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6133, node 6 2025-04-07T14:15:25.497971Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:25.497993Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:25.498000Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:25.498099Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30116 TClient is connected to server localhost:30116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:25.907495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:25.919668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:25.970153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.142543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.216490Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:28.978005Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574699301885478:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.978078Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.028104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.064379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.098485Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.135182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.170817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.225624Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.278725Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574703596853269:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.278829Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574703596853274:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.278832Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.282700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.295803Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574703596853276:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:30.364292Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574686416981890:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.366260Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:30.863748Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzcxNGYxODEtYjdjNDYzODYtNWUwZGM1YzctMWJhMTY2YzE=, ActorId: [6:7490574707891820889:2461], ActorState: ExecuteState, TraceId: 01jr89pep33cwgkysweqkvzth9, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`, code: 2001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 3511, MsgBus: 19896 2025-04-07T14:15:03.385239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574589792377604:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:03.385742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001240/r3tmp/tmpd36eAa/pdisk_1.dat 2025-04-07T14:15:03.666613Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3511, node 1 2025-04-07T14:15:03.747353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:03.747514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:03.749197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:03.760272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:03.760296Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:03.760305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:03.760388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19896 TClient is connected to server localhost:19896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:04.225653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.136428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574602677279904:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.136532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.318989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.436637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574602677280006:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.436740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.436878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574602677280011:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.440066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:06.448137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574602677280013:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 26695, MsgBus: 5714 2025-04-07T14:15:07.639973Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574606894322455:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:07.640353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001240/r3tmp/tmpcjZTru/pdisk_1.dat 2025-04-07T14:15:07.719775Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26695, node 2 2025-04-07T14:15:07.741213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:07.741286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:07.742975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:07.778794Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:07.778818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:07.778822Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:07.778887Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5714 TClient is connected to server localhost:5714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:08.269750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:08.276451Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:10.417658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574619779224954:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.417762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.436496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.515638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574619779225055:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.515727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.516114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574619779225060:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:10.519735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:10.529926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574619779225062:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:10.997400Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574619779225200:2340], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Can't set NULL or optional value to not null column: Value, code: 2031 2025-04-07T14:15:10.997620Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzk3NDA4NTQtZDc1NWMwYjAtYTc2OTY3OWUtZjEwMTU5ODA=, ActorId: [2:7490574619779224936:2300], ActorState: ExecuteState, TraceId: 01jr89nvc2565xptq7k8xjt1er, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 20791, MsgBus: 17997 2025-04-07T14:15:11.802026Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574628038021620:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:11.802115Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001240/r3tmp/tmpB2HPuC/pdisk_1.dat 2025-04-07T14:15:11.905541Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:11.933849Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:11.933934Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: ... lient.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001240/r3tmp/tmpjmyRqF/pdisk_1.dat 2025-04-07T14:15:25.180691Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:25.183945Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:25.184040Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:25.186005Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10694, node 6 2025-04-07T14:15:25.222744Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:25.222769Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:25.222779Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:25.222890Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8312 TClient is connected to server localhost:8312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:15:25.675446Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:28.534563Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697248116122:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.534694Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.558957Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:28.642537Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697248116271:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.642653Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.642800Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574697248116276:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:28.647070Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:28.656552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574697248116278:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:29.779590Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7490574701543083780:2353], TxId: 281474976715664, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=. TraceId : 01jr89pdbq2byqpxk0y94kk230. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-04-07T14:15:29.780129Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7490574701543083782:2354], TxId: 281474976715664, task: 2. Ctx: { TraceId : 01jr89pdbq2byqpxk0y94kk230. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7490574701543083776:2301], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:15:29.780373Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7490574701543083783:2355], TxId: 281474976715664, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr89pdbq2byqpxk0y94kk230. SessionId : ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7490574701543083776:2301], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:15:29.780533Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7490574701543083784:2356], TxId: 281474976715664, task: 4. Ctx: { SessionId : ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=. CustomerSuppliedId : . TraceId : 01jr89pdbq2byqpxk0y94kk230. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7490574701543083776:2301], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:15:29.782800Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pdbq2byqpxk0y94kk230, Create QueryResponse for error on request, msg: 2025-04-07T14:15:29.799105Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:29.818965Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574701543083815:2360], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-07T14:15:29.819210Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pdrc8tgx2wtq13xhsm3w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:29.840948Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574701543083834:2368], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-07T14:15:29.842865Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pds160fcss9znqpazak4, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:29.862848Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574701543083853:2376], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-07T14:15:29.864548Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pdssa6tmwd7y9hdynkza, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:30.078027Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574684363213609:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.078134Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:30.247489Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:30.251198Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574701543083872:2384], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-07T14:15:30.251829Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pdte9khhvmf1nh4t3w18, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:30.830283Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:30.835866Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574705838051198:2394], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-07T14:15:30.836189Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pe6j9prq6nnsa4ky5457, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:15:31.390097Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:31.395611Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574705838051230:2404], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-07T14:15:31.396574Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWExMTZlOWYtNTRjNTgzNmUtM2IzYzUwN2QtNGI1NTk4MDU=, ActorId: [6:7490574697248116104:2301], ActorState: ExecuteState, TraceId: 01jr89pes0bbmapbyzt0j4rehv, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: Trying to start YDB, gRPC: 8873, MsgBus: 9799 2025-04-07T14:15:04.953975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574595046944650:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:04.954022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001239/r3tmp/tmpYslshv/pdisk_1.dat 2025-04-07T14:15:05.271700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8873, node 1 2025-04-07T14:15:05.321430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:05.321480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:05.321493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:05.321595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:05.328869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:05.328979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:05.330888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9799 TClient is connected to server localhost:9799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:05.806917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:05.820122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:05.839905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.970778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.120811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.192246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.576969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574607931848242:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.577119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.828629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.857394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.884590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.920233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.950297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.999197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:08.049417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574612226816033:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:08.049486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:08.049649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574612226816038:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:08.052650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:08.059057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574612226816040:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 24374, MsgBus: 21896 2025-04-07T14:15:09.892984Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574617384985168:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:09.893065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001239/r3tmp/tmpCxuucR/pdisk_1.dat 2025-04-07T14:15:09.994568Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:10.019032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:10.019111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:10.023183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24374, node 2 2025-04-07T14:15:10.080437Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:10.080458Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:10.080477Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:10.080610Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21896 TClient is connected to server localhost:21896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:10.482659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.492767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:10.566778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:10.717149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:10.784138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.035695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574634564856064:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.035787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:13.075748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... ropose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.246308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.274267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.302149Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.340885Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.546133Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574681708814156:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.546249Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.546394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574681708814161:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:24.549922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:24.562398Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574681708814164:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:25.865349Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574664528942767:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.865421Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5122, MsgBus: 18350 2025-04-07T14:15:26.672552Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574691984568083:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.672657Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001239/r3tmp/tmp3S7aEY/pdisk_1.dat 2025-04-07T14:15:26.795022Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:26.808592Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:26.812758Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:26.814157Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5122, node 5 2025-04-07T14:15:26.865135Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:26.865157Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:26.865163Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:26.865246Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18350 TClient is connected to server localhost:18350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.360689Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:27.380132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:27.457400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:27.628359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:27.695604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:30.281721Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574709164438971:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:30.281795Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:30.325996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.360093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.402531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.437853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.471257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.504325Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.559201Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574709164439464:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:30.559276Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574709164439469:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:30.559304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:30.563451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:30.574055Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574709164439471:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:31.675044Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574691984568083:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.675108Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '481) '('"_id" '"270b863d-a991a909-5232665-7185cdb9") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '494) '('"_id" '"3c02f88c-9221de75-a23c340d-ae88cb0d")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) >> BasicStatistics::TwoDatabases >> KqpPg::DuplicatedColumns [GOOD] >> KqpPg::DropTablePg >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 17219, MsgBus: 27277 2025-04-07T14:14:55.310263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574558299212684:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:55.310363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125d/r3tmp/tmpvTrFMI/pdisk_1.dat 2025-04-07T14:14:55.627361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17219, node 1 2025-04-07T14:14:55.695394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:55.695422Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:55.695429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:55.695534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:55.699971Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:55.700060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:55.701616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27277 TClient is connected to server localhost:27277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:56.162905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.185882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:56.328492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:56.455465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:14:56.536645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:57.975727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574566889148990:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:57.975852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.184817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.204820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.225641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.247518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.273326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.305631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:58.349313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574571184116780:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.349404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.349464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574571184116785:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:58.352599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:58.361407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574571184116787:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 18386, MsgBus: 6330 2025-04-07T14:15:00.401661Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574580050167999:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:00.401751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125d/r3tmp/tmpkXHSUF/pdisk_1.dat 2025-04-07T14:15:00.485014Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18386, node 2 2025-04-07T14:15:00.526073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:00.526177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:00.528823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:00.560880Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:00.560910Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:00.560919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:00.561025Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6330 TClient is connected to server localhost:6330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:00.967706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.988374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:01.061187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:01.215053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:15:01.277245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.314251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574592935071605:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.314304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.381781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.422389Z node 2 :FLAT_TX_SCHEMESHARD ... 776654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:22.212086Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674561193151:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.212188Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.243072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.277821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.312042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.343106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.377944Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.413505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:22.459128Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674561193645:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.459197Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574674561193650:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.459206Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:22.462367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:22.472377Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574674561193652:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:23.664153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:23.732065Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574657381322258:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:23.732166Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14408, MsgBus: 4954 2025-04-07T14:15:25.577957Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574686879947019:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:25.578047Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00125d/r3tmp/tmpmjUhZB/pdisk_1.dat 2025-04-07T14:15:25.680595Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:25.698101Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:25.698189Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:25.699528Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14408, node 6 2025-04-07T14:15:25.735472Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:25.735503Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:25.735513Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:25.735616Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4954 TClient is connected to server localhost:4954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:26.279845Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:26.297933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.373677Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.538662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.615544Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:29.435368Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574704059817912:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.435468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.487486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.526424Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.559535Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.596689Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.633063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.704883Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.756150Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574704059818414:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.756256Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.756561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574704059818419:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.760766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.770398Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574704059818421:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:30.614270Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574686879947019:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.614355Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:31.017330Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScanQueries [GOOD] Test command err: 2025-04-07T14:12:16.861166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573875121431451:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:16.862128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106a/r3tmp/tmpEfseYg/pdisk_1.dat 2025-04-07T14:12:17.252838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:17.292252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.292406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.295743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22296, node 1 2025-04-07T14:12:17.403587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:17.403614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:17.403627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:17.403738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:17.732446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:12:17.882008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:17.902538Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490573878858188208:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.903048Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/PQ/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:17.948948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.949078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.977165Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 waiting... 2025-04-07T14:12:17.977151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573876692017533:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:17.982797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:17.991391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:17.991467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:17.994434Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:17.997965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:18.001196Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [2:7490573872397050027:2061], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.001227Z node 2 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [2:7490573872397050027:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:12:18.001044Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7490573874563220856:2061], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.001091Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7490573874563220856:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:12:18.001692Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7490573875121431443:2118], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.001714Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7490573875121431443:2118], query logs count# 0, processor ids count# 1, processor id to database count# 0 2025-04-07T14:12:18.003037Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7490573875121431290:2070], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.003076Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7490573875121431290:2070], query logs count# 0, processor ids 
count# 0, processor id to database count# 0 2025-04-07T14:12:18.027398Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7490573878858188281:2122], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.027451Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7490573878858188281:2122], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:12:18.163063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/PQ/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:18.163119Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [2:7490573876692017377:2055], interval end# 2025-04-07T14:12:18.000000Z, event interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.163161Z node 2 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [2:7490573876692017377:2055], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:12:18.169892Z node 2 :SYSTEM_VIEWS INFO: [72075186224037895] OnActivateExecutor 2025-04-07T14:12:18.169981Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Execute 2025-04-07T14:12:18.171119Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7490573878858188281:2122], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-07T14:12:18.180099Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7490573878858188281:2122], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/PQ 2025-04-07T14:12:18.181122Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7490573878858188281:2122], database# /Root/PQ, no sysview processor 2025-04-07T14:12:18.283052Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Complete 2025-04-07T14:12:18.283142Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Execute 2025-04-07T14:12:18.283602Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-07T14:12:18.283719Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval metrics: query count# 0 2025-04-07T14:12:18.283773Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval query tops: total query count# 0 2025-04-07T14:12:18.283818Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-07T14:12:18.283850Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 6, result count# 0 2025-04-07T14:12:18.283993Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 7, result count# 0 2025-04-07T14:12:18.284064Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 8, result count# 0 2025-04-07T14:12:18.284117Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 9, result count# 0 2025-04-07T14:12:18.284140Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 10, result count# 0 2025-04-07T14:12:18.284165Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 11, result count# 0 2025-04-07T14:12:18.284209Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 12, result count# 0 2025-04-07T14:12:18.284256Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 13, result count# 0 
2025-04-07T14:12:18.284313Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 14, result count# 0 2025-04-07T14:12:18.284348Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 15, result count# 0 2025-04-07T14:12:18.284414Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 16, partCount count# 0 2025-04-07T14:12:18.284454Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 19, partCount count# 0 2025-04-07T14:12:18.284473Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 17, result count# 0 2025-04-07T14:12:18.284535Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 18, result count# 0 2025-04-07T14:12:18.284716Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2025-04-07T14:12:18.000000Z 2025-04-07T14:12:18.284847Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7490573876692017377:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-07T14:12:18.284964Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-07T14:12:18.288963Z node 2 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [2:7490573876692017377:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/PQ 2025-04-07T14:12:18.289027Z node 2 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [2:7490573876692017377:2055], database# /Root/PQ, no sysview processor 2025-04-07T14:12:18.290125Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:12:18.297158Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Complete 2025-04-07T14:12:18.306457Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Execute: database# /Root/PQ 2025-04-07T14:12:18.320954Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Complete 2025-04-07T14:12:18.324447Z node 2 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsColle ... le: (empty maybe) 2025-04-07T14:15:23.375626Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:23.375752Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:23.623281Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:23.638410Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:26.249703Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574689161838560:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.249703Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574689161838552:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.249793Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.253450Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:26.262915Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574689161838566:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:15:26.818321Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr89pa975vcfz7a2bkp07yh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGMwZGY0N2UtMzRlNGFkNDUtYmVjMGMyNzAtMzBkOWE3ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:26.987188Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr89pb0959zmsee2ytstv5dq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjliYzA1ZWYtNzNhZWZjNTQtMThjNDAxNTktOTQ1MzU2ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:27.131942Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr89pb0wbyd071hxp983wbrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTMyZjc5MWUtOTljYTA1NjgtYTM2ZWU2NWYtODQ4NjMxYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:27.134605Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7490574693456806005:2337], owner: [7:7490574693456806002:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:15:27.135920Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7490574693456806005:2337], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:15:27.136611Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7490574693456806005:2337], row count: 2, finished: 1 2025-04-07T14:15:27.136656Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7490574693456806005:2337], owner: [7:7490574693456806002:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:15:27.138992Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035327129, txId: 281474976710663] shutting down 2025-04-07T14:15:28.055057Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490574697805379222:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:28.055162Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00106a/r3tmp/tmp7bPJIY/pdisk_1.dat 2025-04-07T14:15:28.138339Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:28.159013Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:28.159100Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:28.160805Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31184, node 8 2025-04-07T14:15:28.202624Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:28.202644Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:28.202651Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:28.202772Z node 8 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:21017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:28.411221Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:28.419466Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:31.358359Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574710690281819:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.358412Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574710690281814:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.358467Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.361591Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:31.370313Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490574710690281828:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:31.571054Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89pf8w8dyy2nchjs633wyy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NTVkYWE1My1lMTIwN2ZhNy04MWQxZTA1Yi1hNDlhMDUyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:31.668996Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89pfg09awj44g1ngf7vn0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZmE5YTFmMzYtZWVkOGU0ZWYtNzFhNzNlMzAtNWRhODAwOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:31.673666Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035331710, txId: 281474976715662] shutting down 2025-04-07T14:15:31.810181Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89pfkd4w08wy8cg36sf235, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NjhmMjBlM2EtMjdiZDBjZjYtMTg5YmNmNzctOTVhNzhhNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:31.815325Z node 8 :SYSTEM_VIEWS INFO: Scan started, actor: [8:7490574710690281992:2337], owner: [8:7490574710690281989:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:15:31.828204Z node 8 :SYSTEM_VIEWS INFO: Scan prepared, actor: [8:7490574710690281992:2337], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:15:31.831058Z node 8 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [8:7490574710690281992:2337], row count: 2, finished: 1 2025-04-07T14:15:31.831101Z node 8 :SYSTEM_VIEWS INFO: Scan finished, actor: [8:7490574710690281992:2337], owner: [8:7490574710690281989:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-07T14:15:31.833750Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035331809, txId: 281474976715664] shutting down >> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:32.267683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:32.267796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:32.267840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:32.267873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:32.267931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:32.267984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:32.268048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:32.268399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:32.339249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:32.339312Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:32.350481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:32.350621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:32.350804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:32.360870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:32.361047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:32.361674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:32.362071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:32.367337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.369292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:32.369369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.369551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:32.369601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:32.369644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:32.369789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] 
recipient: [1:209:2211] 2025-04-07T14:13:32.377120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:32.470443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:32.470677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.470909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:32.471156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:32.471225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.473549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:32.473689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:32.473865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.473911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:32.473950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:32.473996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:32.475822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.475873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:32.475910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:32.477484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.477525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.477579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.477630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.480922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-04-07T14:13:32.482442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:32.482614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:32.483388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:32.483514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:32.483561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.483813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:32.483856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:32.484012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:32.484085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:32.485864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:32.485905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:32.486064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:32.486100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:32.486400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:32.486441Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:32.486532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:32.486583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.486629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:32.486682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:32.486717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:32.486750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:32.486813Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:32.486852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:32.486882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... Task: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:15:32.758542Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:15:32.758798Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 290us result status StatusSuccess 2025-04-07T14:15:32.759561Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:15:32.771628Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1107:2878] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:15:32.771735Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1054:2878] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-07T14:15:32.771891Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1107:2878] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035332747569 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035332747569 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035332747569 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:15:32.774205Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1107:2878] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:15:32.774300Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1054:2878] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard >> KqpPg::InsertNoTargetColumns_Serial [GOOD] >> KqpPg::InsertValuesFromTableWithDefault >> BasicStatistics::Simple >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::Insert_Serial |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpRanges::CastKeyBounds [GOOD] >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert >> HttpRequest::Probe >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] >> KqpRanges::MergeRanges >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 19636, MsgBus: 12219 2025-04-07T14:15:00.804472Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574580167900630:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:00.806636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001243/r3tmp/tmpAWN2cY/pdisk_1.dat 2025-04-07T14:15:01.108473Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19636, node 1 2025-04-07T14:15:01.184216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:01.184324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:01.185622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:01.185631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:01.185637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:01.185732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:01.186026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12219 TClient is connected to server localhost:12219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:01.666947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:03.520907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574593052803083:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.521119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.717444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:03.831992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574593052803186:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.832087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574593052803191:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.832100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:03.834910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:03.845216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574593052803193:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:04.051078Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574597347770579:2328], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-04-07T14:15:04.051330Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk0NmMxMTQtNTBjMWYwMDItYjA4YWU0NmMtOTg2N2ZkYzc=, ActorId: [1:7490574593052803079:2300], ActorState: ExecuteState, TraceId: 01jr89nmk0feyvtn83s9akjh82, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-07T14:15:04.108709Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-07T14:15:04.115612Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574597347770588:2332], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Key, code: 2031 2025-04-07T14:15:04.115874Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk0NmMxMTQtNTBjMWYwMDItYjA4YWU0NmMtOTg2N2ZkYzc=, ActorId: [1:7490574593052803079:2300], ActorState: ExecuteState, TraceId: 01jr89nmkt75crtwr0dbjdsfx0, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 11935, MsgBus: 21950 2025-04-07T14:15:04.650192Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574596021993187:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:04.650648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001243/r3tmp/tmp8odMlw/pdisk_1.dat 2025-04-07T14:15:04.729525Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:04.755296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:04.755372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11935, node 2 2025-04-07T14:15:04.758135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:04.788902Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:04.788921Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:04.788926Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:04.788990Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21950 TClient is connected to server localhost:21950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:05.167976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.188189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:05.242569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.410121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.473938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.410734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574608906896765:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.410811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:07.445351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.472915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.500505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:07.527401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665 ... , issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:27.224164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.291852Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.326620Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.355112Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.385569Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.455229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.546035Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574695534036181:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:27.546190Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:27.546638Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574695534036186:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:27.550835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:27.564329Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574695534036188:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:28.860951Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574678354164806:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:28.861084Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":5,"FinishTimeMs":1744035329031,"TaskId":1,"Host":"ghrun-lqtfcktism","ComputeTimeUs":102}],"CpuTimeUs":472}],"UseLlvm":"undefined","Tasks":1,"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1744035329031,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":472,"Max":472,"Min":472}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":296233,"CpuTimeUs":290168},"ProcessCpuTimeUs":1603,"TotalDurationUs":304266,"ResourcePoolId":"default","QueuedTimeUs":444},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Cpu":0.472,"Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 3582, MsgBus: 21146 2025-04-07T14:15:29.946419Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574703026412968:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:29.946469Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001243/r3tmp/tmpT3CIEJ/pdisk_1.dat 2025-04-07T14:15:30.074108Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:30.077821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:30.077919Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:30.080356Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3582, node 6 2025-04-07T14:15:30.133357Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:30.133387Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:30.133399Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:30.133541Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21146 TClient is connected to server localhost:21146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:30.700929Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:15:30.713335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.775030Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:30.944384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:31.022213Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:33.533336Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720206283829:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.533533Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.566598Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.605398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.637434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.675922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.708300Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.743001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.827935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720206284327:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.828037Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.828136Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720206284332:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.831795Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.840703Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574720206284334:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:34.946541Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574703026412968:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:34.956755Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] Test command err: Trying to start YDB, gRPC: 2566, MsgBus: 17769 2025-04-07T14:14:59.687447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574575293279267:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:59.687498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124f/r3tmp/tmp4Kqm0s/pdisk_1.dat 2025-04-07T14:14:59.980322Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2566, node 1 2025-04-07T14:15:00.046441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:00.046567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:00.059375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:00.097192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:00.097214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:00.097221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:00.097315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17769 TClient is connected to server localhost:17769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:00.515883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:00.544463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.678441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.823333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:00.906664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:02.389347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574588178182872:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.389479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.601404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.627420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.648144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.689350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.713887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.745823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:02.791238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574588178183368:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.791317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.791445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574588178183373:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:02.794239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:02.801458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574588178183375:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:03.761780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:04.414341Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035304452, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 63051, MsgBus: 11052 2025-04-07T14:15:05.009349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574599082602826:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:05.009405Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124f/r3tmp/tmpsYKJq7/pdisk_1.dat 2025-04-07T14:15:05.108618Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:05.125263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:05.125344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:05.126923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63051, node 2 2025-04-07T14:15:05.169204Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:05.169228Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:05.169234Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:05.169321Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11052 TClient is connected to server localhost:11052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:05.593191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:05.598790Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:05.602959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.675492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.830451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:05.931650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:07.868781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574607672539114:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don' ... rmissions } 2025-04-07T14:15:25.997914Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.055764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.088447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.118686Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.143262Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.170159Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.235727Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.312458Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574689153756003:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.312555Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.312624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574689153756008:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.316684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:26.326797Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574689153756010:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:27.435190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:27.705874Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574671973884595:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:27.705936Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:28.307752Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035328336, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 2702, MsgBus: 26533 2025-04-07T14:15:29.207660Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574703097869931:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:29.207708Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00124f/r3tmp/tmpnljmmi/pdisk_1.dat 2025-04-07T14:15:29.314878Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:29.330637Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:29.330744Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:29.332712Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2702, node 6 2025-04-07T14:15:29.385579Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:29.385602Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:29.385611Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:29.385737Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26533 TClient is connected to server localhost:26533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:15:29.983287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:29.989837Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:30.000198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:30.076617Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:30.264926Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:30.349408Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:33.045377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720277740804:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.045458Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.115103Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.152626Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.185026Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.223938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.256400Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.324957Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.364093Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720277741301:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.364224Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.364511Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574720277741306:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.367725Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.376840Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574720277741308:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:34.213528Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574703097869931:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:34.213590Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:34.689231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.764912Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035335770, txId: 281474976715673] shutting down |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::InsertValuesFromTableWithDefault [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables >> KqpPg::Insert_Serial [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> BasicStatistics::TwoServerlessDbs >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12047, MsgBus: 30805 2025-04-07T14:15:03.693268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574593727045416:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:03.693321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00123b/r3tmp/tmptURCX8/pdisk_1.dat 2025-04-07T14:15:03.998451Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12047, node 1 2025-04-07T14:15:04.087609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:04.087635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:04.087652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-07T14:15:04.087762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:04.099799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:04.099934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:04.101454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30805 TClient is connected to server localhost:30805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:04.574817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.589317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:04.599197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.701676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.832362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:04.891400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:06.517641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574606611949004:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.517762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.758601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.781494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.809660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.833056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.857360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.887582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:06.927615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574606611949498:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.927693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.927972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574606611949503:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:06.931602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:06.940957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574606611949505:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:07.889735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:08.013941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24109, MsgBus: 21544 2025-04-07T14:15:09.078201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574616242775864:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:09.078241Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00123b/r3tmp/tmpsDtBip/pdisk_1.dat 2025-04-07T14:15:09.162040Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:09.186173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:09.186256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:09.188896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24109, node 2 2025-04-07T14:15:09.232596Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:09.232622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:09.232630Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:09.232751Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21544 TClient is connected to server localhost:21544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:09.608640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:09.614833Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:09.627260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.699477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.848288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:09.919569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:12.007367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [Workloa ... or: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.148590Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.197890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.234585Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.271216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.302855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.335165Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.370760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.455692Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574712271040256:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.455791Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574712271040261:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.455794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:31.459425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:31.468365Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574712271040263:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:32.569704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.728259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.737410Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574695091168875:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:32.737488Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27366, MsgBus: 1354 2025-04-07T14:15:34.069534Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574722958683531:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:34.069593Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00123b/r3tmp/tmphARcOg/pdisk_1.dat 2025-04-07T14:15:34.175509Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:34.184805Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:34.184916Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:34.186317Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27366, node 6 2025-04-07T14:15:34.234762Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:34.234788Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:34.234796Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:34.234924Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1354 TClient is connected to server localhost:1354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:15:34.798175Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:34.824204Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:34.910231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:35.098198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:35.165820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:37.791471Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574735843587151:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:37.791595Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:37.842799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.875663Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.943321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.975611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.006294Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.086598Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.183528Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574740138554950:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:38.183630Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:38.183712Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574740138554955:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:38.187321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:38.196621Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574740138554957:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:39.069814Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574722958683531:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:39.069890Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:39.466412Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.634885Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> BasicStatistics::SimpleGlobalIndex >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropIndex |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool >> KqpPg::InsertValuesFromTableWithDefaultText [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CreateTableSerialColumns >> KqpRanges::MergeRanges [GOOD] >> HttpRequest::ProbeServerless |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::MergeRanges [GOOD] Test command err: Trying to start YDB, gRPC: 20428, MsgBus: 3814 2025-04-07T14:14:43.435148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574504027884884:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:43.435439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001281/r3tmp/tmpbIbc5K/pdisk_1.dat 2025-04-07T14:14:43.702515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20428, node 1 2025-04-07T14:14:43.764263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:43.764282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:43.764287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:43.764366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:43.789525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:43.789632Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:43.790823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3814 TClient is connected to server localhost:3814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:44.248462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.261609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:44.268361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.394639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.533516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:44.603915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:46.278825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574516912788493:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.278942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.508946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.538626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.567957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.598075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.633099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.665737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:46.707779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574516912788988:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.707858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.708083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574516912788993:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:46.712228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:46.725616Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:14:46.726211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574516912788995:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:47.658170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:47.828624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.031624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.162016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.411533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:14:48.443959Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574504027884884:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:48.446336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63955, MsgBus: 6807 2025-04-07T14:14:49.425070Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574530234152040:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:49.425164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001281/r3tmp/tmpP34occ/pdisk_1.dat 2025-04-07T14:14:49.534890Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:49.549177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:49.549258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:49.550839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63955, node 2 2025-04-07T14:14:49.597195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:49.597219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:49.597225Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:49.597301Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6807 TClient is connected to server localhost:6807 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:49.966111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 2814749767156 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.989325Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.024317Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.084152Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.117389Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.161841Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574721828565725:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.161933Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.161979Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574721828565730:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.165182Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.175240Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490574721828565732:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:33.814158Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7490574700353727052:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:33.814228Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:34.448971Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.147541Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035335161, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-04-07T14:15:35.601119Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035335637, txId: 281474976715675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ Trying to start YDB, gRPC: 28999, MsgBus: 10127 2025-04-07T14:15:36.610546Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7490574732368395990:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:36.610625Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001281/r3tmp/tmp9M8Kf0/pdisk_1.dat 2025-04-07T14:15:36.723793Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:36.752786Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:36.752903Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:36.754633Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28999, node 9 2025-04-07T14:15:36.797535Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:36.797559Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:36.797567Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:36.797679Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10127 TClient is connected to server localhost:10127 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:37.314096Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:37.328539Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:37.395337Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:37.612566Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:37.689774Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:40.618312Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574749548266888:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.618415Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.663688Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.699117Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.730168Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.761850Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.797145Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.889243Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.971198Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574749548267389:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.971313Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.971480Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574749548267394:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.975456Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:40.984634Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7490574749548267396:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:41.610768Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7490574732368395990:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:41.618958Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:42.150081Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:42.864585Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035342889, txId: 281474976710673] shutting down >> HttpRequest::Analyze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-04-07T14:15:23.567333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574676510278643:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:23.567384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002198/r3tmp/tmp5QEY2c/pdisk_1.dat 2025-04-07T14:15:23.936943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:23.959117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:23.959218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:23.969320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22350, node 1 2025-04-07T14:15:24.157010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:24.157034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:24.157045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:24.157156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:15:24.582743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.598609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:15:24.600048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.603744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:15:24.603977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:15:24.604047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:15:24.605790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:24.606595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:15:24.606621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:15:24.608072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:24.612833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035324654, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:15:24.612878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:15:24.613093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:15:24.614312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:15:24.614422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:15:24.614455Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:15:24.614513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:15:24.614582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:15:24.614615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:15:24.616464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:15:24.616510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:15:24.616529Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:15:24.616589Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:15:25.908769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574685100214114:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:25.908848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:26.294411Z node 1 :TX_PROXY DEBUG: actor# [1:7490574676510278768:2136] Handle TEvProposeTransaction 2025-04-07T14:15:26.294446Z node 1 :TX_PROXY DEBUG: actor# [1:7490574676510278768:2136] TxId# 281474976710658 ProcessProposeTransaction 2025-04-07T14:15:26.294538Z node 1 :TX_PROXY DEBUG: actor# [1:7490574676510278768:2136] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7490574689395181449:2617] 2025-04-07T14:15:26.355622Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-07T14:15:26.356004Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:15:26.356083Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:15:26.356299Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:15:26.356435Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:15:26.356498Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-07T14:15:26.356642Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 HANDLE EvClientConnected 2025-04-07T14:15:26.357028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.359407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:15:26.359909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:15:26.359941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:26.362652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2025-04-07T14:15:26.362764Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-07T14:15:26.362834Z node 1 :TX_PROXY DEBUG: Actor# [1:7490574689395181449:2617] txid# 281474976710658 SEND to# [1:7490574689395181448:2303] Source 
{TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-07T14:15:26.362873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:15:26.363090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:15:26.363162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:15:26.363557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:15:26.363892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:15:26.363927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:15:26.363975Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:15:26.364167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:15:26.364190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 2814749767106 ... 281474976710765:2 ProgressState 2025-04-07T14:15:43.088429Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:2 progress is 3/3 2025-04-07T14:15:43.088459Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-04-07T14:15:43.088540Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:1 2025-04-07T14:15:43.088549Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:2 2025-04-07T14:15:43.089841Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-04-07T14:15:43.091874Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976710762, at schemeshard: 72057594046644480 2025-04-07T14:15:43.092953Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710766:0, path# /Root/table 2025-04-07T14:15:43.093076Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:15:43.094345Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2025-04-07T14:15:43.094429Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710766, status# StatusAccepted 2025-04-07T14:15:43.094583Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 ProgressState 2025-04-07T14:15:43.095713Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046644480 2025-04-07T14:15:43.096872Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035343141, 
transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:15:43.096899Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 1744035343141 2025-04-07T14:15:43.096912Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710766:0 128 -> 240 2025-04-07T14:15:43.097959Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710766:0 ProgressState 2025-04-07T14:15:43.098018Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710766:0 progress is 1/1 2025-04-07T14:15:43.098055Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710766:0 2025-04-07T14:15:43.099334Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2025-04-07T14:15:43.102735Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710762 2025-04-07T14:15:43.437446Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7490574763508254503:2350] [0] Resolve database: name# /Root 2025-04-07T14:15:43.437738Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7490574763508254503:2350] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2025-04-07T14:15:43.437759Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7490574763508254503:2350] [0] Send request: schemeShardId# 72057594046644480 2025-04-07T14:15:43.438145Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7490574763508254503:2350] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:15112" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1744035342 } EndTime { seconds: 1744035343 } } 2025-04-07T14:15:43.443278Z node 10 :TX_PROXY DEBUG: actor# [10:7490574746328383269:2135] Handle TEvNavigate describe path /Root/table 2025-04-07T14:15:43.443349Z node 10 :TX_PROXY DEBUG: Actor# [10:7490574763508254509:3690] HANDLE EvNavigateScheme /Root/table 2025-04-07T14:15:43.443754Z node 10 :TX_PROXY DEBUG: Actor# [10:7490574763508254509:3690] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:15:43.443863Z node 10 :TX_PROXY DEBUG: Actor# [10:7490574763508254509:3690] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-07T14:15:43.445297Z node 10 :TX_PROXY DEBUG: Actor# [10:7490574763508254509:3690] Handle TEvDescribeSchemeResult Forward to# [10:7490574763508254507:2351] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 
1744035342966 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046644480 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpRanges::ValidatePredicates [GOOD] |96.3%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-04-07T14:15:32.446267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:32.446736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:32.446921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002076/r3tmp/tmpFUvICX/pdisk_1.dat 2025-04-07T14:15:32.914439Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3081, node 1 2025-04-07T14:15:33.381039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:33.381097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:33.381131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:33.381796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:33.389454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.507994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:33.508507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:33.526736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18798 2025-04-07T14:15:34.190549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.066925Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:37.110333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:37.110453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:37.151387Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:37.155429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:37.353212Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:37.408249Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:37.408346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:37.437936Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:37.438120Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:37.438315Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:37.438385Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:37.438455Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:37.438501Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:37.438553Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:37.438601Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:37.438987Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:37.639150Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:37.639270Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:37.646526Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:37.665578Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:37.667755Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:37.669106Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:37.691979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:37.692052Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:37.692117Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:37.694120Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:37.694213Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:37.711184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:37.727355Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:37.727509Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:37.740087Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:37.758489Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:37.798666Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:38.107358Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:38.283079Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:39.226542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:39.226655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:39.296484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:39.516344Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:39.516626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:39.516975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:39.517122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:39.517236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:39.517358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:39.517503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:39.517638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:39.517756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:39.517876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:39.517978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:39.518081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:39.568114Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:39.568206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:39.568437Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:39.568573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:39.568707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:39.568822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... d;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:39.834196Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:39.834368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:39.834403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:39.834527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:39.834567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:39.834720Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:39.834756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:39.834847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:39.834875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:39.835117Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:39.835171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:39.835264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:39.835303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:39.835460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:39.835494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:39.835569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:15:39.835605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:15:39.835661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:15:39.835692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:15:39.835735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:15:39.835778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:15:39.836079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:39.836126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:39.836293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:39.836331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:39.836443Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:39.836480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:39.836624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:39.836656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:39.836788Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:39.836821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:41.215098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2917:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:41.215276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:41.221486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2025-04-07T14:15:41.993682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3065:3163], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:41.993899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:42.006441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000012s 2025-04-07T14:15:44.143788Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3389:3608] 2025-04-07T14:15:44.146420Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3386:3250] , Record { OperationId: "\000\000\000\000\023\350M=\340%\010\204\326\034\361_" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } } 2025-04-07T14:15:44.146494Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=M=% _ 2025-04-07T14:15:44.146522Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=M=% _ , PathId [OwnerId: 72075186224037889, LocalPathId: 4] Answer: 'Analyze sent. OperationId: 00000004z89myy0988gkb1swaz' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> KqpPg::DropIndex [GOOD] >> KqpPg::DropSequence >> BasicStatistics::ServerlessGlobalIndex >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence >> HttpRequest::Status [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull >> BasicStatistics::TwoNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicates [GOOD] Test command err: Trying to start YDB, gRPC: 22176, MsgBus: 5394 2025-04-07T14:14:24.733985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574425582427298:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:24.735442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001292/r3tmp/tmpnaOL4W/pdisk_1.dat 2025-04-07T14:14:25.043615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22176, node 1 2025-04-07T14:14:25.128874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:25.128976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:25.130643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:25.145654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:25.145682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:25.145691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:25.145812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5394 TClient is connected to server localhost:5394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:14:25.589931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.613926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.747783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.884677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:25.947031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:27.552214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574438467330883:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.552312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:27.872475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.901391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.925666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.952172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:27.982366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:28.009062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:28.082244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574442762298679:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:28.082313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:28.082495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574442762298684:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:28.085662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:28.094188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574442762298686:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:28.914913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.144283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.313676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.452647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:29.736558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574425582427298:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:29.736616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:14:29.753994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 21613, MsgBus: 14229 2025-04-07T14:14:30.989108Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574451181414243:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:30.989206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001292/r3tmp/tmp8iW9Iv/pdisk_1.dat 2025-04-07T14:14:31.067800Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:31.091052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:31.091133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:31.094744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21613, node 2 2025-04-07T14:14:31.157184Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:31.157206Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:31.157214Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:31.157296Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14229 TClient is connected to server localhost:14229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:31.538265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 2814749 ... 
;[[3u]];[[4u]];[[5u]];[[6u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]]] 2025-04-07T14:15:36.496568Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035336533, txId: 281474976715735] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 <= 2000 AND Key2 <= 2 AND Key3 <= "resource_3" AND Key4 <= "uid:11" ORDER BY `Value`; 2025-04-07T14:15:36.764378Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035336806, txId: 281474976715737] shutting down 2025-04-07T14:15:37.058905Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035337093, txId: 281474976715739] shutting down EXPECTED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 > 8 ORDER BY `Value`; 2025-04-07T14:15:37.257653Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035337296, txId: 281474976715741] shutting down 2025-04-07T14:15:37.480147Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035337520, txId: 281474976715743] shutting down EXPECTED: [[[19u]];[[20u]]] RECEIVED: [[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 < 9 ORDER BY `Value`; 2025-04-07T14:15:37.753350Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035337765, txId: 281474976715745] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]]] 2025-04-07T14:15:38.014076Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035338045, txId: 281474976715747] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 <= 2 AND Key3 <= "resource_3" AND Key4 <= "uid:11" ORDER BY `Value`; 2025-04-07T14:15:38.180206Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:15:38.180231Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:38.303916Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035338339, txId: 281474976715749] shutting down 2025-04-07T14:15:38.607907Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035338640, txId: 281474976715751] shutting down EXPECTED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 = 2000 AND Key2 = 2 AND Key3 = "resource_3" AND Key4 = "uid:11" ORDER BY `Value`; 2025-04-07T14:15:38.828786Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035338871, txId: 281474976715753] shutting down EXPECTED: [[[10u]]] RECEIVED: [[[10u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 != 2000 AND Key2 != 2 AND Key3 != "resource_3" AND Key4 != "uid:11" ORDER BY `Value`; 2025-04-07T14:15:39.081485Z node 7 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035339123, txId: 281474976715755] shutting down 2025-04-07T14:15:39.559837Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035339599, txId: 281474976715757] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] 2025-04-07T14:15:40.053585Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035340089, txId: 281474976715759] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 IS NULL ORDER BY `Value`; 2025-04-07T14:15:40.246086Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035340285, txId: 281474976715761] shutting down 2025-04-07T14:15:40.414038Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035340453, txId: 281474976715763] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 IS NULL ORDER BY `Value`; 2025-04-07T14:15:40.639277Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035340677, txId: 281474976715765] shutting down 2025-04-07T14:15:40.835620Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035340873, txId: 281474976715767] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 IS NOT NULL ORDER BY `Value`; 2025-04-07T14:15:41.015972Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035341048, txId: 281474976715769] shutting down 2025-04-07T14:15:41.193365Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035341230, txId: 281474976715771] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 > 1000 AND Key2 IS NULL ORDER BY `Value`; 2025-04-07T14:15:41.434798Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035341461, txId: 281474976715773] shutting down 2025-04-07T14:15:41.690916Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035341727, txId: 281474976715775] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 > 1000 OR Key2 IS NULL ORDER BY `Value`; 2025-04-07T14:15:41.934279Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035341972, txId: 281474976715777] shutting down EXPECTED: [[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] 2025-04-07T14:15:42.180440Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: 
discarding snapshot; our snapshot: [step: 1744035342217, txId: 281474976715779] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 >= 1000 OR Key2 IS NOT NULL ORDER BY `Value`; 2025-04-07T14:15:42.396239Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035342434, txId: 281474976715781] shutting down 2025-04-07T14:15:42.635702Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035342672, txId: 281474976715783] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NOT NULL ORDER BY `Value`; 2025-04-07T14:15:42.895953Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035342924, txId: 281474976715785] shutting down 2025-04-07T14:15:43.145168Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035343183, txId: 281474976715787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-04-07T14:15:43.395076Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035343435, txId: 281474976715789] shutting down 2025-04-07T14:15:43.630915Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035343666, txId: 281474976715791] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-04-07T14:15:43.829144Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035343869, txId: 281474976715793] shutting down 2025-04-07T14:15:44.011973Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035344051, txId: 281474976715795] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-04-07T14:15:44.436372Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035344471, txId: 281474976715797] shutting down 2025-04-07T14:15:44.867502Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035344905, txId: 281474976715799] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: 
[[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-04-07T14:15:44.974681Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035344961, txId: 281474976715801] shutting down 2025-04-07T14:15:45.111695Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035345083, txId: 281474976715803] shutting down EXPECTED: [] RECEIVED: [] 2025-04-07T14:15:45.151959Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037919 not found 2025-04-07T14:15:45.152596Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037921 not found 2025-04-07T14:15:45.154130Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037920 not found |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::CreateTableSerialColumns [GOOD] >> KqpPg::CreateUniqPgColumn >> KqpSqlIn::Delete [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-04-07T14:15:35.460197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:35.460496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:35.460622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002025/r3tmp/tmpDjuSXS/pdisk_1.dat 2025-04-07T14:15:35.754866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18154, node 1 2025-04-07T14:15:35.944991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:35.945047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:35.945077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:35.945638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:35.947710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.038928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:36.039050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:36.052866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24389 2025-04-07T14:15:36.664574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.269785Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:39.325048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.325137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.362624Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:39.365795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.557290Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:39.606277Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:39.606379Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:39.634061Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:39.634243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:39.634448Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:39.634516Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:39.634574Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:39.634629Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:39.634678Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:39.634734Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:39.635123Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:39.843028Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.843135Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.849121Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:39.855598Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:39.856441Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:39.857409Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:39.875047Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:39.875121Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:39.875199Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:39.877068Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.877165Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.890872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:39.903106Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:39.903270Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:39.914937Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:39.930713Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.969858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:40.213388Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:40.381378Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:41.099125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:41.099238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:41.114119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:41.284157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:41.284403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:41.284753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:41.284873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:41.284992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:41.285120Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:41.285244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:41.285372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:41.285492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:41.285607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:41.285708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:41.285810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:41.337198Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:41.337290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:41.337490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:41.337558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:41.337622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:41.337687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTx ... ::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:15:41.571949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:15:41.572173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:41.572204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:41.572322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:41.572346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:41.572447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:41.572472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:41.572610Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:41.572646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:41.572735Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:41.572757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:41.572963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:41.572996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:41.573067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:41.573093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:41.573194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:41.573220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:41.573278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:15:41.573306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:15:41.573346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:15:41.573369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:15:41.573396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:15:41.573422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:15:41.573614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:41.573645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:41.573786Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:41.573820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:41.573945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:41.573977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:41.574084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:41.574111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:41.574179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:41.574200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:42.891871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2919:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:42.892058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:42.895640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2025-04-07T14:15:43.642381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3064:3163], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.642555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.655305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000015s 2025-04-07T14:15:45.694607Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3391:3611] 2025-04-07T14:15:45.696346Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 4073, MsgBus: 
21277 2025-04-07T14:15:11.884836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574627227529515:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:11.884900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001235/r3tmp/tmpaprufc/pdisk_1.dat 2025-04-07T14:15:12.195291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4073, node 1 2025-04-07T14:15:12.260175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:12.263991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:12.293086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:12.317465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:12.317517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:12.317528Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:12.317651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21277 TClient is connected to server localhost:21277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:12.780372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:12.792479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:12.797599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:12.931371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:13.075592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:13.131905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:14.739004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574640112433111:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.739139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:14.946757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:14.978971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.017805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.083830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.107593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.173795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:15.212929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574644407400910:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.213003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.213220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574644407400915:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:15.216350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:15.225605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574644407400917:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 28228, MsgBus: 21804 2025-04-07T14:15:16.989916Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574646459104496:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:16.990006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001235/r3tmp/tmpsxmkyU/pdisk_1.dat 2025-04-07T14:15:17.072842Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:17.090795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:17.090877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:17.092454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28228, node 2 2025-04-07T14:15:17.136831Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:17.136860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:17.136871Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:17.136976Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21804 TClient is connected to server localhost:21804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:17.547467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:17.562525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.633553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:17.784330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:17.839367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:19.872572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574659344008096:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:19.872692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:19.904991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... hemeshard: 72057594046644480 2025-04-07T14:15:35.301225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.432577Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574728079205237:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:35.432685Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:35.432691Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490574728079205242:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:35.435941Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:35.445275Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490574728079205244:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:36.564722Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.597120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.646264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.712869Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574710899333866:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:36.712947Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
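The warning above (code 1108) suggests enabling ANSI semantics for IN over nullable or empty collections. A minimal YQL sketch of where that pragma would go — the table `t` and column `val` are hypothetical placeholders, not taken from the test output above:

    -- t and val are assumed names for illustration only
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    -- With the pragma enabled, IN over a collection containing NULL
    -- follows ANSI three-valued logic instead of the legacy behaviour
    -- that the warning cautions about.
    SELECT *
    FROM t
    WHERE val IN (1, 2, NULL);
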
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27428, MsgBus: 6644 2025-04-07T14:15:40.097196Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574749325274877:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:40.097281Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001235/r3tmp/tmpBpOg1e/pdisk_1.dat 2025-04-07T14:15:40.208381Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:40.232862Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:40.232955Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:40.234407Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27428, node 5 2025-04-07T14:15:40.281725Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:40.281754Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:40.281763Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:40.281902Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6644 TClient is connected to server localhost:6644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:40.770035Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:40.788497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.862896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:41.028507Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:41.112418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:43.476258Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574762210178478:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.476342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.516587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.583391Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.610233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.635578Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.662716Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.694931Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.768522Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574762210178980:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.768609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574762210178985:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.768615Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.772292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:43.782544Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574762210178987:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:15:44.821388Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.853617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.883876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:45.097383Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574749325274877:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:45.097461Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> HttpRequest::AnalyzeServerless [GOOD] >> KqpPg::DropSequence [GOOD] >> KqpPg::DropTableIfExists >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull [GOOD] >> KqpPg::LongDomainName |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2025-04-07T14:15:35.375415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:35.375822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:35.376001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002023/r3tmp/tmpOQpXaR/pdisk_1.dat 2025-04-07T14:15:35.671753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14488, node 1 2025-04-07T14:15:35.873645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:35.873692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:35.873713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:35.874142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:35.876088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.972387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:35.972531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:35.986888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63889 2025-04-07T14:15:36.605785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.453029Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:39.482698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.482773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.521901Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:39.525796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.690798Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:39.737779Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:39.737883Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:39.764885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:39.765086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:39.765275Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:39.765332Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:39.765403Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:39.765461Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:39.765519Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:39.765567Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:39.765915Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:39.968203Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.968279Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.973089Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:39.979865Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:39.980649Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:39.981668Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:39.996401Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:39.996457Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:39.996530Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:39.998281Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.998372Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:40.011517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:40.028199Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:40.028351Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:40.040740Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:40.056934Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:40.096753Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:40.322454Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:40.476580Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:41.088198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:41.611049Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:41.749840Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:41.749907Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:41.749991Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2487:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:41.752168Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2495:2909] 2025-04-07T14:15:41.753016Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2495:2909], schemeshard id = 72075186224037899 2025-04-07T14:15:42.702711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2615:3186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:42.702870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:42.719976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:15:42.953834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:42.954072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:42.954362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:42.954485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:42.954593Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:42.954702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:42.954809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:42.954912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:42.955027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:42.955139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:42.955254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:42.955366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3042];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:42.982982Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2769:3045];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04 ... shed;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:43.314855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:43.315018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:43.315057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:43.315195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:43.315236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:43.315400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:43.315438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:43.315541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:43.315582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:43.318038Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:43.318096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:43.318179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:43.318222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:43.318380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:43.318417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:43.318499Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:15:43.318534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:15:43.318592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:15:43.318627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:15:43.318668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:15:43.318699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:15:43.318998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:43.319095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:43.319254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:43.319302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:43.319439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:43.319483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:43.319654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:43.319694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:43.319798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:43.319837Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:44.891128Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolFetcherActor] ActorId: [1:3406:3290], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.900583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.903031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2025-04-07T14:15:46.070218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3562:3338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.070691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.073758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 waiting actualization: 0/0.000019s 2025-04-07T14:15:48.568165Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3892:3825] 2025-04-07T14:15:48.579451Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3888:3423] , Record { OperationId: "\000\000\000\000\031\307\004W\250f\272w}0\210\374" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-04-07T14:15:48.579515Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=Wfw}0 2025-04-07T14:15:48.579552Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=Wfw}0 , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. OperationId: 00000006e70hbtgsntexyk127w' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; >> ColumnShardTiers::DSConfigsStub ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 19383, MsgBus: 23126 2025-04-07T14:14:19.685234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574401681346388:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:19.685361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a4/r3tmp/tmpnw4kg1/pdisk_1.dat 2025-04-07T14:14:19.965276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19383, node 1 2025-04-07T14:14:20.043800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:20.043840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:20.043857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:20.044001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:20.056217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:20.056348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:20.057936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23126 TClient is connected to server localhost:23126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:20.457482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:20.481184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.589710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.734647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:20.803910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:22.290815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574414566249835:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.290907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.498482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.522601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.543993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.566723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.591116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.615495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.689849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574414566250332:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.689920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.689995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574414566250337:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:22.693598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:22.702505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574414566250339:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:24.684749Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574401681346388:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:24.684813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22786, MsgBus: 2321 2025-04-07T14:14:25.604558Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574428145300975:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:25.604659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a4/r3tmp/tmpNjOx7B/pdisk_1.dat 2025-04-07T14:14:25.718547Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:14:25.723353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:25.723457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:25.725028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22786, node 2 2025-04-07T14:14:25.769223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:25.769249Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:25.769258Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:25.769347Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2321 TClient is connected to server localhost:2321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:26.139807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:26.151163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:14:26.239809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:26.403910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:14:26.478323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:14:28.490278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574441030204580:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:28.490367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Roo ... on type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.996998Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.034607Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.079826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.115817Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.151606Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.186700Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.219525Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.254137Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31666, MsgBus: 16207 2025-04-07T14:15:40.274688Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490574751349373831:2224];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a4/r3tmp/tmprVobYz/pdisk_1.dat 2025-04-07T14:15:40.352237Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:15:40.374918Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31666, node 10 2025-04-07T14:15:40.403533Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:40.403681Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:40.405581Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:40.437928Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:40.437952Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:40.437960Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-07T14:15:40.438085Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16207 TClient is connected to server localhost:16207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:41.136888Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:41.145488Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:41.160555Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:41.239693Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:41.451506Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:41.546235Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:44.745480Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490574768529244542:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.745590Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.797319Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.838706Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.876580Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.910944Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.950443Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:45.027204Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:15:45.088877Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490574772824212336:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.088973Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.089048Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490574772824212341:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.092976Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:15:45.103819Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490574772824212343:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:15:45.274456Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490574751349373831:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:45.274554Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:15:46.534233Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.572788Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.608853Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.646662Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.720526Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.760339Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.798425Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.834251Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.875160Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.915807Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> KqpPg::InsertFromSelect_Simple [GOOD] >> KqpPg::InsertFromSelect_NoReorder >> TStorageBalanceTest::TestScenario3 [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 
[GOOD] Test command err: 2025-04-07T14:12:27.422355Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.425548Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.425760Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.426245Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.427265Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.427313Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.428180Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.428227Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.428354Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.428591Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.429375Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.429426Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.429549Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.429712Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:19:2063] 2025-04-07T14:12:27.429740Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:19:2063] 2025-04-07T14:12:27.439777Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.439824Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.441667Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.441832Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.441972Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.442096Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.442229Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.442364Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 
2025-04-07T14:12:27.442504Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.442535Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.442599Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:28:2073] 2025-04-07T14:12:27.442623Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:28:2073] 2025-04-07T14:12:27.443317Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.443403Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.459562Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.459626Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.459669Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.459746Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.459773Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.459799Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.462913Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.464698Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.464934Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.465120Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.465461Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.465736Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.465790Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.465859Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-07T14:12:27.465945Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-07T14:12:27.465967Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 
2025-04-07T14:12:27.466087Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:49:2089] 2025-04-07T14:12:27.466112Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:49:2089] 2025-04-07T14:12:27.466468Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:49:2089] 2025-04-07T14:12:27.466591Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.466701Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.466738Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.469243Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.469560Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.469677Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.469717Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-07T14:12:27.469785Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.470671Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:27.470748Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-07T14:12:27.470772Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-07T14:12:27.478736Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:19:2063] 2025-04-07T14:12:27.478807Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:19:2063] 2025-04-07T14:12:27.479076Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.479431Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.481172Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T14:12:27.481218Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.481326Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-07T14:12:27.481778Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T14:12:27.481824Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:45:2088] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.481878Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.021925s 
2025-04-07T14:12:27.482123Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.482229Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.482300Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.482331Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-07T14:12:27.482427Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:28:2073] 2025-04-07T14:12:27.482455Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:28:2073] 2025-04-07T14:12:27.482494Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T14:12:27.482540Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.482685Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: ... f5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:487:0:0:246:1] Marker# BPG32 2025-04-07T14:15:49.141044Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:402:2087] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:487:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:15:49.144238Z node 25 :BS_PROXY_PUT DEBUG: [c8b7fa19229240f5] received {EvVPutResult Status# OK ID# [72057594037927937:2:487:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 502 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 503 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:15:49.144339Z node 25 :BS_PROXY_PUT DEBUG: [c8b7fa19229240f5] Result# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:15:49.144387Z node 25 :BS_PROXY_PUT INFO: [c8b7fa19229240f5] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:15:49.145201Z node 25 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:15:49.145796Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} commited cookie 1 for step 487 2025-04-07T14:15:49.147354Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1487, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-07T14:15:49.147412Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1487, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:15:49.147610Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1487, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{986, redo 
303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-07T14:15:49.147659Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1487, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:15:49.147766Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [25:1271:2637] 2025-04-07T14:15:49.147799Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [25:1271:2637] 2025-04-07T14:15:49.147850Z node 25 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [25:1214:2599] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) *******--------------------------------------------------------------------------------------------- (0.068) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.054) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.048) 2025-04-07T14:15:49.250336Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1488, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-07T14:15:49.250401Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1488, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:15:49.250503Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004363872}: tablet 72075186224037957 wasn't changed 2025-04-07T14:15:49.250531Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004363872}: tablet 72075186224037957 skipped channel 0 2025-04-07T14:15:49.250590Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004363872}: tablet 72075186224037957 skipped channel 1 2025-04-07T14:15:49.250613Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004363872}: tablet 72075186224037957 skipped channel 2 2025-04-07T14:15:49.250658Z node 25 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004363872}(72075186224037957)::Execute - TryToBoot was not successfull 2025-04-07T14:15:49.250707Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1488, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{987, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-07T14:15:49.250742Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1488, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:15:49.264451Z node 25 :BS_PROXY_PUT INFO: [e70d84823a0fd088] bootstrap ActorId# [25:11664:6169] Group# 0 BlobCount# 1 
BlobIDs# [[72057594037927937:2:488:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:15:49.264579Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] Id# [72057594037927937:2:488:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:15:49.264621Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] restore Id# [72057594037927937:2:488:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:15:49.264681Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG33 2025-04-07T14:15:49.264714Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG32 2025-04-07T14:15:49.264800Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:402:2087] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:488:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:15:49.266953Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] received {EvVPutResult Status# OK ID# [72057594037927937:2:488:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 503 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 504 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:15:49.267047Z node 25 :BS_PROXY_PUT DEBUG: [e70d84823a0fd088] Result# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:15:49.267083Z node 25 :BS_PROXY_PUT INFO: [e70d84823a0fd088] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:15:49.267520Z node 25 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:15:49.267922Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} commited cookie 1 for step 488 2025-04-07T14:15:49.269136Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1489, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-07T14:15:49.269176Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1489, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:15:49.269321Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1489, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{988, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-07T14:15:49.269355Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1489, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:15:49.269437Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [25:1271:2637] 2025-04-07T14:15:49.269466Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [25:1271:2637] 2025-04-07T14:15:49.269501Z node 25 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [25:1214:2599] EventType# 268637702 c[def1] 
*****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) *******--------------------------------------------------------------------------------------------- (0.068) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.054) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.048) 2025-04-07T14:15:49.370973Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1490, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-07T14:15:49.371043Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1490, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:15:49.371136Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004117920}: tablet 72075186224037918 wasn't changed 2025-04-07T14:15:49.371163Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004117920}: tablet 72075186224037918 skipped channel 0 2025-04-07T14:15:49.371227Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004117920}: tablet 72075186224037918 skipped channel 1 2025-04-07T14:15:49.371245Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004117920}: tablet 72075186224037918 skipped channel 2 2025-04-07T14:15:49.371289Z node 25 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004117920}(72075186224037918)::Execute - TryToBoot was not successfull 2025-04-07T14:15:49.371337Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1490, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{989, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-07T14:15:49.371369Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1490, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::CreateUniqPgColumn 
[GOOD] >> KqpPg::CreateUniqComplexPgColumn |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> KqpPg::LongDomainName [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::InsertFromSelect_NoReorder [GOOD] >> KqpPg::InsertFromSelect_Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 3909, MsgBus: 8360 2025-04-07T14:15:26.793001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574689872823961:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.793037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00215a/r3tmp/tmp87jahL/pdisk_1.dat 2025-04-07T14:15:27.114642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3909, node 1 2025-04-07T14:15:27.188462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.188521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.192290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.284619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.284645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.284655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.284783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8360 TClient is connected to server localhost:8360 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.861386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:29.516599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702757726486:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.516715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.553695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.689034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702757726590:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.689120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.700525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.734975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702757726666:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.735037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.735056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702757726671:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.738680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.746862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574702757726673:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } Trying to start YDB, gRPC: 21813, MsgBus: 18603 2025-04-07T14:15:31.085978Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574712628533735:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.086030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00215a/r3tmp/tmpqlA6dI/pdisk_1.dat 2025-04-07T14:15:31.169483Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:31.192229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:31.192306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:31.193617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21813, node 2 2025-04-07T14:15:31.238234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:31.238252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:31.238257Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:31.238359Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18603 TClient is connected to server localhost:18603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:31.637140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:31.642531Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:33.718706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574721218468947:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.718785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.726845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.770617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574721218469049:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.770715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.784950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.822897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574721218469125:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.822951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574721218469130:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.822958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.826072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.856643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574721218469132:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 2 ... ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:45.713898Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574774014913109:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.713976Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.714028Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7490574774014913122:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.717935Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:45.727025Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574774014913138:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:45.849728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24833, MsgBus: 6772 2025-04-07T14:15:46.959354Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574774562016798:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:46.959417Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00215a/r3tmp/tmpiDchts/pdisk_1.dat 2025-04-07T14:15:47.069413Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:47.096861Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.096952Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.098359Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24833, node 6 2025-04-07T14:15:47.137938Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:47.137957Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:47.137963Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:47.138055Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6772 TClient is connected to server localhost:6772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:47.553158Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:49.778058Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574787446919314:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.778113Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574787446919292:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.778450Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.781331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:49.788348Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574787446919321:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:49.882077Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574787446919389:2309], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. 2025-04-07T14:15:49.882329Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzE0NDRjNjEtNDBiMjA2NzAtNjZhOWIyZDAtYTE2OWI1OGY=, ActorId: [6:7490574787446919290:2300], ActorState: ExecuteState, TraceId: 01jr89pz3kb3k7s1p70fsr0v44, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. Trying to start YDB, gRPC: 17388, MsgBus: 19691 2025-04-07T14:15:50.477708Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574794717251999:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:50.477782Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00215a/r3tmp/tmpa7GnH3/pdisk_1.dat 2025-04-07T14:15:50.556740Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:50.581411Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:50.581512Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:50.583393Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17388, node 7 2025-04-07T14:15:50.625279Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:50.625304Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:50.625309Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:50.625392Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19691 TClient is connected to server localhost:19691 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Processi... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-04-07T14:15:51.071529Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:53.425286Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807602154522:2304], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.425286Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807602154503:2301], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.425377Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.429484Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:53.438123Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574807602154532:2305], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:53.514994Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 28106, MsgBus: 15853 2025-04-07T14:15:26.813806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574689350021798:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.813867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00216e/r3tmp/tmpcxAkOt/pdisk_1.dat 2025-04-07T14:15:27.120973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:27.168115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.168215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28106, node 1 2025-04-07T14:15:27.169483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.285052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.285073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.285086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.285157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15853 TClient is connected to server localhost:15853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.915276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:27.929488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:29.616310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702234924112:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.616399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702234924090:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.616691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.621291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.631338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574702234924119:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:15:29.747181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28932, MsgBus: 15142 2025-04-07T14:15:30.921720Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574709633613275:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.921789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00216e/r3tmp/tmpBLzK23/pdisk_1.dat 2025-04-07T14:15:31.001585Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:31.022255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:31.022333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:31.026504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28932, node 2 2025-04-07T14:15:31.073449Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:31.073472Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:31.073479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:31.073567Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15142 TClient is connected to server localhost:15142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:31.514109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.576597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574722518515782:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.576646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574722518515766:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.576990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.581105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.587931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574722518515795:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:33.682106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62660, MsgBus: 18801 2025-04-07T14:15:34.799510Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574725178059337:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:34.799554Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00216e/r3tmp/tmp2ZcTsM/pdisk_1.dat 2025-04-07T14:15:34.889903Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:34.904611Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 62660, node 3 2025-04-07T14:15:34.905665Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:34.908186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:34.943280Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:34.943297Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:34.943302Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:34.943393Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18801 TClient is connected to server localhost:18801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 ... pool default not found or you don't have access permissions } 2025-04-07T14:15:45.481911Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.484802Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:45.493373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7490574772623001016:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:45.601898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1919, MsgBus: 22513 2025-04-07T14:15:46.859880Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574775438646262:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:46.859959Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00216e/r3tmp/tmpxprHmY/pdisk_1.dat 2025-04-07T14:15:46.972769Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:47.004575Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.004695Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.006033Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1919, node 6 2025-04-07T14:15:47.050015Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:47.050038Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:47.050046Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:47.050154Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22513 TClient is connected to server localhost:22513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:47.508239Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:49.551130Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574788323548788:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.551467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490574788323548762:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.551538Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.554918Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:49.562033Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490574788323548791:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:49.650811Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574788323548859:2309], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" 2025-04-07T14:15:49.651100Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDMyNjk2LTYxOTkwODdiLTRlMWJlMzNmLWI5NjU1MTdm, ActorId: [6:7490574788323548760:2300], ActorState: ExecuteState, TraceId: 01jr89pz28dzqz2zcta94aqg61, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 24235, MsgBus: 14182 2025-04-07T14:15:50.351717Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574791797576700:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:50.351788Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00216e/r3tmp/tmpP1d5Wy/pdisk_1.dat 2025-04-07T14:15:50.432375Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:50.458631Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:50.458725Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:50.460007Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24235, node 7 2025-04-07T14:15:50.501252Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:50.501270Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:50.501277Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:50.501372Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14182 TClient is connected to server localhost:14182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:51.024736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:53.345756Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574804682479209:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.345800Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574804682479196:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.345884Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.349314Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:53.359025Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574804682479225:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:53.489618Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:15:53.587718Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:53.637573Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7490574804682479511:2332], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-07T14:15:53.637766Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MjRjMGEyZGUtOWE1ZjlkMWItZDJhZTI4NDktN2EzZDE0NGE=, ActorId: [7:7490574804682479509:2331], ActorState: ExecuteState, TraceId: 01jr89q50pfc5mnrec2qc66p8p, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestGetStorageInfo >> ColumnShardTiers::TieringUsage |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> ColumnShardTiers::TTLUsage >> ColumnShardTiers::DSConfigsWithQueryServiceDdl >> ColumnShardTiers::DSConfigs |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams >> KqpPg::CreateUniqComplexPgColumn [GOOD] >> KqpPg::CreateTempTable >> KqpPg::InsertFromSelect_Serial [GOOD] >> KqpPg::EquiJoin >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DropTableIfExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 16641, MsgBus: 20015 2025-04-07T14:15:27.535658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574692723242498:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:27.536432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00208b/r3tmp/tmpcFANUC/pdisk_1.dat 2025-04-07T14:15:27.877072Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16641, node 1 2025-04-07T14:15:27.925883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.925994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.927795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.941810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.941835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.941860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.941950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20015 TClient is connected to server localhost:20015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:28.328682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:29.931193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574701313177595:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.931200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574701313177587:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.931304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.934820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.943082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574701313177601:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 1750, MsgBus: 23417 2025-04-07T14:15:30.577157Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574706329573534:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.577226Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00208b/r3tmp/tmpp0EMp2/pdisk_1.dat 2025-04-07T14:15:30.652956Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:30.678852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:30.678911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1750, node 2 2025-04-07T14:15:30.680198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:30.730478Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:30.730516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:30.730524Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:30.730607Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23417 TClient is connected to server localhost:23417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:31.090191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:33.276005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574719214476046:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.276031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574719214476038:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.276095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.278955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.286082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574719214476052:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 23467, MsgBus: 25115 2025-04-07T14:15:34.051495Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574723045134221:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:34.051574Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00208b/r3tmp/tmpzcvdZN/pdisk_1.dat 2025-04-07T14:15:34.126148Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:34.153579Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:34.153650Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23467, node 3 2025-04-07T14:15:34.155097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:34.193166Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:34.193195Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:34.193203Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:34.193286Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25115 TClient is connected to server localhost:25115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:15:34.523321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.544557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:34.644116Z node 3 :FLAT_TX_SCHEMESHARD WARN: O ... 
ts.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574794202158114:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:50.120753Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00208b/r3tmp/tmp3cA4dB/pdisk_1.dat 2025-04-07T14:15:50.200045Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13994, node 2025-04-07T14:15:50.223419Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 7 2025-04-07T14:15:50.223507Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:50.224907Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:50.260446Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:50.260468Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:50.260477Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:50.260573Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18473 TClient is connected to server localhost:18473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:50.747813Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:53.480623Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807087060624:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.480823Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.505858Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807087060650:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.505985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.514157Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:53.589658Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807087060740:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.589733Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.589774Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574807087060745:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:53.593175Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:53.600912Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574807087060747:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:15:53.805577Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-07T14:15:53.824232Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7490574807087060897:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:15:53.825961Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=N2ExOGVhMS03MjJhYWFlNS03YzlmZDdkOC05OWU3ZjA1NA==, ActorId: [7:7490574807087060894:2346], ActorState: ExecuteState, TraceId: 01jr89q56h85cc9ccmbs4nq6y9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11882, MsgBus: 10071 2025-04-07T14:15:54.400322Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490574808836968080:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:54.400497Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00208b/r3tmp/tmpCynCW6/pdisk_1.dat 2025-04-07T14:15:54.519797Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:54.529969Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:54.530068Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:54.531861Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11882, node 8 2025-04-07T14:15:54.568225Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:54.568253Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:54.568263Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:54.568389Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10071 TClient is connected to server localhost:10071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:55.049752Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:57.408429Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574821721870600:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:57.408481Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574821721870579:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:57.408822Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:57.412453Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:57.422657Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490574821721870609:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:57.595117Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:57.746637Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2025-04-07T14:15:57.760774Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7490574821721870866:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:15:57.761077Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ZjkxZjJhODctNzJkYTZhYjYtZmI1YjdhZDgtYTMyNTM4ODQ=, ActorId: [8:7490574821721870863:2340], ActorState: ExecuteState, TraceId: 01jr89q91k77z8xc3a86yszt1h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpPg::ValuesInsert [GOOD] >> PgCatalog::PgType |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::TypeCoercionInsert [GOOD] >> KqpPg::TableSelect >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest::LdapServerIsUnavailable >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] Test command err: 2025-04-07T14:12:27.197782Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.200708Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.200897Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.201652Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:64:2071] ControllerId# 72057594037932033 2025-04-07T14:12:27.201684Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.201787Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 
2025-04-07T14:12:27.201995Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.202872Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.202910Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.204576Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:70:2075] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.204777Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:71:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.204894Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:72:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.205042Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:73:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.205163Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:74:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.205286Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:75:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.205410Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:63:2070] Create Queue# [2:76:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.205435Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.205495Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:64:2071] 2025-04-07T14:12:27.205531Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:64:2071] 2025-04-07T14:12:27.205575Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.205618Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.206076Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.206202Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.208376Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.208469Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.209195Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:83:2072] ControllerId# 72057594037932033 2025-04-07T14:12:27.209227Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.209288Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.209443Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.209883Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.209909Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 
2025-04-07T14:12:27.211190Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:89:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.211311Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:90:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.211496Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:91:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.211623Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:92:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.211780Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:93:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.211917Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:94:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.212039Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:82:2071] Create Queue# [3:95:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.212062Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.212109Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:83:2072] 2025-04-07T14:12:27.212135Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:83:2072] 2025-04-07T14:12:27.212166Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.212198Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.212534Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.212763Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.214845Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.214979Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.215585Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.216597Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.216642Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.217358Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:105:2075] ControllerId# 72057594037932033 2025-04-07T14:12:27.217390Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.217457Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.217626Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.272808Z node 1 :BS_PROXY INFO: Group# 0 
TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.272871Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.274342Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:113:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.274493Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:114:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.274627Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:115:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.274734Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:116:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.274848Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:117:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.274933Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:118:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.275019Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:104:2074] Create Queue# [1:119:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.275043Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.275097Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:105:2075] 2025-04-07T14:12:27.275120Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:105:2075] 2025-04-07T14:12:27.275147Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.275179Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.275857Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.276068Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.286944Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:105:2075] 2025-04-07T14:12:27.287013Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.287073Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.288529Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.296392Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:64:2071] 2025-04-07T14:12:27.296446Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.296474Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.296654Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.296829Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:83:2072] 2025-04-07T14:12:27.296882Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.296919Z node 3 :BS_NODE DEBUG: 
{NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.297117Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtain ... 8eda0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:477:1] Marker# BPG32 2025-04-07T14:16:01.118507Z node 44 :BS_PROXY DEBUG: Send to queueActorId# [44:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:477:1] FDS# 477 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:16:01.119258Z node 44 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:477:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 83755 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:16:01.119319Z node 44 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:477:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:16:01.119352Z node 44 :BS_PROXY_PUT INFO: [9521640286a8eda0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:477:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:16:01.119453Z node 44 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:477:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:16:01.119517Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-07T14:16:01.119789Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [44:308:2288] 2025-04-07T14:16:01.119822Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [44:308:2288] 2025-04-07T14:16:01.119904Z node 44 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:16:01.119979Z node 44 :TABLET_RESOLVER DEBUG: SelectForward node 44 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [44:263:2256] 2025-04-07T14:16:01.120036Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [44:308:2288] 2025-04-07T14:16:01.120077Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [44:308:2288] 2025-04-07T14:16:01.120118Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [44:308:2288] 2025-04-07T14:16:01.120166Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [44:308:2288] 2025-04-07T14:16:01.120247Z node 44 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [44:308:2288] 2025-04-07T14:16:01.120338Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [44:308:2288] 2025-04-07T14:16:01.120381Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [44:308:2288] 2025-04-07T14:16:01.120425Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [44:308:2288] 2025-04-07T14:16:01.120470Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [44:308:2288] 
2025-04-07T14:16:01.120506Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [44:308:2288] 2025-04-07T14:16:01.120551Z node 44 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [44:307:2287] EventType# 268697621 2025-04-07T14:16:01.120893Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [44:311:2291] 2025-04-07T14:16:01.120933Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [44:311:2291] 2025-04-07T14:16:01.120992Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [44:311:2291] 2025-04-07T14:16:01.205889Z node 44 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:16:01.206002Z node 44 :TABLET_RESOLVER DEBUG: SelectForward node 44 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [44:263:2256] 2025-04-07T14:16:01.206114Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [44:311:2291] 2025-04-07T14:16:01.206172Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [44:311:2291] 2025-04-07T14:16:01.206243Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [44:311:2291] 2025-04-07T14:16:01.206360Z node 44 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [44:311:2291] 2025-04-07T14:16:01.206541Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [44:311:2291] 2025-04-07T14:16:01.206592Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [44:311:2291] 2025-04-07T14:16:01.206630Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [44:311:2291] 2025-04-07T14:16:01.206682Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [44:311:2291] 2025-04-07T14:16:01.206725Z node 44 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [44:311:2291] 2025-04-07T14:16:01.206801Z node 44 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [44:310:2290] EventType# 268697615 2025-04-07T14:16:01.206930Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} queued, type NKikimr::NHive::TTxDeleteTablet 2025-04-07T14:16:01.207005Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:16:01.207191Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} hope 1 -> done Change{5, redo 102b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:16:01.207261Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:16:01.218098Z node 44 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] bootstrap ActorId# [44:314:2294] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:16:01.218224Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Id# [72057594037927937:2:5:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:16:01.218283Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] restore Id# [72057594037927937:2:5:0:0:104:0] optimisticReplicas# 1 optimisticState# 
EBS_FULL Marker# BPG55 2025-04-07T14:16:01.218347Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG33 2025-04-07T14:16:01.218408Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG32 2025-04-07T14:16:01.218540Z node 44 :BS_PROXY DEBUG: Send to queueActorId# [44:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:16:01.219624Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:16:01.219733Z node 44 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:16:01.219799Z node 44 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:16:01.219962Z node 44 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:16:01.220061Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2025-04-07T14:16:01.220297Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} queued, type NKikimr::NHive::TTxDeleteTabletResult 2025-04-07T14:16:01.220385Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:16:01.220707Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} hope 1 -> done Change{6, redo 106b alter 0b annex 0, ~{ 16, 1 } -{ }, 0 gb} 2025-04-07T14:16:01.220798Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:16:01.231825Z node 44 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] bootstrap ActorId# [44:316:2296] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:6:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:16:01.231956Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Id# [72057594037927937:2:6:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:16:01.232022Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] restore Id# [72057594037927937:2:6:0:0:104:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:16:01.232084Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG33 2025-04-07T14:16:01.232131Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Sending missing VPut part# 0 to# 0 
blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG32 2025-04-07T14:16:01.232263Z node 44 :BS_PROXY DEBUG: Send to queueActorId# [44:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:6:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:16:01.233122Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] received {EvVPutResult Status# OK ID# [72057594037927937:2:6:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:16:01.233208Z node 44 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Result# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:16:01.233260Z node 44 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:16:01.233423Z node 44 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:16:01.233524Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} commited cookie 1 for step 6 >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> KqpPg::EquiJoin [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder >> KqpPg::TableInsert [GOOD] >> KqpPg::TableArrayInsert >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DeleteWithQueryService >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> 
LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> KqpPg::InsertNoTargetColumns_ColumnOrder [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> KqpPg::CheckPgAutoParams [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams [GOOD] Test command err: Trying to start YDB, gRPC: 21869, MsgBus: 5502 2025-04-07T14:15:26.826363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574688838565107:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.827689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00214f/r3tmp/tmp8Hgm6y/pdisk_1.dat 2025-04-07T14:15:27.148276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21869, node 1 2025-04-07T14:15:27.216049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.216622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.218152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.286781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.286800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.286808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.286887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5502 TClient is connected to 
server localhost:5502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.929574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:27.938869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:15:29.386658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574701723467540:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.386780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.549372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.679734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574701723467674:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.679827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.680037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574701723467679:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.682890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.691927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574701723467681:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:15:30.107957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715665:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19497, MsgBus: 19036 2025-04-07T14:15:31.946832Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574710924638186:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.952274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00214f/r3tmp/tmp2isZKX/pdisk_1.dat 2025-04-07T14:15:32.049049Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:32.060639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:32.060713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:32.061750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19497, node 2 2025-04-07T14:15:32.099815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:32.099835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:32.099841Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:32.099969Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19036 TClient is connected to server localhost:19036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:32.538047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:34.469642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574723809540704:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:34.469701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574723809540696:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:34.469987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:34.473652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:34.482124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574723809540710:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:15:34.590160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.625227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:15:34.694031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.722553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2025-04-07T14:15:34.761969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.805353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.869550Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDk3NDIzYWQtN2YzMGUzZC1hMTM3ZTZmNi02MTRhNzhhNA==, ActorId: [2:7490574723809540668:2299], ActorState: ExecuteState, TraceId: 01jr89pjm089ajc18r3wkrvqjs, Create QueryResponse for error on request, msg: 2025-04-07T14:15:34.905644Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDk3NDIzYWQtN2YzMGUzZC1hMTM3ZTZmNi02MTRhNzhhNA==, ActorId: [2:7490574723809540668:2299], ActorState: ExecuteState, TraceId: 01jr89pjpy3bq0yb1gbt6eta2s, Create QueryResponse for error on request, msg: 2025-04-07T14:15:34.927218Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id= ... 06], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:02.078690Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28736, MsgBus: 24947 2025-04-07T14:16:03.108421Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7490574850282276480:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:03.108501Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00214f/r3tmp/tmpkRmj1n/pdisk_1.dat 2025-04-07T14:16:03.235280Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28736, node 9 2025-04-07T14:16:03.260753Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:03.260877Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:03.262377Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:03.288840Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:03.288869Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:03.288881Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:03.289020Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24947 TClient is connected to server localhost:24947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:03.833015Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:06.323394Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574863167179004:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:06.323515Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574863167178994:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:06.324049Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:06.327547Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:06.337134Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7490574863167179009:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:06.427178Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:16:06.618278Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:16:06.988580Z node 9 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [9:7490574863167179388:2365], owner: [9:7490574863167178967:2296], statement id: 0 2025-04-07T14:16:06.988802Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=N2Y1MWYxMTEtNWZhNmQzYjctMWQ5MTJhMWUtYTFlNWM2YmM=, ActorId: [9:7490574863167179386:2364], ActorState: ExecuteState, TraceId: 01jr89qj29560mmpjdjzy958qw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:07.143421Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490574867462146712:2376], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-07T14:16:07.143618Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=M2JkN2I2YzYtNGYwMzhkOWUtMTJmZjY0ZmQtMzAyODc3NzM=, ActorId: [9:7490574867462146709:2374], ActorState: ExecuteState, TraceId: 01jr89qj6gctnhvjknrmhr01h7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:07.171400Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490574867462146726:2382], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-07T14:16:07.171612Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OWVjNDVhNGMtNTgyNTc1OGUtNjNkMTU3ZGYtODE4NmQ3OGI=, ActorId: [9:7490574867462146722:2380], ActorState: ExecuteState, TraceId: 01jr89qj7d947vz7gkxqrnfz3a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:07.181782Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr89qj89220tgg9eqxfmwff1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZmZlNWViNDgtMzkyZmFkYTItNzM2Nzg1MS00Y2U4MTgwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-07T14:16:07.182600Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZmZlNWViNDgtMzkyZmFkYTItNzM2Nzg1MS00Y2U4MTgwZg==, ActorId: [9:7490574867462146735:2386], ActorState: ExecuteState, TraceId: 01jr89qj89220tgg9eqxfmwff1, Create QueryResponse for error on request, msg: 2025-04-07T14:16:07.211995Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:16:07.278885Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-07T14:16:07.324802Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490574867462146900:2411], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-04-07T14:16:07.324963Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MWUzOTc1ZjUtZDRiN2ZlMS1lYjdhMjM5LWEwNWNiMmQ1, ActorId: [9:7490574867462146897:2409], ActorState: ExecuteState, TraceId: 01jr89qjcgcnycc093kneykv1t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:07.344359Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7490574867462146915:2418], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-04-07T14:16:07.344584Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=Mzk1OTRmN2QtZTQ3MjIyMzAtYTgyOGYxNjAtZWMwYzE2ZmY=, ActorId: [9:7490574867462146912:2416], ActorState: ExecuteState, TraceId: 01jr89qjd2f9wmkwswpbttpw0v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:07.613523Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr89qjdp71bextcq5z12kkmv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDM2MDgxMTctNjk4N2MwN2YtMzQ4OGUxMjItMWRhYzRjZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-07T14:16:07.613836Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZDM2MDgxMTctNjk4N2MwN2YtMzQ4OGUxMjItMWRhYzRjZTE=, ActorId: [9:7490574867462146924:2422], ActorState: ExecuteState, TraceId: 01jr89qjdp71bextcq5z12kkmv, Create QueryResponse for error on request, msg: 2025-04-07T14:16:07.646301Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:16:08.021545Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 9, TabletId: 72075186224037892 not found 2025-04-07T14:16:08.040598Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:16:08.114000Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7490574850282276480:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.114333Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::DeleteWithQueryService [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> KqpPg::InsertNoTargetColumns_NotOneSize [GOOD] >> KqpPg::InsertNoTargetColumns_Alter >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> 
LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> TPersQueueTest::DirectReadPreCached >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 7255577386516789803 2025-04-07T14:12:44.436273Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-07T14:12:44.451289Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-07T14:12:44.451336Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-07T14:12:44.453225Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-07T14:12:44.464258Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:12:44.466353Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-07T14:12:48.913436Z 3 00h01m01.465843s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:2:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 319 2025-04-07T14:16:11.871209Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:16:11.871275Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:16:11.871312Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-07T14:16:11.871334Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-07T14:16:12.038757Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-07T14:16:12.038835Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:38.159412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:38.159479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:38.159521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:38.159544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:38.159582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:38.159614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:38.159671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:38.159924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:38.216531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:38.216570Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:38.224658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:38.224759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:38.224913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:38.232535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:38.232651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:38.233107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:38.233339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:38.237654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-04-07T14:13:38.239431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:38.239491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:38.239652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:38.239694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:38.239728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:38.239901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:38.245791Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:38.345131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:38.345333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.345511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:38.345700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:38.345743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.347892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:38.348031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:38.348180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.348227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:38.348264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:38.348309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:38.349569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.349601Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:38.349643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:38.350785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.350812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.350845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:38.350876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:38.353030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:38.354117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:38.354270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:38.355034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:38.355135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:38.355172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:38.355380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:38.355420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:38.355537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:38.355591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:38.357154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:38.357184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:38.357284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:38.357309Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:38.357561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:38.357591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:38.357647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:38.357681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:38.357708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:38.357741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:38.357766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:38.357785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:38.357827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:38.357856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:38.357875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... ult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } 
TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:13.114540Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:16:13.114766Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 246us result status StatusSuccess 2025-04-07T14:16:13.115531Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.5%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 29146, MsgBus: 17129 2025-04-07T14:15:26.819313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574688844702464:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.825758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002113/r3tmp/tmp8k174a/pdisk_1.dat 2025-04-07T14:15:27.184398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:27.214129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.214198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.215420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29146, node 1 2025-04-07T14:15:27.282574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.282605Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.282611Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.282693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17129 TClient is connected to server localhost:17129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.945850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:29.426599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.569338Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-07T14:15:29.646705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.693637Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-07T14:15:29.735728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.785753Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-07T14:15:29.850854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.893662Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-07T14:15:29.960113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.995434Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-04-07T14:15:30.031904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.117861Z node 1 :READ_TABLE_API WARN: 
ForgetAction occurred, send TEvPoisonPill f f t t 2025-04-07T14:15:30.145647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.179570Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-07T14:15:30.216366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.301289Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-07T14:15:30.339859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.394900Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:30.395992Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710692 at tablet 72075186224037896 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710692] at 72075186224037896 while waiting for scan finish) | 2025-04-07T14:15:30.397127Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710692 at tablet 72075186224037896 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710692] at 72075186224037896 while waiting for scan finish) | 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:30.440284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.485185Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:30.531693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.578508Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-07T14:15:30.620827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.660244Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-07T14:15:30.700189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.791532Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:30.846785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.885200Z node 1 :READ_TABLE_API WARN: ForgetAction 
occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:30.929884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.985393Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-07T14:15:31.024982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.073689Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-07T14:15:31.113322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.174897Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:31.231540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710727:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.268572Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-07T14:15:31.304277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710731:0, at schemeshard: 7205759 ... 
eId: 01jr89qgfq6ydv38h5kjg7df7f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:16:05.389314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:16:05.393382Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037889 not found 2025-04-07T14:16:05.396952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715665:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1155, MsgBus: 27774 2025-04-07T14:16:06.188971Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574861987451165:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.189073Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002113/r3tmp/tmpXk8Qdm/pdisk_1.dat 2025-04-07T14:16:06.295275Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:06.315636Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.315720Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.317244Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1155, node 7 2025-04-07T14:16:06.354032Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.354055Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.354061Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.354155Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27774 TClient is connected to server localhost:27774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:06.782249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:16:09.157878Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574874872353674:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.157991Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.201072Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:09.237431Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574874872353777:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.237512Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.237729Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574874872353782:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.241212Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:16:09.250021Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574874872353784:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 65511, MsgBus: 26907 2025-04-07T14:16:10.181799Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7490574877920169963:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.181845Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002113/r3tmp/tmpfvWGy8/pdisk_1.dat 2025-04-07T14:16:10.272224Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:10.293434Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.293522Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:10.295135Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65511, node 8 2025-04-07T14:16:10.339907Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.339935Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.339940Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.340047Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26907 TClient is connected to server localhost:26907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:10.807898Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:13.070747Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574890805072465:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.070814Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7490574890805072473:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.070860Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.075326Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:13.086217Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490574890805072479:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:16:13.176776Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:16:13.256568Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7490574890805072650:2322], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiCreateTable!
:1:1: Error: Table name conflict: db.[/Root/test] is used to reference multiple tables. 2025-04-07T14:16:13.256740Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ODdjNjFkOTAtYWQ0Zjk0MDktY2M3ZWMxNzYtNjYwYjAzYjE=, ActorId: [8:7490574890805072643:2318], ActorState: ExecuteState, TraceId: 01jr89qr5x355zzhfw5v0jzwxy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:13.407833Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2025-04-07T14:16:13.436110Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7490574890805072760:2348], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:16:13.436379Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=OGY5ZWE1NjItMzUzMDdlODAtOTNlMTI2ZmYtMTJkMzQ3MDA=, ActorId: [8:7490574890805072757:2347], ActorState: ExecuteState, TraceId: 01jr89qraz952tycgev007qyh4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> TPersQueueTest::BadTopic >> TPersQueueTest::SetupLockSession2 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> KqpPg::InsertNoTargetColumns_Alter [GOOD] >> KqpPg::ExplainColumnsReorder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] Test command err: 2025-04-07T14:16:01.984156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574841034912884:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:01.984674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpQo9656/pdisk_1.dat 2025-04-07T14:16:02.249165Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5689, node 1 2025-04-07T14:16:02.328339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:02.328422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:02.330105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:02.375654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:02.375678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:02.375684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:02.375768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:02.591140Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:02.599780Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:63573, port: 63573 2025-04-07T14:16:02.599907Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:02.620227Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:02.665193Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:02.712956Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:02.713368Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:02.713426Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.761072Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.808989Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.810363Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****bWNQ (61CCC086) () has now valid token of ldapuser@ldap 2025-04-07T14:16:04.018928Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574854189890636:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.018998Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpNQ1eT7/pdisk_1.dat 2025-04-07T14:16:04.081734Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16186, node 2 2025-04-07T14:16:04.134717Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.134742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.134748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.134819Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.141863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.141935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.143642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.294683Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.297964Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:18136 ldap://localhost:18136 ldap://localhost:11111, port: 18136 2025-04-07T14:16:04.298069Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:04.309353Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.357096Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:04.400930Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:04.401392Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:04.401450Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.444958Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.488963Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.489936Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****SP-w (86C205F9) () has now valid token of ldapuser@ldap 2025-04-07T14:16:06.083972Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574862967612613:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.084041Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpkHkcla/pdisk_1.dat 2025-04-07T14:16:06.141261Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24126, node 3 2025-04-07T14:16:06.186927Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.186950Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.186957Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.187047Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.202663Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.202753Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.204252Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.349480Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.351944Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7928, port: 7928 2025-04-07T14:16:06.351987Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:06.358490Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.405115Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-07T14:16:06.453285Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****WB3Q (29CC72B6) () has now valid token of ldapuser@ldap 2025-04-07T14:16:08.324037Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574870413001328:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.324095Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpH0D2zy/pdisk_1.dat 
2025-04-07T14:16:08.384443Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30010, node 4 2025-04-07T14:16:08.426545Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.426571Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.426579Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.426654Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.443734Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.443805Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.445343Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.510079Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.512844Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20767, port: 20767 2025-04-07T14:16:08.512937Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:08.520263Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:08.561038Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:20767. Invalid credentials 2025-04-07T14:16:08.561413Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****UMkQ (87597BD8) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:10.748931Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574878726312319:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.749028Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpHyfG9m/pdisk_1.dat 2025-04-07T14:16:10.816815Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13112, node 5 2025-04-07T14:16:10.861616Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.861633Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.861638Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.861722Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:10.878361Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.878444Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:10.880066Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:10.943670Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:10.946533Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:63873, port: 63873 2025-04-07T14:16:10.946577Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:10.953596Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:10.997058Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63873. Invalid credentials 2025-04-07T14:16:10.997374Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****1HiA (60FD3A09) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:13.066693Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574890204038713:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.066786Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012a2/r3tmp/tmpNmQeuA/pdisk_1.dat 2025-04-07T14:16:13.147555Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61063, node 6 2025-04-07T14:16:13.201274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:13.201372Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:13.202894Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:13.204178Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:13.204206Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:13.204221Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:13.204323Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:13.316950Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:13.319636Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24687, port: 24687 2025-04-07T14:16:13.319714Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:13.327637Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:13.373091Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:13.373544Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:24687 return no entries 2025-04-07T14:16:13.373985Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****cjpg (6FD6540F) () has now permanent error message 'Could not login via LDAP' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-04-07T14:16:01.981712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574839609688426:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:01.981764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmpRX5wzq/pdisk_1.dat 2025-04-07T14:16:02.260495Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16610, node 1 2025-04-07T14:16:02.342461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:02.342599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:02.344041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:02.375727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:02.375772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:02.375779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:02.375863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:02.646710Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:02.650475Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:14042, port: 14042 2025-04-07T14:16:02.650616Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:02.654404Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. 
Can't contact LDAP server 2025-04-07T14:16:02.654903Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****-mTw (1A5CD4F0) () has now retryable error message 'Could not login via LDAP' 2025-04-07T14:16:04.149882Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574855137181088:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.149939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmpDniMnl/pdisk_1.dat 2025-04-07T14:16:04.220242Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5508, node 2 2025-04-07T14:16:04.271932Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.271956Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.271981Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.272046Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.274639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.274788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.276051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.424412Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.426795Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****ZY3w (D7036639) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:06.360699Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574862023825003:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.360742Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmp9bAXun/pdisk_1.dat 2025-04-07T14:16:06.420190Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18302, node 3 2025-04-07T14:16:06.474213Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.474236Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.474242Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.474350Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.477556Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.477613Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.478811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.663913Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.666187Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****sxmg (D44AE12E) () 
has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:08.536182Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574871599320726:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.536246Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmpvPWlDi/pdisk_1.dat 2025-04-07T14:16:08.595095Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24350, node 4 2025-04-07T14:16:08.639712Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.639727Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.639732Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.639805Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.652632Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.652755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.654311Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.764375Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.766973Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****MyXw (7F3C28DA) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:10.834546Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574881199896942:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.834598Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmpB4nGsl/pdisk_1.dat 2025-04-07T14:16:10.908412Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12304, node 5 2025-04-07T14:16:10.949995Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.950018Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.950026Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.950117Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:10.951066Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.951145Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:10.952736Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:11.053190Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:11.055666Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****hZlQ (251372FC) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:13.201995Z node 6 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574890365982079:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.202065Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00128a/r3tmp/tmp0zOe7t/pdisk_1.dat 2025-04-07T14:16:13.307862Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4025, node 6 2025-04-07T14:16:13.343114Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:13.343193Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:13.344906Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:13.345383Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:13.345402Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:13.345414Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:13.345518Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:13.460554Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:13.464337Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:27510, port: 27510 2025-04-07T14:16:13.464441Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:13.521110Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:13.565554Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****rlSA (0F49C3B9) () has now valid token of ldapuser@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] Test command err: 2025-04-07T14:16:01.981750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574840906795016:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:01.981799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmpISBzYS/pdisk_1.dat 2025-04-07T14:16:02.261877Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27661, node 1 2025-04-07T14:16:02.328172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:02.328285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:02.329869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:02.375741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:02.375771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T14:16:02.375777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:02.375861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:02.591150Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:02.595440Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9091, port: 9091 2025-04-07T14:16:02.599820Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:02.611708Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:02.660981Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:02.661304Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:02.661334Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.705011Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.748920Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:02.749913Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Brqw (EB41EFC0) () has now valid token of ldapuser@ldap 2025-04-07T14:16:04.155831Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574855292905464:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.155891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmppIgI0l/pdisk_1.dat 2025-04-07T14:16:04.222895Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4964, node 2 2025-04-07T14:16:04.272126Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.272141Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.272145Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.272207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.276577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.276628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.278374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.344798Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 
2025-04-07T14:16:04.347260Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10802, port: 10802 2025-04-07T14:16:04.347390Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.350179Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:04.393156Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****hpng (C1B1B629) () has now valid token of ldapuser@ldap 2025-04-07T14:16:06.461394Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574861132170193:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.461457Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmp7MLmTV/pdisk_1.dat 2025-04-07T14:16:06.520868Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21157, node 3 2025-04-07T14:16:06.580595Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.580609Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.580613Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.580711Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.582678Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.582743Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.584379Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.704568Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.707139Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:27752 ldap://localhost:27752 ldap://localhost:11111, port: 27752 2025-04-07T14:16:06.707193Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.713583Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:06.756876Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:06.757224Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:06.757266Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.800886Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.844885Z node 3 :LDAP_AUTH_PROVIDER DEBUG: 
search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.845576Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****PDuQ (DC288E54) () has now valid token of ldapuser@ldap 2025-04-07T14:16:08.685444Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574869405507935:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.685492Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmp0UUiRv/pdisk_1.dat 2025-04-07T14:16:08.750818Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3981, node 4 2025-04-07T14:16:08.800313Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.800332Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.800337Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.800416Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.801700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.801775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.803498Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.877670Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.880158Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9724, port: 9724 2025-04-07T14:16:08.880208Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:08.883110Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-07T14:16:08.924865Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:08.925181Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:08.925213Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:08.968937Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:09.016941Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:09.017552Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****gBAw (E9D9886F) () has now valid token of ldapuser@ldap 
2025-04-07T14:16:10.878289Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574880357161716:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.878348Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmpUkoWUw/pdisk_1.dat 2025-04-07T14:16:10.954996Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23005, node 5 2025-04-07T14:16:10.988396Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.988415Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.988422Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.988502Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:10.999112Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.999168Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:11.000844Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:11.112423Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:11.114887Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14797, port: 14797 2025-04-07T14:16:11.114948Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:11.117400Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-07T14:16:11.165116Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****xE2w (8D3521F6) () has now valid token of ldapuser@ldap 2025-04-07T14:16:13.164199Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574892824609614:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.164268Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127a/r3tmp/tmpIzl85q/pdisk_1.dat 2025-04-07T14:16:13.247859Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4224, node 6 2025-04-07T14:16:13.291625Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:13.291651Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:13.291659Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:13.291757Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:13.295715Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:13.295834Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:13.297366Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-07T14:16:13.374991Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:13.377584Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26548, port: 26548 2025-04-07T14:16:13.377682Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:13.380197Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:26548. Invalid credentials 2025-04-07T14:16:13.380382Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****E83g (B1C88D1C) () has now permanent error message 'Could not login via LDAP' >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> KqpPg::TableSelect [GOOD] >> KqpPg::V1CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-07T14:16:03.720428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574851395119672:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:03.720509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmplbwbHr/pdisk_1.dat 2025-04-07T14:16:03.965024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13080, node 1 2025-04-07T14:16:04.010581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.010597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.010604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.010699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.071604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.071713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.073485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.149952Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.153302Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10962, port: 10962 2025-04-07T14:16:04.153387Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.209084Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:04.252958Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:04.301777Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****z8NQ (45DF7C6F) () has now valid token 
of ldapuser@ldap 2025-04-07T14:16:05.894800Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574856555608787:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:05.894870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmpqTm5pZ/pdisk_1.dat 2025-04-07T14:16:05.968623Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64295, node 2 2025-04-07T14:16:06.015126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.015251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.016769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.016787Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.016794Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.016866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.016965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.061989Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.077303Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6297, port: 6297 2025-04-07T14:16:06.077389Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.129107Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:06.172915Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:06.173368Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:06.173410Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.217007Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.264926Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:06.265675Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****gTtw (8C228A14) () has now valid token of ldapuser@ldap 2025-04-07T14:16:08.208207Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574870227361084:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.208254Z node 3 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmpr6R5dp/pdisk_1.dat 2025-04-07T14:16:08.273526Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7389, node 3 2025-04-07T14:16:08.329385Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.329470Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.330985Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.331096Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.331120Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.331127Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.331244Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.407829Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.410971Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:23153, port: 23153 2025-04-07T14:16:08.411038Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:08.461078Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:08.505387Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****2bTg (BA91F7AB) () has now valid token of ldapuser@ldap 2025-04-07T14:16:10.513564Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574877441112531:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.513635Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmpgnfU74/pdisk_1.dat 2025-04-07T14:16:10.585249Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8249, node 4 2025-04-07T14:16:10.620500Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.620521Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.620525Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.620602Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:10.628151Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.628210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:10.629836Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:10.678421Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:10.680812Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:28703 ldaps://localhost:28703 
ldaps://localhost:11111, port: 28703 2025-04-07T14:16:10.680863Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:10.733160Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:10.776947Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:10.777379Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:10.777422Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:10.825009Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:10.868953Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:10.869773Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****eNuA (2FCA6C41) () has now valid token of ldapuser@ldap 2025-04-07T14:16:12.642679Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574887994753861:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:12.642718Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmpB90O50/pdisk_1.dat 2025-04-07T14:16:12.708995Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17922, node 5 2025-04-07T14:16:12.749032Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:12.749051Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:12.749056Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:12.749141Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:12.767819Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:12.767877Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:12.769532Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:12.843284Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:12.846026Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9759, port: 9759 2025-04-07T14:16:12.846096Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:12.897191Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, 
attributes: groupDN 2025-04-07T14:16:12.944979Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:12.945370Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:12.945418Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:12.989034Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:13.037003Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:13.038031Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****AEXw (58AA759A) () has now valid token of ldapuser@ldap 2025-04-07T14:16:14.911717Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574894674542807:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:14.911766Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001258/r3tmp/tmpepuBi3/pdisk_1.dat 2025-04-07T14:16:14.991420Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10021, node 6 2025-04-07T14:16:15.037490Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:15.037511Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:15.037517Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:15.037604Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:15.043650Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:15.043727Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:15.045232Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:15.196702Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:15.199522Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6400, port: 6400 2025-04-07T14:16:15.199637Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:15.253063Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-07T14:16:15.253146Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:6400. 
Bad search filter 2025-04-07T14:16:15.253505Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****LaRg (5835CBF6) () has now permanent error message 'Could not login via LDAP' >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> Yq_1::Basic >> Yq_1::ListConnections >> Yq_1::ModifyConnections >> PrivateApi::PingTask >> KqpPg::ExplainColumnsReorder [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 3510, MsgBus: 31971 2025-04-07T14:15:26.845671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574689861297986:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.845735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002158/r3tmp/tmpVzp5LC/pdisk_1.dat 2025-04-07T14:15:27.177131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:27.216710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.216816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.220033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3510, node 1 2025-04-07T14:15:27.282505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.282533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.282553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.282686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31971 TClient is connected to server localhost:31971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:15:27.872352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-04-07T14:15:29.446932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.597849Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:29.604884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.647196Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:29.667704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702746200682:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.667704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574702746200674:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.667796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.670864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.681974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574702746200688:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } f f t t 18 2025-04-07T14:15:30.295994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.336698Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:30.343481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.387413Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-04-07T14:15:30.886955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.940957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-04-07T14:15:31.468386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.512922Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:31.516190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.550103Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:31.854027Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574689861297986:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.854131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-04-07T14:15:31.926401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.968757Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:31.974280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.052861Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2025-04-07T14:15:32.438135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.484632Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:32.488786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710705:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.524923Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-04-07T14:15:32.929189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.970569Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:32.979636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.017852Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-04-07T14:15:33.503985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.543892Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:33.549512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.586813Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-04-07T14:15:33.935548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710730:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.970193Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:33.976198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710732:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.019943Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-04-07T14:15:34.445638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710739:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.487682Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:34.513361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710741:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.546991Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill varchar 0 varchar 0 varchar 1 varchar 1 varchar 2 varchar 2 varchar 3 varchar 3 varchar 4 varchar 4 varchar 5 varchar 5 varchar 6 varchar 6 varchar 7 varchar 7 varchar 8 varchar 8 varchar 9 varchar 9 19 2025-04-07T14:15:34.927777Z node 1 :FLAT_TX_SCHEMESH ... 
rd: 72057594046644480 2025-04-07T14:16:10.091671Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:16:10.432139Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574880212279486:2350], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-04-07T14:16:10.432313Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZjQ0NDAzZjEtY2JlYWEyNGMtZTExNTQxMDMtMTNmOTg0MTc=, ActorId: [6:7490574880212279484:2349], ActorState: ExecuteState, TraceId: 01jr89qndm0jh7pagmj6c5evea, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-07T14:16:10.652530Z node 6 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-07T14:16:10.672881Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574880212279497:2355], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
: Error: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" , code: 2029 2025-04-07T14:16:10.673089Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MmFhYmM2YzEtNmI3ZTY3ZDMtNmQ5NDBhNTEtNDliOWI2N2U=, ActorId: [6:7490574880212279495:2354], ActorState: ExecuteState, TraceId: 01jr89qne54htq7znk24vsqf6k, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-07T14:16:10.693251Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490574880212279514:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/nopg] 2025-04-07T14:16:10.695100Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Njg4YTNjODUtZTgyMTAwMWYtYmFkZjk1ZmItNTMxNmNkMWM=, ActorId: [6:7490574880212279512:2360], ActorState: ExecuteState, TraceId: 01jr89qnnp6ma73ak4zyqx3w0w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 22248, MsgBus: 7613 2025-04-07T14:16:11.376264Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490574885002675631:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:11.376341Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002158/r3tmp/tmpRrz486/pdisk_1.dat 2025-04-07T14:16:11.461781Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22248, node 7 2025-04-07T14:16:11.501183Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:11.501276Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:11.502906Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:11.516511Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:11.516535Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:11.516544Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:11.516652Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7613 TClient is connected to server localhost:7613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:11.942790Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:14.235337Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574897887578120:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:14.235380Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490574897887578152:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:14.235448Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:14.238596Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:14.247968Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490574897887578158:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:14.364753Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.733131Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:16:18.738346Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:18.738643Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:18.738702Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002158/r3tmp/tmpjQW9ZY/pdisk_1.dat 2025-04-07T14:16:18.999588Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:19.030808Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:19.079106Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:19.079274Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:19.090980Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:19.209921Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:610:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.210034Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:620:2524], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.210417Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.215291Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:16:19.372768Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:624:2527], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } PreparedQuery: "3bfd014a-aaa17b3b-d1b881d4-8da3125d" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"c7b32422-fd064de5-8f4528c9-129b7826\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:49.455478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:49.455559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:49.455592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:49.455620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:49.455685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:49.455732Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:49.455792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:49.456074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:49.524329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:49.524376Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:49.532607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:49.532748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:49.532889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:49.542010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:49.542154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:49.542712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.543009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:49.547305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.548872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:49.548939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.549089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:49.549129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:49.549160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:49.549265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:49.554525Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:49.624306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:49.624507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.624737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:49.624938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:49.624992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.626700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.626842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:49.626994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.627043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:49.627079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:49.627128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:49.628624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.628703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:49.628745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:49.630102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.630139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.630186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.630229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.633636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:49.635000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:49.635160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:49.635962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.636077Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:49.636125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.636345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:49.636390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.636563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:49.636627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:49.638268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:49.638304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:49.638457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.638492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:49.638774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.638808Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:49.638877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:49.638926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.638969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:49.639010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.639045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:49.639076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:49.639135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:49.639171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:49.639198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
ToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:19.949080Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:16:19.949300Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 243us result status StatusSuccess 2025-04-07T14:16:19.949977Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:19.960905Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:784:2606] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:16:19.961018Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:706:2606] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:16:19.961174Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:784:2606] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035379936362 Step: 5000003 TxId: 
18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035379936362 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035379936362 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:16:19.963352Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:784:2606] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:16:19.963453Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:706:2606] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> KqpExtractPredicateLookup::PointJoin+EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::PointJoin-EnableKqpDataQueryStreamLookup >> Yq_1::DescribeConnection >> Yq_1::DeleteConnections >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:25.074005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:25.074089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:25.074124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:25.074158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:25.074206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:25.074255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:25.074315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:25.074639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:25.147432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:25.147488Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:25.174232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:25.174380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:25.174550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:25.185686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:25.185881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:25.186525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:25.186942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:25.193900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:25.202731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:25.202808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:25.202986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:25.203036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:25.203075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:25.203220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 
2025-04-07T14:13:25.212924Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:25.331750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:25.331973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.332218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:25.332421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:25.332484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.339869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:25.340022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:25.340225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.340275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:25.340315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:25.340368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:25.342689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.342749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:25.342784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:25.344441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.344483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.344535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:25.344581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:25.348276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:25.350022Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:25.350253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:25.351216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:25.351358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:25.351415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:25.351690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:25.351746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:25.351916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:25.351988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:25.356497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:25.356544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:25.356724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:25.356767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:25.357081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:25.357125Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:25.357215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:25.357268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:25.357315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:25.357364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:25.357402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:25.357431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:25.357511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:25.357550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:25.357579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... CompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:23.366055Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:16:23.366321Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 296us result status StatusSuccess 2025-04-07T14:16:23.367133Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 
134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:23.378050Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:829:2665] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:16:23.378126Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:768:2665] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 72075186233409546 } 2025-04-07T14:16:23.378237Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:829:2665] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035383356076 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035383356076 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035383356076 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:16:23.380055Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:829:2665] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:16:23.380139Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:768:2665] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> Yq_1::CreateQuery_With_Idempotency >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-07T14:16:00.372300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:00.372477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:00.372633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015be/r3tmp/tmpgyMmON/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15330, node 1 TClient is connected to server localhost:6563 2025-04-07T14:16:00.744298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:00.771844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:00.774759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:00.774800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:00.774827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:00.774961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:00.818783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:00.818893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:00.830030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:00.964975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:16:01.021744Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:653:2546], Recipient [1:710:2591]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:16:01.022631Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:653:2546], Recipient [1:710:2591]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:16:01.022824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:710:2591];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:16:01.037256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:710:2591];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:16:01.037482Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:16:01.043359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:16:01.043506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:16:01.043673Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:16:01.043741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:16:01.043818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:16:01.043907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:16:01.043999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:16:01.044108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:16:01.044181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:16:01.044262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:16:01.044331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:16:01.044406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:16:01.057144Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:653:2546], Recipient [1:710:2591]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:16:01.058246Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-07T14:16:01.058523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:16:01.058578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:16:01.058744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:16:01.058869Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:16:01.058926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:16:01.058969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:16:01.059031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:16:01.059075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:16:01.059112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:16:01.059131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:16:01.059251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:16:01.059292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:16:01.059330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:16:01.059352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:16:01.059414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:16:01.059449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:16:01.059475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:16:01.059491Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:16:01.059544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:16:01.059567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:16:01.059584Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:16:01.059609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:16:01.059632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:16:01.059650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:16:01.059794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-07T14:16:01.059860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-07T14:16:01.059938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72 ... =1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445400;count=1;;10:size=1445360;count=1;;11:size=808584;count=1;;12:size=4462848;count=6;;13:size=1445744;count=1;;14:size=1445448;count=1;;15:size=1445608;count=1;;16:size=1445376;count=1;;17:size=1445920;count=1;;18:size=2010800;count=3;;19:size=2010864;count=3;;20:size=1206216;count=1;;21:size=1206376;count=1;;22:size=1225616;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72075186224037888;c=0;:73/0:size=3289;count=16;;1:size=58921;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445400;count=1;;10:size=1445360;count=1;;11:size=808584;count=1;;12:size=4462848;count=6;;13:size=1445744;count=1;;14:size=1445448;count=1;;15:size=1445608;count=1;;16:size=1445376;count=1;;17:size=1445920;count=1;;18:size=2010800;count=3;;19:size=2010864;count=3;;20:size=1206216;count=1;;21:size=1206376;count=1;;22:size=1225616;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-07T14:16:24.337382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-07T14:16:24.337427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=with_appended.cpp:80;portions=29,;task_id=e288dc98-13ba11f0-9888de55-54f8935; 2025-04-07T14:16:24.337610Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1578312;portion_bytes=1578312;portion_raw_bytes=52994676; 2025-04-07T14:16:24.337671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=11;before_size=18270312;after_size=16692000;before_rows=517509;after_rows=473004; 2025-04-07T14:16:24.337707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1578312;portion_bytes=1578312;portion_raw_bytes=52994676; 2025-04-07T14:16:24.337779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1445744;portion_bytes=1445744;portion_raw_bytes=48428075; 2025-04-07T14:16:24.337806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=16;before_size=16692000;after_size=15246256;before_rows=473004;after_rows=431806; 2025-04-07T14:16:24.337824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1445744;portion_bytes=1445744;portion_raw_bytes=48428075; 2025-04-07T14:16:24.337855Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=3029160;portion_bytes=3029160;portion_raw_bytes=102081919; 2025-04-07T14:16:24.337886Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=3;portion=29;before_size=15246256;after_size=18275416;before_rows=431806;after_rows=517509; 2025-04-07T14:16:24.337911Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=3029160;portion_bytes=3029160;portion_raw_bytes=102081919; 2025-04-07T14:16:24.337955Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:29;path_id:3;records_count:85703;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3029160;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-07T14:16:24.338124Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=tiering.cpp:48;tiering_info=__DEFAULT/0.000000s;$$DELETE/85798.000000s;; 2025-04-07T14:16:24.338173Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::e288dc98-13ba11f0-9888de55-54f8935; 2025-04-07T14:16:24.338214Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:3;path_id:3;size:18275416;portions_count:29;); 2025-04-07T14:16:24.338242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:16:24.338281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:16:24.338327Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T14:16:24.338371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-07T14:16:24.338395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:16:24.338427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:16:24.338452Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:16:24.338488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:16:24.338540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:16:24.338678Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:20 Blob count: 1 VERIFY failed (2025-04-07T14:16:24.338837Z): tablet_id=72075186224037888;task_id=e288dc98-13ba11f0-9888de55-54f8935;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+995 (0x178A2F83) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x178925DB) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x18B63726) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x45BDC2F1) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x2F97B6FD) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+810 (0x1CBD983A) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3390 (0x1CBCA95E) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3487 (0x1CA386DF) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+852 (0x1C9DD274) NActors::IActor::Receive(TAutoPtr&)+247 (0x18B1A327) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3599 (0x33F416AF) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12583 (0x33F39E27) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x33F36BE1) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7817 (0x348F59E9) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4360 (0x174A2448) std::__y1::__function::__func, void ()>::operator()()+280 (0x174B52F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x17D5A9E9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x17D22639) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x174B42A4) NUnitTest::TTestFactory::Execute()+2438 (0x17D23F06) NUnitTest::RunMain(int, char**)+5149 (0x17D5462D) ??+0 (0x7F770850BD90) __libc_start_main+128 (0x7F770850BE40) _start+41 (0x15565029) >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> 
LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-07T14:16:03.688552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574848786380814:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:03.688615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmpSVTe8c/pdisk_1.dat 2025-04-07T14:16:03.872405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:03.874918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:03.874968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:03.876875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8315, node 1 2025-04-07T14:16:03.917850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:03.917868Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:03.917875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:03.917950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.006378Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.009551Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2169, port: 2169 2025-04-07T14:16:04.009615Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.065112Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:04.112940Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:04.113372Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:04.113417Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.156944Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.200952Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:04.202471Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****GuhA (4FDE460A) () has now valid token of ldapuser@ldap 2025-04-07T14:16:07.696957Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****GuhA (4FDE460A) 2025-04-07T14:16:07.697756Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: 
ldaps://localhost:2169, port: 2169 2025-04-07T14:16:07.697853Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:07.749108Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:07.749644Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:2169 return no entries 2025-04-07T14:16:07.750078Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****GuhA (4FDE460A) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:08.689029Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574848786380814:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.689094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:12.699052Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****GuhA (4FDE460A) 2025-04-07T14:16:14.584485Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574895183178773:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:14.584559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmpI4zFn8/pdisk_1.dat 2025-04-07T14:16:14.664030Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63669, node 2 2025-04-07T14:16:14.708425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:14.708507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:14.710027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:14.717647Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:14.717663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:14.717670Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:14.717782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:14.838826Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:14.842396Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20661, port: 20661 2025-04-07T14:16:14.842516Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:14.852988Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:14.897167Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:14.940939Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:14.986465Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****H9WA (CC561EC2) () has now valid token of ldapuser@ldap 
2025-04-07T14:16:16.900360Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574907169104768:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:16.900396Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmpwvcxYo/pdisk_1.dat 2025-04-07T14:16:16.959972Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19836, node 3 2025-04-07T14:16:17.005567Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:17.005584Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:17.005589Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:17.005701Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:17.018752Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:17.018837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:17.020300Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:17.101196Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:17.103907Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20689, port: 20689 2025-04-07T14:16:17.104027Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:17.116441Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:17.161103Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:17.205497Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****1jgw (063119A0) () has now valid token of ldapuser@ldap 2025-04-07T14:16:18.950226Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574912083894574:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:18.950268Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmpyKWFl8/pdisk_1.dat 2025-04-07T14:16:19.009232Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7799, node 4 2025-04-07T14:16:19.062589Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:19.062616Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:19.062624Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:19.062736Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:19.069715Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:19.069781Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:19.071393Z 
node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:19.166810Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:19.169464Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21829, port: 21829 2025-04-07T14:16:19.169575Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:19.177180Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:19.221124Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:19.269467Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****ERMw (DCA39F66) () has now valid token of ldapuser@ldap 2025-04-07T14:16:21.345882Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574925263748180:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:21.345963Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmp0rAQt4/pdisk_1.dat 2025-04-07T14:16:21.408005Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15048, node 5 2025-04-07T14:16:21.450573Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:21.450596Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:21.450604Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:21.450696Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:21.468795Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:21.468849Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:21.470039Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:21.529392Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:21.532734Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12373, port: 12373 2025-04-07T14:16:21.532805Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:21.540726Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:21.585138Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-07T14:16:21.632970Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:21.633462Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:21.633517Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:21.677013Z node 5 :LDAP_AUTH_PROVIDER DEBUG: 
search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:21.725102Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-07T14:16:21.726282Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****QRqg (AF567AFF) () has now valid token of ldapuser@ldap 2025-04-07T14:16:23.885717Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574936136494268:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:23.885763Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001259/r3tmp/tmp2OJ6mA/pdisk_1.dat 2025-04-07T14:16:23.957095Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15526, node 6 2025-04-07T14:16:24.002508Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:24.002532Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:24.002539Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:24.002629Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:24.014917Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:24.014975Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:24.016310Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:24.116778Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:24.118611Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2057, port: 2057 2025-04-07T14:16:24.118718Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:24.127524Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:24.169207Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-07T14:16:24.169295Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:2057. 
Bad search filter 2025-04-07T14:16:24.169818Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****JGvA (3A793201) () has now permanent error message 'Could not login via LDAP' >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> PgCatalog::PgDatabase [GOOD] >> PgCatalog::PgRoles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:217:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:159:2057] recipient: [10:157:2177] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:161:2057] recipient: [10:157:2177] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:160:2178] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:230:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:160:2180] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:164:2057] recipient: [11:160:2180] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:163:2181] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 7 ... etID 72057594037927937 is [106:105:2137] sender: [106:106:2057] recipient: [106:99:2133] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:139:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:234:2057] recipient: [106:97:2132] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:237:2057] recipient: [106:236:2246] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:238:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:240:2057] recipient: [106:236:2246] !Reboot 72057594037927937 (actor [106:105:2137]) rebooted! !Reboot 72057594037927937 (actor [106:105:2137]) tablet resolver refreshed! 
new actor is[106:239:2247] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:292:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:106:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:139:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:239:2057] recipient: [107:97:2132] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:241:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:243:2057] recipient: [107:242:2251] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:245:2057] recipient: [107:242:2251] !Reboot 72057594037927937 (actor [107:105:2137]) rebooted! !Reboot 72057594037927937 (actor [107:105:2137]) tablet resolver refreshed! new actor is[107:244:2252] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:314:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:106:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:139:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:239:2057] recipient: [108:97:2132] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:242:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:243:2057] recipient: [108:241:2251] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:245:2057] recipient: [108:241:2251] !Reboot 72057594037927937 (actor [108:105:2137]) rebooted! !Reboot 72057594037927937 (actor [108:105:2137]) tablet resolver refreshed! new actor is[108:244:2252] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:314:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:106:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:139:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:240:2057] recipient: [109:97:2132] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:243:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:244:2057] recipient: [109:242:2251] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:246:2057] recipient: [109:242:2251] !Reboot 72057594037927937 (actor [109:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [109:105:2137]) tablet resolver refreshed! new actor is[109:245:2252] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:293:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:106:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:139:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:242:2057] recipient: [110:97:2132] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:244:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:246:2057] recipient: [110:245:2253] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:248:2057] recipient: [110:245:2253] !Reboot 72057594037927937 (actor [110:105:2137]) rebooted! !Reboot 72057594037927937 (actor [110:105:2137]) tablet resolver refreshed! new actor is[110:247:2254] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:317:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:106:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:139:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:242:2057] recipient: [111:97:2132] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:245:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:246:2057] recipient: [111:244:2253] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:248:2057] recipient: [111:244:2253] !Reboot 72057594037927937 (actor [111:105:2137]) rebooted! !Reboot 72057594037927937 (actor [111:105:2137]) tablet resolver refreshed! new actor is[111:247:2254] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:317:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:106:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:139:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:243:2057] recipient: [112:97:2132] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:245:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:247:2057] recipient: [112:246:2253] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:249:2057] recipient: [112:246:2253] !Reboot 72057594037927937 (actor [112:105:2137]) rebooted! !Reboot 72057594037927937 (actor [112:105:2137]) tablet resolver refreshed! new actor is[112:248:2254] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:318:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:106:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:139:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:248:2057] recipient: [113:97:2132] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:251:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:252:2057] recipient: [113:250:2258] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:254:2057] recipient: [113:250:2258] !Reboot 72057594037927937 (actor [113:105:2137]) rebooted! !Reboot 72057594037927937 (actor [113:105:2137]) tablet resolver refreshed! new actor is[113:253:2259] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:323:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:106:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:139:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:248:2057] recipient: [114:97:2132] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:251:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:252:2057] recipient: [114:250:2258] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:254:2057] recipient: [114:250:2258] !Reboot 72057594037927937 (actor [114:105:2137]) rebooted! !Reboot 72057594037927937 (actor [114:105:2137]) tablet resolver refreshed! 
new actor is[114:253:2259] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:323:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:106:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:139:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:249:2057] recipient: [115:97:2132] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:252:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:253:2057] recipient: [115:251:2258] Leader for TabletID 72057594037927937 is [115:254:2259] sender: [115:255:2057] recipient: [115:251:2258] !Reboot 72057594037927937 (actor [115:105:2137]) rebooted! !Reboot 72057594037927937 (actor [115:105:2137]) tablet resolver refreshed! new actor is[115:254:2259] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:106:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:139:2057] recipient: [116:14:2061] >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> Yq_1::Basic_Null |96.6%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Yq_1::DescribeJob >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> Yq_1::CreateConnection_With_Existing_Name >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::SchemeshardRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-07T14:16:01.981681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574840033696795:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:01.981723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmp8SP5BU/pdisk_1.dat 2025-04-07T14:16:02.249505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1860, node 1 2025-04-07T14:16:02.325130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:02.325231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:02.327116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:02.375654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:02.375687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:02.375693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:02.375768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:02.605355Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:02.608897Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:21854, port: 21854 2025-04-07T14:16:02.608985Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:02.665111Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-07T14:16:02.713947Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****gB1w (11128DA7) () has now valid token of ldapuser@ldap 2025-04-07T14:16:04.047617Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574852221621845:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.047719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmpzfvwe7/pdisk_1.dat 2025-04-07T14:16:04.114964Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7658, node 2 2025-04-07T14:16:04.163909Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.163939Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.163945Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.164040Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.169163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.169279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.170680Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.231901Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.234615Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:31859, port: 31859 2025-04-07T14:16:04.234683Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.293050Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:31859. Invalid credentials 2025-04-07T14:16:04.293448Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****AExQ (4BFEB124) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:06.090918Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574862475215138:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.090962Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmpOokoBb/pdisk_1.dat 2025-04-07T14:16:06.151583Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12330, node 3 2025-04-07T14:16:06.201406Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.201429Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.201441Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.201552Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.209392Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.209473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.210967Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.344368Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.347883Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:16118, port: 16118 2025-04-07T14:16:06.347965Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.397123Z 
node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:16118. Invalid credentials 2025-04-07T14:16:06.397505Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****YhsA (D61E8AAF) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:08.511081Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574870958170346:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.511132Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmp1cdD9q/pdisk_1.dat 2025-04-07T14:16:08.571337Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7925, node 4 2025-04-07T14:16:08.604100Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.604119Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.604124Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.604180Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.625910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.625984Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.627595Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.709793Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.712198Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8054, port: 8054 2025-04-07T14:16:08.712262Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:08.765034Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:08.765363Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:8054 return no entries 2025-04-07T14:16:08.765699Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****M4Og (3AC5F695) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:10.641126Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574878648041149:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:10.641218Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmpkil2HX/pdisk_1.dat 2025-04-07T14:16:10.708803Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21967, node 5 2025-04-07T14:16:10.754324Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:10.754353Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:10.754362Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:10.754448Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:10.759447Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:10.759531Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:10.761169Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:10.904110Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:10.906712Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:3289, port: 3289 2025-04-07T14:16:10.906777Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:10.957039Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:11.001002Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:11.001420Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:11.001476Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:11.044966Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:11.088968Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:11.089825Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****JFqQ (D47D39DA) () has now valid token of ldapuser@ldap 2025-04-07T14:16:13.644218Z node 5 :TICKET_PARSER DEBUG: 
Refreshing ticket eyJh****JFqQ (D47D39DA) 2025-04-07T14:16:13.644304Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:3289, port: 3289 2025-04-07T14:16:13.644363Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:13.697101Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:13.740923Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:13.741416Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:13.741450Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.784933Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.832919Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.833709Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****JFqQ (D47D39DA) () has now valid token of ldapuser@ldap 2025-04-07T14:16:15.641547Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574878648041149:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:15.645776Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:18.647432Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****JFqQ (D47D39DA) 2025-04-07T14:16:18.647498Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:3289, port: 3289 2025-04-07T14:16:18.647552Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:18.701136Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:18.749028Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:18.749488Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:18.749528Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:18.793016Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:18.840974Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:18.841975Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****JFqQ 
(D47D39DA) () has now valid token of ldapuser@ldap 2025-04-07T14:16:21.647243Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574927830117495:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:21.647313Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00127c/r3tmp/tmpcGmV0x/pdisk_1.dat 2025-04-07T14:16:21.741436Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23368, node 6 2025-04-07T14:16:21.781440Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:21.781609Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:21.782979Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:21.800776Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:21.800798Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:21.800806Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:21.800915Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:21.906655Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:21.910131Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2844, port: 2844 2025-04-07T14:16:21.910217Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:21.961147Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:22.005459Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****zEQA (DDDF8806) () has now valid token of ldapuser@ldap 2025-04-07T14:16:25.655454Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****zEQA (DDDF8806) 2025-04-07T14:16:25.655558Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2844, port: 2844 2025-04-07T14:16:25.655653Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:25.713200Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:25.757556Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****zEQA (DDDF8806) () has now valid token of ldapuser@ldap 2025-04-07T14:16:26.647476Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574927830117495:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:26.647559Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:29.656989Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****zEQA (DDDF8806) 2025-04-07T14:16:29.657080Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2844, port: 2844 2025-04-07T14:16:29.657165Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:29.713079Z node 6 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:29.757426Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****zEQA (DDDF8806) () has now valid token of ldapuser@ldap >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-04-07T14:16:20.081934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574924000650415:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.082157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:20.221280Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18541: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:18541 } ] E0407 14:16:20.224027446 595949 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:20.224174755 595949 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:21.083828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:21.229792Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18541: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:18541 } ] 2025-04-07T14:16:21.235983Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18541: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18541 2025-04-07T14:16:22.084440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00130c/r3tmp/tmp1NAegY/pdisk_1.dat 2025-04-07T14:16:22.452250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932590585583:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.452307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 18541, node 1 2025-04-07T14:16:22.526345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.526387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.526457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:22.526562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:22.953902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.959889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:22.961678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.963651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:22.963871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:22.963922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:16:22.965458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:22.966716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:22.966741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:22.967992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.972045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035383020, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:22.972078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:22.972309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:22.973996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.974193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.974316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:22.974445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:22.974501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:22.974559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:22.990839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:22.990906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:22.990925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:22.990964Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-07T14:16:22.991003Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.991016Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.991032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:16:22.991286Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". 
Create session OK 2025-04-07T14:16:22.991308Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.991314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.993079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.993345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:22.995232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:22.995441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.995650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.995735Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:22.996683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:22.996741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:22.996759Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:22.996984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:22.997009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:22.997018Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:22.998033Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-07T14:16:22.998063Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.998069Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.998408Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-07T14:16:22.998435Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.998445Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.999224Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-07T14:16:22.999243Z node 1 :YQ_CONTROL_PLANE_STOR ... =Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-07T14:16:35.021822Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. 
Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.021862Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490574987679925399 RawX2: 4503616807242182 } } DstEndpoint { ActorId { RawX1: 7490574987679925400 RawX2: 4503616807242183 } } InMemory: true DstStageId: 1 } 2025-04-07T14:16:35.021873Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update output channelId: 1, peer: [4:7490574987679925400:2503] 2025-04-07T14:16:35.021935Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Shards State: TShardState{ TabletId: 72075186224037888, Last Key , Ranges: [#0: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 } 2025-04-07T14:16:35.021948Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. effective maxinflight 1 sorted 1 2025-04-07T14:16:35.021954Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. BEFORE: 1.0 2025-04-07T14:16:35.022010Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Send EvRead to shardId: 72075186224037888, tablePath: Root/yq/connections, ranges: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-04-07T14:16:35.022033Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. AFTER: 0.1 2025-04-07T14:16:35.022040Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-07T14:16:35.022085Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.022097Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-07T14:16:35.022107Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. 
returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-07T14:16:35.022412Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Recv TEvReadResult from ShardID=72075186224037888, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-07T14:16:35.022429Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Taken 0 locks 2025-04-07T14:16:35.022439Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. new data for read #0 seqno = 1 finished = 1 2025-04-07T14:16:35.022453Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-07T14:16:35.022490Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.022511Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:35.022525Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. enter pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:35.022539Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. exit pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:35.022550Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. returned 0 rows; processed 0 rows 2025-04-07T14:16:35.022579Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. dropping batch for read #0 2025-04-07T14:16:35.022587Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. effective maxinflight 1 sorted 1 2025-04-07T14:16:35.022597Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:35.022614Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1, CA Id [4:7490574987679925399:2502]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:35.022694Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:35.022704Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:16:35.022726Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 2. Finish input channelId: 1, from: [4:7490574987679925399:2502] 2025-04-07T14:16:35.022758Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.022758Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:16:35.022773Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.022790Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1. Tasks execution finished 2025-04-07T14:16:35.022799Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925399:2502], TxId: 281474976715690, task: 1. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:35.022810Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:35.022843Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:35.022875Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:35.022883Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 2. 
Tasks execution finished 2025-04-07T14:16:35.022883Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 1. pass away 2025-04-07T14:16:35.022892Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490574987679925400:2503], TxId: 281474976715690, task: 2. Ctx: { TraceId : 01jr89rd81bqyjhntb6v79vnw3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Y2UxM2E0Y2EtN2Q0NmUxODAtYzQ5ZmNkNjMtNmZjNTNkNGM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:35.022948Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715690, task: 2. pass away 2025-04-07T14:16:35.022959Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715690;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:35.022994Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715690;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:35.609377Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:1776: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:1776 >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::BadSids >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate >> UpsertLoad::ShouldWriteDataBulkUpsert >> TPersQueueTest::DirectReadNotCached [FAIL] >> TPersQueueTest::DirectReadBadCases >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> PgCatalog::PgTables [GOOD] >> KqpPg::TableArrayInsert [GOOD] >> KqpPg::Returning >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] Test command err: 2025-04-07T14:16:01.981806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574840751302697:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:01.981882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpADNlBm/pdisk_1.dat 2025-04-07T14:16:02.244342Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6540, node 1 2025-04-07T14:16:02.331202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:02.331290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:02.333004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:02.375629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:02.375657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:02.375668Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:02.375752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:02.598447Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:02.602082Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:31093, port: 31093 2025-04-07T14:16:02.602609Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:02.611708Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:02.660983Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:02.705738Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Ivcg (067EC918) () has now valid token of ldapuser@ldap 2025-04-07T14:16:04.113127Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574853153474043:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.113192Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpd26s1O/pdisk_1.dat 2025-04-07T14:16:04.201214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16463, node 2 2025-04-07T14:16:04.240428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:04.240495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:04.242174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:04.257440Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:04.257460Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:04.257467Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:04.257563Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:04.396950Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:04.399466Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13949, port: 13949 2025-04-07T14:16:04.399619Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:04.402593Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:04.445128Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****NtzQ (DFA7951A) () has now valid token of ldapuser@ldap 2025-04-07T14:16:06.172916Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574861178693263:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.172957Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpEQZYK4/pdisk_1.dat 2025-04-07T14:16:06.232491Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6694, node 3 2025-04-07T14:16:06.270731Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.270748Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.270755Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.270829Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.293057Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.293141Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.294728Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.365773Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.368062Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9444, port: 9444 2025-04-07T14:16:06.368120Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.370697Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-07T14:16:06.370745Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:9444. Bad search filter 2025-04-07T14:16:06.370889Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****q1jg (35F93B25) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:08.422635Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574869452171470:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.422721Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpdQpViC/pdisk_1.dat 2025-04-07T14:16:08.493103Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25922, node 4 2025-04-07T14:16:08.539126Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:08.539148Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:08.539155Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:08.539242Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:08.542208Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:08.542305Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:08.543917Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:08.685003Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:08.688007Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30092, port: 30092 2025-04-07T14:16:08.688077Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:08.697353Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:08.745115Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:08.788917Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:08.789280Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:08.789310Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:08.836970Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:08.884934Z node 4 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:08.885798Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****tWMw (CD71CDDF) () has now valid token of ldapuser@ldap 2025-04-07T14:16:13.422745Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574869452171470:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.422811Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:13.426302Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****tWMw (CD71CDDF) 2025-04-07T14:16:13.426365Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30092, port: 30092 2025-04-07T14:16:13.426411Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:13.433653Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:13.477016Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:13.520993Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:13.521706Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:13.521747Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.568949Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.612965Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.613900Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****tWMw (CD71CDDF) () has now valid token of ldapuser@ldap 2025-04-07T14:16:17.428115Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****tWMw (CD71CDDF) 2025-04-07T14:16:17.428186Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30092, port: 30092 2025-04-07T14:16:17.428254Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:17.435900Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:17.477169Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:17.520939Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:17.521388Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:17.521412Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2025-04-07T14:16:17.565019Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:17.608999Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:17.610134Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****tWMw (CD71CDDF) () has now valid token of ldapuser@ldap 2025-04-07T14:16:19.356313Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574918718142941:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:19.356350Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpZSWTLF/pdisk_1.dat 2025-04-07T14:16:19.447683Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10529, node 5 2025-04-07T14:16:19.482073Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:19.482355Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:19.484145Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:19.503505Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:19.503531Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:19.503539Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:19.503638Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:19.623456Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:19.626181Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26812, port: 26812 2025-04-07T14:16:19.626302Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:19.636387Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:19.681171Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:19.725647Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Zdaw (A1FCFB7F) () has now valid token of ldapuser@ldap 2025-04-07T14:16:24.356559Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574918718142941:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:24.356644Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:24.361185Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Zdaw (A1FCFB7F) 2025-04-07T14:16:24.361327Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26812, port: 26812 2025-04-07T14:16:24.361410Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:24.371309Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 
2025-04-07T14:16:24.413184Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:24.457566Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Zdaw (A1FCFB7F) () has now valid token of ldapuser@ldap 2025-04-07T14:16:28.362960Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Zdaw (A1FCFB7F) 2025-04-07T14:16:28.363127Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26812, port: 26812 2025-04-07T14:16:28.363229Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:28.372549Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:28.417149Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:28.465416Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Zdaw (A1FCFB7F) () has now valid token of ldapuser@ldap 2025-04-07T14:16:30.182124Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490574965146356264:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:30.182301Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001269/r3tmp/tmpFq60Xh/pdisk_1.dat 2025-04-07T14:16:30.337664Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:30.358584Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:30.358670Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:30.360334Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2619, node 6 2025-04-07T14:16:30.415511Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:30.415539Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:30.415550Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:30.415701Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:30.578405Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:30.581866Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5129, port: 5129 2025-04-07T14:16:30.581950Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:30.590661Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:30.637118Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:30.680961Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:30.681485Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:30.681531Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:30.725004Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:30.769044Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:30.770015Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****8Tiw (7F33D617) () has now valid token of ldapuser@ldap 2025-04-07T14:16:34.185906Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****8Tiw (7F33D617) 2025-04-07T14:16:34.186204Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5129, port: 5129 2025-04-07T14:16:34.186284Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-07T14:16:34.194244Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:34.241151Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:34.241586Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:5129 return no entries 2025-04-07T14:16:34.242129Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****8Tiw (7F33D617) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:35.182280Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7490574965146356264:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:35.182368Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:39.188399Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****8Tiw (7F33D617) >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 63478, MsgBus: 25960 2025-04-07T14:15:26.798409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574690178519389:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.798473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002154/r3tmp/tmpErot0j/pdisk_1.dat 2025-04-07T14:15:27.154019Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63478, node 1 2025-04-07T14:15:27.199220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.199317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.201315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T14:15:27.285175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.285193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.285209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.285313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25960 TClient is connected to server localhost:25960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.905130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-04-07T14:15:29.392240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgbool; DECLARE $key1 as pgbool; DECLARE $value0 as pgbool; DECLARE $value1 as pgbool; INSERT INTO `Pg16_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:29.542480Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:29.551559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574703063422000:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.551712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574703063422006:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.551797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.555319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.564591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574703063422014:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } f f t t 2025-04-07T14:15:30.354958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgbool; DECLARE $value1 as _pgbool; INSERT INTO `Pg1000_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:30.406573Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {t,t} 18 2025-04-07T14:15:30.710620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgchar; DECLARE $key1 as pgchar; DECLARE $value0 as pgchar; DECLARE $value1 as pgchar; INSERT INTO `Pg18_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:30.805982Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2025-04-07T14:15:31.131758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgchar; DECLARE $value1 as _pgchar; INSERT INTO `Pg1002_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:31.164715Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 21 2025-04-07T14:15:31.541430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as pgint2; DECLARE $value1 as pgint2; INSERT INTO `Pg21_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:31.587126Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:15:31.803620Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574690178519389:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.803672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2025-04-07T14:15:31.906184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint2; DECLARE $value1 as _pgint2; INSERT INTO `Pg1005_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:31.940400Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 23 2025-04-07T14:15:32.226471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint4; DECLARE $key1 as pgint4; DECLARE $value0 as pgint4; DECLARE $value1 as pgint4; INSERT INTO `Pg23_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 
2025-04-07T14:15:32.271770Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2025-04-07T14:15:32.584182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint4; DECLARE $value1 as _pgint4; INSERT INTO `Pg1007_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:32.622244Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 20 2025-04-07T14:15:32.966862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint8; DECLARE $key1 as pgint8; DECLARE $value0 as pgint8; DECLARE $value1 as pgint8; INSERT INTO `Pg20_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:33.005769Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2025-04-07T14:15:33.331470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint8; DECLARE $value1 as _pgint8; INSERT INTO `Pg1016_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:33.369610Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 700 2025-04-07T14:15:33.694242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgfloat4; DECLARE $key1 as pgfloat4; DECLARE $value0 as pgfloat4; DECLARE $value1 as pgfloat4; INSERT INTO `Pg700_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2025-04-07T14:15:33.725798Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2025-04-07T14:15:34.041087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgfloat4; DECLARE $value1 as _pgfloat4; INSERT INT ... issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:26.674820Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:26.685529Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7490574949602814163:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 13941, MsgBus: 4156 2025-04-07T14:16:27.887093Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7490574954324475614:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:27.887156Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002154/r3tmp/tmp1ATgdL/pdisk_1.dat 2025-04-07T14:16:28.002119Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:28.032853Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:28.032984Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:28.034622Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13941, node 9 2025-04-07T14:16:28.083927Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:28.083967Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:28.083975Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:28.084125Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4156 TClient is connected to server localhost:4156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:28.642558Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:31.952189Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574971504345440:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:31.952262Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7490574971504345431:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:31.952585Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:31.956283Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:31.965938Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7490574971504345446:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 20552, MsgBus: 5756 2025-04-07T14:16:32.978465Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490574972917211951:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:32.978550Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002154/r3tmp/tmp7UQsWZ/pdisk_1.dat 2025-04-07T14:16:33.142151Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:33.164121Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:33.164242Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:33.165765Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20552, node 10 2025-04-07T14:16:33.213418Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:33.213442Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:33.213451Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:33.213605Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5756 TClient is connected to server localhost:5756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:33.719618Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:36.893446Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490574990097081747:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:36.893447Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490574990097081766:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:36.893548Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:36.897334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:36.909158Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490574990097081776:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:16:37.024185Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:16:37.093820Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:16:37.978831Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490574972917211951:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:37.978891Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:40.040159Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-07T14:16:40.066986Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:16:40.371564Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490575007276951562:2413], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89rjee4cbfnk1ah7vt1cb5. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=MmJlY2FiZGYtZjMyMzRhYzEtNjZiYTRkMDUtNjAzZTFjYzc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-04-07T14:16:40.372406Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490575007276951563:2414], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=MmJlY2FiZGYtZjMyMzRhYzEtNjZiYTRkMDUtNjAzZTFjYzc=. TraceId : 01jr89rjee4cbfnk1ah7vt1cb5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [10:7490575007276951559:2410], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:16:40.375255Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmJlY2FiZGYtZjMyMzRhYzEtNjZiYTRkMDUtNjAzZTFjYzc=, ActorId: [10:7490575007276951552:2410], ActorState: ExecuteState, TraceId: 01jr89rjee4cbfnk1ah7vt1cb5, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] Test command err: 2025-04-07T14:16:04.733832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574851608510680:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:04.733895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001249/r3tmp/tmpTLpWMk/pdisk_1.dat 2025-04-07T14:16:04.973598Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2454, node 1 2025-04-07T14:16:05.003661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:05.003695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:05.003702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:05.003791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:05.065274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:05.065371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:05.067016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:05.120594Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:05.123607Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22448, port: 22448 2025-04-07T14:16:05.124293Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:05.129430Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:22448. 
Invalid credentials 2025-04-07T14:16:05.129963Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****TJ9Q (90BC72CF) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:06.710050Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574861459197395:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:06.710131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001249/r3tmp/tmpAfwku1/pdisk_1.dat 2025-04-07T14:16:06.785315Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30251, node 2 2025-04-07T14:16:06.834809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:06.834835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:06.834843Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:06.834912Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:06.836214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:06.836312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:06.837917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:06.971087Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:06.974525Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9681, port: 9681 2025-04-07T14:16:06.974639Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:06.977979Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:06.978267Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:9681 return no entries 2025-04-07T14:16:06.978474Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****WrCA (8B5DF64A) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:08.912451Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490574870061059256:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:08.912504Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001249/r3tmp/tmp2fm6IN/pdisk_1.dat 2025-04-07T14:16:08.973028Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18351, node 3 2025-04-07T14:16:09.013782Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:09.013797Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:09.013801Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:09.013875Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:09.033730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:09.033788Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:09.035391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:09.197950Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:09.201178Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22497, port: 22497 2025-04-07T14:16:09.201242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:09.204032Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:09.248922Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:09.249241Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:09.249295Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:09.296940Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:09.344993Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:09.345631Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****R-jg (9100D127) () has now valid token of ldapuser@ldap 2025-04-07T14:16:12.920910Z node 3 :TICKET_PARSER DEBUG: 
Refreshing ticket eyJh****R-jg (9100D127) 2025-04-07T14:16:12.921040Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22497, port: 22497 2025-04-07T14:16:12.921088Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:12.924069Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:12.968885Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:12.969204Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:12.969242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.016883Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.060896Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:13.061415Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****R-jg (9100D127) () has now valid token of ldapuser@ldap 2025-04-07T14:16:13.912728Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490574870061059256:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.912824Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:16.922707Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****R-jg (9100D127) 2025-04-07T14:16:16.922759Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22497, port: 22497 2025-04-07T14:16:16.922816Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:16.925465Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:16.968885Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:16.969296Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:16.969331Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:17.012923Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:17.061132Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:17.061737Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****R-jg 
(9100D127) () has now valid token of ldapuser@ldap 2025-04-07T14:16:19.798946Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490574916844513570:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:19.799052Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001249/r3tmp/tmpogAQ4O/pdisk_1.dat 2025-04-07T14:16:19.901551Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22167, node 4 2025-04-07T14:16:19.928781Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:19.928847Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:19.930900Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:19.956373Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:19.956392Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:19.956396Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:19.956471Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:20.064428Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:20.068095Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3199, port: 3199 2025-04-07T14:16:20.068228Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:20.071623Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:20.117198Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KAxA (FEA2375D) () has now valid token of ldapuser@ldap 2025-04-07T14:16:24.799344Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490574916844513570:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:24.799469Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:24.803344Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****KAxA (FEA2375D) 2025-04-07T14:16:24.803694Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3199, port: 3199 2025-04-07T14:16:24.803795Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:24.807496Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:24.849093Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KAxA (FEA2375D) () has now valid token of ldapuser@ldap 2025-04-07T14:16:28.805198Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****KAxA (FEA2375D) 2025-04-07T14:16:28.805282Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3199, port: 3199 2025-04-07T14:16:28.805345Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:28.808771Z node 4 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:28.857300Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KAxA (FEA2375D) () has now valid token of ldapuser@ldap 2025-04-07T14:16:30.624776Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490574967234874594:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:30.624904Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001249/r3tmp/tmpX81epo/pdisk_1.dat 2025-04-07T14:16:30.763383Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:30.778069Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:30.778151Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:30.784142Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24366, node 5 2025-04-07T14:16:30.811106Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:30.811130Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:30.811136Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:30.811217Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:30.964896Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-07T14:16:30.968471Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15560, port: 15560 2025-04-07T14:16:30.968571Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:30.972112Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:31.012933Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-07T14:16:31.013380Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-07T14:16:31.013450Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:31.061013Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:31.104988Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-07T14:16:31.105692Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ekKA (F41DA971) () has now valid token of ldapuser@ldap 2025-04-07T14:16:34.629200Z node 5 
:TICKET_PARSER DEBUG: Refreshing ticket eyJh****ekKA (F41DA971) 2025-04-07T14:16:34.629439Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15560, port: 15560 2025-04-07T14:16:34.629557Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-07T14:16:34.635214Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-07T14:16:34.635440Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:15560 return no entries 2025-04-07T14:16:34.635563Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ekKA (F41DA971) () has now permanent error message 'Could not login via LDAP' 2025-04-07T14:16:35.625064Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490574967234874594:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:35.625137Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:37.630541Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****ekKA (F41DA971) |96.6%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PrivateApi::Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-04-07T14:16:39.717363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:39.717623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:39.717831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a1/r3tmp/tmpioTQVv/pdisk_1.dat 2025-04-07T14:16:40.098902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:40.142464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:40.200584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:40.200746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:40.211927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:40.328140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:40.668215Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-07T14:16:40.668339Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-07T14:16:40.674808Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} started# 5 actors each with inflight# 4 2025-04-07T14:16:40.674906Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:40.674972Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:40.675016Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:40.675043Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:40.675071Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:40.687014Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} session: ydb://session/3?node_id=1&id=OGEzMjNiMGYtYWNhZTM4NjEtNGRlOWRlZjItYjA4ODFhYmE= 2025-04-07T14:16:40.688837Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} session: ydb://session/3?node_id=1&id=ZTU5YTE5MmItZGZiOGQ0ODYtM2JlOWQwOTMtNDAwMTFiYTg= 2025-04-07T14:16:40.690267Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} session: 
ydb://session/3?node_id=1&id=NGMzZjg1ZjgtOTYyNmJkYTgtZjA5ZmVjNjktOWJmMWIwZWU= 2025-04-07T14:16:40.692499Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} session: ydb://session/3?node_id=1&id=MzRjYzAyNWMtMTk3MDJjOWEtYzJjM2U1YWItYWI0MmEyMjY= 2025-04-07T14:16:40.692542Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} session: ydb://session/3?node_id=1&id=ZWI3MmQ0YS04ODBiMWJlNy03ODk2MjMxZS01N2ViNDkyOQ== 2025-04-07T14:16:40.697194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:716:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.697457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:40.703502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:40.936109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:752:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:40.936193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:40.936248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:40.936307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:40.936365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:41.642145Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} finished in 1744035401.642086s, errors=0 2025-04-07T14:16:41.642469Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744035401642 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:41.672306Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} finished in 1744035401.672243s, errors=0 2025-04-07T14:16:41.672558Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744035401672 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:41.742087Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} finished in 1744035401.742048s, errors=0 2025-04-07T14:16:41.742282Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744035401742 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:41.811664Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} finished in 1744035401.811632s, errors=0 2025-04-07T14:16:41.811850Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744035401811 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:41.879816Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} finished in 1744035401.879785s, errors=0 2025-04-07T14:16:41.879997Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744035401879 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:41.880050Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1.205437s, oks# 20, errors# 0 2025-04-07T14:16:41.880150Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-04-07T14:16:20.070604Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574924004128749:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.070654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:20.255765386 595886 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:20.256188783 595886 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:21.072372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:21.248642Z node 1 
:YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2807: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2807 } ] 2025-04-07T14:16:21.257672Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2807: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2807 2025-04-07T14:16:22.072730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:22.279752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:22.282916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.378417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.466529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.532517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.606878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.678940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.747544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001316/r3tmp/tmpUSJxnd/pdisk_1.dat 2025-04-07T14:16:22.854083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.953677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574932594063725:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 2807, node 1 2025-04-07T14:16:23.055281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:23.067800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:23.070236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:23.070454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:23.070683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:23.070780Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:23.085226Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-07T14:16:23.085260Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:23.085269Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:23.085358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:23.085435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:23.085458Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:23.085938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:23.085964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:23.085975Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:23.086437Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2025-04-07T14:16:23.086450Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:23.086455Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:23.089546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035383132, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:23.089640Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710657:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035383132, at schemeshard: 72057594046644480 2025-04-07T14:16:23.089772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:23.091342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:23.091549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:23.091606Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:23.091697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:23.091751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:23.091818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 2, subscribers: 0 2025-04-07T14:16:23.092606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:23.092646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:23.092687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:23.092878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:23.092906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:23.092915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:16:23.092944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:16:23.093770Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-07T14:16:23.093793Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:23.093800Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:23.094045Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-04-07T14:16:23.094070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:23.094075Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:23.095053Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-07T14:16:23.095078Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:23.095084Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:23.095313Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-07T14:16:23.095332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:23.095337Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:23.096839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:23.097273Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-07T14:16:23.097297Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:23.097302Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:23.097358Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04- ... edId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-07T14:16:40.778255Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947648:2462], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:40.778261Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:40.778268Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:40.778274Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:40.778275Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947646:2461], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:40.778279Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. returned 0 rows; processed 0 rows 2025-04-07T14:16:40.778298Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. 
dropping batch for read #0 2025-04-07T14:16:40.778302Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. effective maxinflight 1024 sorted 0 2025-04-07T14:16:40.778307Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:40.778314Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1, CA Id [7:7490575008902947648:2462]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:40.778321Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947646:2461], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:40.778339Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947648:2462], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:40.778345Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947645:2460], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:16:40.778360Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947645:2460], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:40.778371Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947649:2463], TxId: 281474976710680, task: 2. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:16:40.778379Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710679, task: 1. Tasks execution finished 2025-04-07T14:16:40.778384Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 2. Finish input channelId: 1, from: [7:7490575008902947648:2462] 2025-04-07T14:16:40.778390Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947645:2460], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-07T14:16:40.778401Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947649:2463], TxId: 281474976710680, task: 2. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:40.778421Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947649:2463], TxId: 281474976710680, task: 2. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:40.778448Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947646:2461], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:40.778469Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710679, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:40.778498Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710679, task: 2. Tasks execution finished 2025-04-07T14:16:40.778505Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947646:2461], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jr89rjtvazc2qw072mm6ycej. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:40.778505Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710679, task: 1. pass away 2025-04-07T14:16:40.778547Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710679, task: 2. pass away 2025-04-07T14:16:40.778587Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710679;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:40.778590Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710679;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:40.778681Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947648:2462], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:16:40.778691Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947648:2462], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:40.778702Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1. Tasks execution finished 2025-04-07T14:16:40.778706Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947648:2462], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:40.778751Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 1. pass away 2025-04-07T14:16:40.778808Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710680;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:40.778898Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947649:2463], TxId: 281474976710680, task: 2. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:40.778914Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:40.778919Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 2. Tasks execution finished 2025-04-07T14:16:40.778923Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7490575008902947649:2463], TxId: 281474976710680, task: 2. Ctx: { TraceId : 01jr89rjkz2p3xfn60f188ce6d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:40.778948Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710680, task: 2. pass away 2025-04-07T14:16:40.778988Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710680;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:40.893019Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr89rk2g1hn05wgh3xbt0ksn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDVjOTlmOWMtNmI1MGEwY2UtZGJlMWY0YWEtZjU3OTI3YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:16:40.893116Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr89rk2fa7brf325x8gkve9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzYzNDBiZTktYTE3MjY0NDgtYTQxZDAwMTAtYjA2NGZjNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:16:41.446861Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:7402: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:7402 >> KqpPg::PgUpdate [GOOD] >> KqpPg::PgUpdateCompoundKey >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> UpsertLoad::ShouldWriteDataBulkUpsertBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-04-07T14:16:22.891403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574929108024449:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:22.891465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:23.051142015 597193 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:23.051333882 597193 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:23.892926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:24.063756Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31942: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31942 } ] 2025-04-07T14:16:24.083900Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31942: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:31942 2025-04-07T14:16:24.893437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012fa/r3tmp/tmprFzlGX/pdisk_1.dat 2025-04-07T14:16:25.329421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:25.329529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574941992926775:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:25.397357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574941992926775:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 31942, node 1 2025-04-07T14:16:25.463232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:25.463907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:25.464091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:25.465015Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:25.465014Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:25.465566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.466055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.466084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.466146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:25.466219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.542608Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-07T14:16:25.542640Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-07T14:16:25.542653Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:25.542660Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:25.542665Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:25.542667Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:25.544184Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-07T14:16:25.544189Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-07T14:16:25.544216Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:25.544216Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:25.544221Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:25.544223Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:25.544979Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-07T14:16:25.544998Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:25.545001Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:25.545102Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2025-04-07T14:16:25.545206Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:25.545211Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:25.546072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-07T14:16:25.546101Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:25.546114Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:25.546117Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-07T14:16:25.546131Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:25.546142Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:25.546838Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-07T14:16:25.546856Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:25.546860Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:25.546875Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-07T14:16:25.546886Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:25.546891Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:25.547920Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-07T14:16:25.547958Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:25.547963Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:25.547988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-07T14:16:25.548000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:25.548008Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:25.551155Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-07T14:16:25.551189Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:25.551195Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:25.565136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574941992927144:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:25.565138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574941992927136:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:25.565214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:25.629241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715657:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10832 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-04-07T14:16:25.637696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.638455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.638598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.638620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T14:16:25.638677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.638693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T14:16:25.638796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.638835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:16:25.639025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2025-04-07T14:16:25.639171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.644220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:25.644900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:25.645036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.645234Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operation ... tx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [4:7490575013184929780:2589], task: 2 2025-04-07T14:16:41.419060Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.816765s 2025-04-07T14:16:41.419498Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. 
Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-07T14:16:41.419618Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575013184929779 RawX2: 4503616807242268 } } DstEndpoint { ActorId { RawX1: 7490575013184929780 RawX2: 4503616807242269 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575013184929780 RawX2: 4503616807242269 } } DstEndpoint { ActorId { RawX1: 7490575013184929775 RawX2: 4503616807242232 } } InMemory: true } 2025-04-07T14:16:41.419633Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7490575013184929779:2588] 2025-04-07T14:16:41.419669Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:41.419762Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. Recv TEvReadResult from ShardID=72075186224037894, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715704 DataShard: 72075186224037894 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 9, BrokenTxLocks= 2025-04-07T14:16:41.419787Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. Taken 1 locks 2025-04-07T14:16:41.419798Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. new data for read #0 seqno = 1 finished = 1 2025-04-07T14:16:41.419818Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-04-07T14:16:41.419834Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:41.419849Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:41.419870Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. enter pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:41.419893Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. exit pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 1 freeSpace: 8388557 2025-04-07T14:16:41.419908Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. returned 1 rows; processed 1 rows 2025-04-07T14:16:41.419950Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. dropping batch for read #0 2025-04-07T14:16:41.419971Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. effective maxinflight 1024 sorted 0 2025-04-07T14:16:41.419983Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:41.419997Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1, CA Id [4:7490575013184929779:2588]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:41.420204Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:41.420220Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:16:41.420230Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:41.420246Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 2. Finish input channelId: 1, from: [4:7490575013184929779:2588] 2025-04-07T14:16:41.420261Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:16:41.420273Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:41.420273Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. 
Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-07T14:16:41.420296Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:41.420310Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1. Tasks execution finished 2025-04-07T14:16:41.420320Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929779:2588], TxId: 281474976715710, task: 1. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:41.420430Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 1. pass away 2025-04-07T14:16:41.420432Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:41.420523Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715710;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:41.420529Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:41.420549Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:41.420554Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 2. Tasks execution finished 2025-04-07T14:16:41.420562Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575013184929780:2589], TxId: 281474976715710, task: 2. Ctx: { TraceId : 01jr89rkgh9xsa4xp9semnrzx9. SessionId : ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:41.420603Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715710, task: 2. pass away 2025-04-07T14:16:41.420646Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715710;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:41.421843Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715711. Ctx: { TraceId: 01jr89rkgh9xsa4xp9semnrzx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmY2MWI0ODAtNzRlNjEzZGUtOTJjZWNlOS03NDcyYTJmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:16:41.953242Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:10657: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:10657 >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-04-07T14:16:45.454169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:45.454366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:45.454574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001497/r3tmp/tmp0LiSzr/pdisk_1.dat 2025-04-07T14:16:45.697631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:45.727070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:45.775864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:45.776027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:45.787306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:45.897884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.188325Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-07T14:16:46.188443Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-07T14:16:46.283436Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor finished in 0.094646s, errors=0 2025-04-07T14:16:46.283526Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-04-07T14:16:42.077205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:42.077475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:42.077680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a0/r3tmp/tmpahY1AQ/pdisk_1.dat 2025-04-07T14:16:42.324434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:42.357950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:42.406580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:42.406707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:42.418007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:42.529244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:42.829730Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:42.829856Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-07T14:16:42.896501Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor finished in 0.066336s, errors=0 2025-04-07T14:16:42.896586Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 2025-04-07T14:16:45.741452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:45.741736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:45.741791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0014a0/r3tmp/tmp3Lb22l/pdisk_1.dat 2025-04-07T14:16:45.971278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:45.993454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:46.039258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:46.039397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:46.050903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:46.162058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.430046Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:46.430154Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-07T14:16:46.496680Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor finished in 0.066227s, errors=0 2025-04-07T14:16:46.496779Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:703:2587] with tag# 2 >> Yq_1::ModifyQuery [GOOD] >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-04-07T14:16:45.900193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:45.900372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:45.900514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00148b/r3tmp/tmpCQPxJq/pdisk_1.dat 2025-04-07T14:16:46.149519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.180976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:46.230695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:46.230825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:46.242026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:46.351471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.638309Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-04-07T14:16:46.638414Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-04-07T14:16:46.641314Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} started# 5 actors each with inflight# 4 2025-04-07T14:16:46.641364Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-07T14:16:46.641407Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-07T14:16:46.641455Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-07T14:16:46.641479Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-07T14:16:46.641502Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-07T14:16:46.643938Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} session: ydb://session/3?node_id=1&id=NzBmNTNiYzQtY2I0ZTFiOTAtNDk5MWEyYzktNGMxYzgwZTk= 2025-04-07T14:16:46.645273Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} session: ydb://session/3?node_id=1&id=NTQ0MmQzMzktMzJkZmEwZTUtMTQ3M2YzYWMtNjhmMDNjODA= 2025-04-07T14:16:46.646495Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} session: ydb://session/3?node_id=1&id=ZWYyZTlmYzctZjcyOWFlMzAtYmNlMjNhZGUtMzZkMGFmMDE= 2025-04-07T14:16:46.648654Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} session: ydb://session/3?node_id=1&id=MTg1ZWY5OTYtYjJmMDQwMzEtYzFlNDExNjMtZmFlMTM2MTM= 2025-04-07T14:16:46.648720Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} session: ydb://session/3?node_id=1&id=ZjVmMGVhYzktY2YxZWFhYzgtNDc2MDFlM2YtZTAyMWMyZTc= 2025-04-07T14:16:46.652012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:716:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.652390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:46.658503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:46.898194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:752:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:46.898259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:46.898294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:46.898337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:46.898383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:47.190803Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} finished in 1744035407.190763s, errors=0 2025-04-07T14:16:47.191031Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744035407190 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:47.257052Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} finished in 1744035407.257006s, errors=0 2025-04-07T14:16:47.257217Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744035407257 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:47.326547Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} finished in 1744035407.326497s, errors=0 2025-04-07T14:16:47.326761Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744035407326 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:47.397928Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} finished in 1744035407.397891s, errors=0 2025-04-07T14:16:47.398138Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744035407397 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:47.468034Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} finished in 1744035407.467995s, errors=0 2025-04-07T14:16:47.468277Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744035407467 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:47.468334Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 0.827173s, oks# 20, errors# 0 2025-04-07T14:16:47.468485Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 >> KqpPg::Returning [GOOD] >> KqpPg::SelectIndex >> UpsertLoad::ShouldWriteKqpUpsert2 >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> Yq_1::DeleteQuery [GOOD] >> KqpPg::PgUpdateCompoundKey [GOOD] >> KqpPg::PgAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-04-07T14:16:20.073005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574920187016813:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.073156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:20.232635365 595887 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:20.232838588 595887 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:21.075002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:21.240249Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32010: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32010 } ] 2025-04-07T14:16:21.241534Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32010: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:32010 2025-04-07T14:16:22.075893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:22.411955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:22.412832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574928776951787:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001322/r3tmp/tmpxnpWii/pdisk_1.dat 2025-04-07T14:16:22.496899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574928776951787:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.551904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 TServer::EnableGrpc on GrpcPort 32010, node 1 2025-04-07T14:16:22.553511Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:22.553623Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:22.553939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:22.554469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:22.554604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:22.555912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.556427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.556454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.556503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:22.556573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.770151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.782137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:22.786660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:22.786877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.787113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.788139Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:22.797143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:22.797278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:22.797308Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:22.797570Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-07T14:16:22.797599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-07T14:16:22.797605Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.797613Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:22.797616Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.797618Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:22.801700Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-07T14:16:22.801729Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:22.801735Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:22.802637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-07T14:16:22.802654Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:22.802660Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:22.803516Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-07T14:16:22.803541Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:22.803547Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:22.804292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-07T14:16:22.804317Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.804323Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.805239Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-07T14:16:22.805262Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.805270Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.806076Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-07T14:16:22.806094Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:22.806098Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:22.806788Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-07T14:16:22.806813Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.806817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.807449Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". 
Create session OK 2025-04-07T14:16:22.807465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:22.807469Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:22.808276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:22.808324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:22.808337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:22.809097Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-07T14:16:22.809124Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:22.809128Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:22.809926Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-07T14:16:22.809943Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:22.809947Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:22.810894Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-07T14:16:22.810910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:22.810913Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:22.818756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:22.822011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035382866, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:22.822077Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715657:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035382866, at schemeshard: 72057594046644480 2025-04-07T14:16:22.822216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:22.824730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.824876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.824986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:22.825201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:22.825257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, oper ... 820693Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. AFTER: 0.1 2025-04-07T14:16:45.820706Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-07T14:16:45.820775Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:45.820793Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-07T14:16:45.820811Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-07T14:16:45.821114Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-07T14:16:45.821259Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575031649422814 RawX2: 4503616807242692 } } DstEndpoint { ActorId { RawX1: 7490575031649422815 RawX2: 4503616807242693 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575031649422815 RawX2: 4503616807242693 } } DstEndpoint { ActorId { RawX1: 7490575031649422810 RawX2: 4503616807242039 } } InMemory: true } 2025-04-07T14:16:45.821280Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7490575031649422814:3012] 2025-04-07T14:16:45.821335Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:45.821376Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-07T14:16:45.821397Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. Taken 0 locks 2025-04-07T14:16:45.821409Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. new data for read #0 seqno = 1 finished = 1 2025-04-07T14:16:45.821427Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-07T14:16:45.821441Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:45.821457Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:45.821472Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:45.821495Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8387509 2025-04-07T14:16:45.821514Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. returned 1 rows; processed 1 rows 2025-04-07T14:16:45.821552Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. dropping batch for read #0 2025-04-07T14:16:45.821569Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. effective maxinflight 1024 sorted 0 2025-04-07T14:16:45.821580Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:45.821594Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1, CA Id [4:7490575031649422814:3012]. returned async data processed rows 1 left freeSpace 8387509 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:45.821801Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:45.821814Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-07T14:16:45.821825Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:45.821846Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 2. 
Finish input channelId: 1, from: [4:7490575031649422814:3012] 2025-04-07T14:16:45.821879Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:16:45.821895Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-07T14:16:45.821904Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:45.821916Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:45.821932Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1. Tasks execution finished 2025-04-07T14:16:45.821945Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422814:3012], TxId: 281474976715814, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. TraceId : 01jr89rqqt627aewwvhh54fszm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:45.822047Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 1. pass away 2025-04-07T14:16:45.822099Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:45.822138Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715814;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:45.822240Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:45.822277Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:45.822291Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 2. Tasks execution finished 2025-04-07T14:16:45.822303Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575031649422815:3013], TxId: 281474976715814, task: 2. Ctx: { TraceId : 01jr89rqqt627aewwvhh54fszm. SessionId : ydb://session/3?node_id=4&id=NWNiNDM2NzItMzYyOWM0ZGQtZDU2YWI1MC02YjFjOTI0ZQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:45.822356Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715814, task: 2. pass away 2025-04-07T14:16:45.822420Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715814;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:46.258214Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:2884: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:2884 >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> ReadLoad::ShouldReadIterate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-04-07T14:16:22.776172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574932133750942:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:22.776296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:22.936159194 597092 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:22.937062468 597092 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:23.777882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:23.942395Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8115: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8115 } ] 2025-04-07T14:16:23.942932Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8115: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8115 2025-04-07T14:16:24.778412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012fe/r3tmp/tmp9IHORH/pdisk_1.dat 2025-04-07T14:16:25.056905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:25.057011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574945018653276:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:25.132267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574945018653276:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 8115, node 1 2025-04-07T14:16:25.183535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:25.183645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:25.184834Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:25.185009Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:25.185095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.185530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.185553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.185589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:25.185636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:25.477346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.484909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.485810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.487202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:25.487356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:25.487384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:16:25.487905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:25.488244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:25.488257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:25.489063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.490857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035385533, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:25.490875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:25.491009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:25.491968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.492065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.492104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:25.492171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:25.492194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:25.492219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:25.493379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:25.493408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:25.493417Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:25.493468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:16:25.565704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:25.565839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:25.567431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:25.567640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.567865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.567974Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:25.568482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:25.568554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:25.568569Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:25.568780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:25.568798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:25.568804Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:25.570338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035385617, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:25.570364Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035385617, at schemeshard: 72057594046644480 2025-04-07T14:16:25.570429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T14:16:25.571384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:25.571495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:25.571524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:16:25.571551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress i ... DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:46.788304Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:46.788307Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-07T14:16:46.788321Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:46.788334Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:46.788346Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. 
returned 0 rows; processed 0 rows 2025-04-07T14:16:46.788373Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. dropping batch for read #0 2025-04-07T14:16:46.788384Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. effective maxinflight 1024 sorted 0 2025-04-07T14:16:46.788395Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:46.788409Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1, CA Id [4:7490575034693016189:2983]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:46.788414Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575034693016189 RawX2: 4503616807242663 } } DstEndpoint { ActorId { RawX1: 7490575034693016190 RawX2: 4503616807242664 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575034693016190 RawX2: 4503616807242664 } } DstEndpoint { ActorId { RawX1: 7490575034693016185 RawX2: 4503616807242014 } } InMemory: true } 2025-04-07T14:16:46.788425Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7490575034693016189:2983] 2025-04-07T14:16:46.788466Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016189:2983], TxId: 281474976715806, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:46.788469Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:46.788486Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-07T14:16:46.788504Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 2. 
Finish input channelId: 1, from: [4:7490575034693016189:2983] 2025-04-07T14:16:46.788544Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:46.788565Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016189:2983], TxId: 281474976715806, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-07T14:16:46.788579Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:46.788595Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016189:2983], TxId: 281474976715806, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:46.788623Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:46.788632Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1. Tasks execution finished 2025-04-07T14:16:46.788642Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016189:2983], TxId: 281474976715806, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:46.788651Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:46.788679Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 2. Tasks execution finished 2025-04-07T14:16:46.788688Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575034693016190:2984], TxId: 281474976715806, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGNhNmZjMzMtMjkxMWE3NDMtNTMyZmJkMTAtODg0MDhlN2Q=. CustomerSuppliedId : . TraceId : 01jr89rrpy07rnb4ge8h7d2k1f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:46.788742Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 1. pass away 2025-04-07T14:16:46.788743Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715806, task: 2. 
pass away 2025-04-07T14:16:46.788811Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715806;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:46.788812Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715806;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:46.791556Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utquedpsep7d8h7joeb7" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:662: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2025-04-07T14:16:47.687799Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:26328: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:26328 2025-04-07T14:16:48.007229Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 14] state 'Ready' dataSize 640 rowCount 1 cpuUsage 0 2025-04-07T14:16:48.012976Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037896 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 15] state 'Ready' dataSize 800 rowCount 1 cpuUsage 0 2025-04-07T14:16:48.014796Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 7] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:16:48.045112Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037897 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 16] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:16:48.045170Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037892 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] state 'Ready' dataSize 8232 rowCount 1 cpuUsage 0 2025-04-07T14:16:48.045443Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 13] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:16:48.045486Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 8] state 'Ready' dataSize 1992 rowCount 1 cpuUsage 0 2025-04-07T14:16:48.045502Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 9] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:16:48.049744Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037893 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-07T14:16:48.055280Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] state 'Ready' dataSize 4904 rowCount 1 cpuUsage 0 >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-04-07T14:16:30.696089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574965198936822:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:30.696182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:30.868043522 599941 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:30.868203218 599941 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:31.697989Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:31.870340Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10896: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10896 } ] 2025-04-07T14:16:31.879743Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10896: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10896 2025-04-07T14:16:32.698708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:32.888122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:32.889588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:32.985824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:33.059094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:33.139006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012cf/r3tmp/tmpFX3cYf/pdisk_1.dat 2025-04-07T14:16:33.202477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:33.290139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574973788871764:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 10896, node 1 2025-04-07T14:16:33.328430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:33.328862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:33.328990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:33.330035Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:33.330053Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:33.330180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:33.330579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:33.330599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:33.330637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:33.330675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:33.421034Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-07T14:16:33.421054Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-07T14:16:33.421072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:33.421083Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:33.421084Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:33.421090Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:33.422432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-07T14:16:33.422458Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:33.422460Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-07T14:16:33.422465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:33.422473Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:33.422478Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:33.423118Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-07T14:16:33.423141Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:33.423146Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:33.423187Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". 
Create session OK 2025-04-07T14:16:33.423198Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:33.423203Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:33.424025Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-07T14:16:33.424044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:33.424049Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:33.424090Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-07T14:16:33.424100Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:33.424105Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:33.424911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-07T14:16:33.424927Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:33.424931Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:33.424988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-07T14:16:33.424999Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:33.425004Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:33.425721Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-07T14:16:33.425738Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:33.425743Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:33.427063Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-07T14:16:33.427105Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:33.427111Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:33.428936Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-07T14:16:33.428969Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:33.428973Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:33.443743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574978083839509:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:33.443872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574978083839515:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:33.443923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:33.448687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:33.457692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:33.458645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:33.458817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:33.458848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-07T14:16:33.458915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:33.458933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T14:16:33.458975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:33.459037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/poo ... ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.750970Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751027Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751059Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751096Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751139Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751162Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751222Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751271Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751302Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751365Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751393Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751424Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751485Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751520Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751577Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751673Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751716Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751748Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751777Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.751831Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with 
error: 2025-04-07T14:16:47.755235Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755271Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755325Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755372Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755474Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755503Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755535Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755617Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755652Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755688Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755740Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755769Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755806Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755872Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755908Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.755984Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756036Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756081Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756183Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756259Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756291Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756322Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756356Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756439Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756477Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756550Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756585Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756617Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756654Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:47.756724Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-04-07T14:16:45.653510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:45.653736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:45.653917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00148d/r3tmp/tmp9ySTbU/pdisk_1.dat 2025-04-07T14:16:45.921839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:45.951512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:45.998996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:45.999127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:46.010289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:46.120205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.406030Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:46.406142Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-07T14:16:46.497207Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor finished in 0.090779s, errors=0 2025-04-07T14:16:46.497272Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 2025-04-07T14:16:48.788220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:48.788494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:48.788552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00148d/r3tmp/tmpvYd8pd/pdisk_1.dat 2025-04-07T14:16:49.021890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:49.045879Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:49.091868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:49.091999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:49.103222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:49.213629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:49.474205Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:49.474329Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-07T14:16:49.561069Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor finished in 0.086348s, errors=0 2025-04-07T14:16:49.561142Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:703:2587] with tag# 2 >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 
2025-04-07T14:13:43.596961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:43.597016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.597039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:43.597061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:43.597097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:43.597130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:43.597189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:43.597439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:43.646227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:43.646263Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.653871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:43.653957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:43.654071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:43.661628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:43.661770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:43.662161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.662392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:43.666466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.668036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.668079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.668186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:43.668215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.668251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:43.668348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for 
TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:43.673899Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:43.767438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:43.767591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.767784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:43.767929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:43.767971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.769516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.769613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:43.769725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.769764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:43.769790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:43.769821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:43.771423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.771465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:43.771497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:43.772978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.773016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.773061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.773106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.776309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-04-07T14:13:43.777724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:43.777890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:43.778718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:43.778856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:43.778918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.779106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:43.779150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:43.779316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:43.779396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:43.781201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:43.781245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:43.781387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:43.781423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:43.781700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:43.781743Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:43.781835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:43.781885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.781926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:43.781975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:43.782011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:43.782040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 1:0 2025-04-07T14:13:43.782113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:43.782151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:43.782189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... shard: 72057594046678944 2025-04-07T14:16:49.660609Z node 102 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 311us result status StatusSuccess 2025-04-07T14:16:49.661850Z node 102 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 
500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:16:49.666942Z node 102 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:16:49.667149Z node 102 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 228us result status StatusSuccess 2025-04-07T14:16:49.667754Z node 102 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DbCounters::TabletsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-04-07T14:16:46.567428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:46.567749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:46.567984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001481/r3tmp/tmpwBvyPp/pdisk_1.dat 2025-04-07T14:16:46.833308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:46.865840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:46.913896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:46.914029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:46.925479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:47.036087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:47.350692Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-04-07T14:16:47.350829Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-07T14:16:47.434477Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 2} TUpsertActor finished in 0.083251s, errors=0 2025-04-07T14:16:47.434579Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 2025-04-07T14:16:49.547235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:49.547456Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:49.547498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001481/r3tmp/tmpfdtI1K/pdisk_1.dat 2025-04-07T14:16:49.746245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:49.771575Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:49.817936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:49.818033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:49.829093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:49.939471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:50.206068Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-07T14:16:50.206186Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-07T14:16:50.272829Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor finished in 0.066335s, errors=0 2025-04-07T14:16:50.272925Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:703:2587] with tag# 2 >> TPersQueueTest::BadSids [GOOD] >> TPersQueueTest::Cache >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> YdbYqlClient::DiscoveryLocationOverride >> YdbYqlClient::TestDoubleKey >> TTableProfileTests::WrongTableProfile >> TGRpcClientLowTest::SimpleRequest >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> KqpPg::SelectIndex [GOOD] >> KqpPg::TableDeleteAllData >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> TGRpcYdbTest::DropTableBadRequest >> UpsertLoad::ShouldDropCreateTable [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> KqpPg::PgAggregate [GOOD] >> KqpPg::MkqlTerminate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-04-07T14:16:50.981752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:50.981976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:50.982127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001461/r3tmp/tmpm6cyuc/pdisk_1.dat 2025-04-07T14:16:51.263654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:51.297751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:51.349619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:51.349731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:51.361037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:51.470845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:51.754029Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-07T14:16:51.754147Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-07T14:16:51.757250Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} started# 5 actors each with inflight# 4 2025-04-07T14:16:51.757322Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:51.757377Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:51.757416Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:51.757441Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:51.757467Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-07T14:16:51.760170Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} session: ydb://session/3?node_id=1&id=NWUyZjIyN2QtM2U2Njc5OGQtOTFiNjRiZTYtODIyNzBjYTk= 2025-04-07T14:16:51.761591Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} session: ydb://session/3?node_id=1&id=OTY0ZDg4M2QtMWRlZTgxODctM2ExYjcyYmUtODc5YzY4ZWM= 2025-04-07T14:16:51.762964Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} session: 
ydb://session/3?node_id=1&id=ZjljNTZmNTUtMTFmNWFkNzYtYWIxNjhmZDAtZDg1MmVjNjQ= 2025-04-07T14:16:51.765280Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} session: ydb://session/3?node_id=1&id=YjViODdlZDYtMWMyZTA3NGMtNTc3NTU5ZjAtYWRhNTQyMWQ= 2025-04-07T14:16:51.765319Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} session: ydb://session/3?node_id=1&id=OTBjYjhkMjUtMmNlMzUwODItZDNiYmIyMGUtZGUxZTMzNzI= 2025-04-07T14:16:51.768826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:716:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.768947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.769014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.769059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.769107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.769165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.769240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:51.775477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:52.001994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:752:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:52.002096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:52.002181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:52.002237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:52.002319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:52.380443Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 2} finished in 1744035412.380374s, errors=0 2025-04-07T14:16:52.380766Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744035412380 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:52.452908Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 5} finished in 1744035412.452859s, errors=0 2025-04-07T14:16:52.453161Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744035412452 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:52.527040Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 4} finished in 1744035412.526995s, errors=0 2025-04-07T14:16:52.527307Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744035412526 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:52.601226Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 1} finished in 1744035412.601167s, errors=0 2025-04-07T14:16:52.601481Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744035412601 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:52.677117Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:703:2587], subTag: 3} finished in 1744035412.677081s, errors=0 2025-04-07T14:16:52.677386Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:702:2586], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744035412677 OperationsOK: 4 OperationsError: 0 } 2025-04-07T14:16:52.677445Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 0.920347s, oks# 20, errors# 0 2025-04-07T14:16:52.677545Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:703:2587] with tag# 2 >> Yq_1::CreateQuery_Without_Connection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-04-07T14:16:48.246790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:48.246989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:48.247130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001472/r3tmp/tmpvrP7bE/pdisk_1.dat 2025-04-07T14:16:48.486260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:48.517706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:48.565370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:48.565478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:48.576883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:48.689213Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-04-07T14:16:48.834335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:610:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.834491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.853885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:49.166153Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-04-07T14:16:49.167284Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-07T14:16:49.188955Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor finished in 0.021454s, errors=0 2025-04-07T14:16:49.189244Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:49.189387Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-07T14:16:49.244537Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor finished in 0.054967s, errors=0 2025-04-07T14:16:49.244616Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:719:2596] with tag# 3 2025-04-07T14:16:51.486318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:51.486571Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:51.486619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001472/r3tmp/tmpDGcHOY/pdisk_1.dat 2025-04-07T14:16:51.665178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:51.687176Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:51.732660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:51.732803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:51.744166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:51.853671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:52.111830Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-04-07T14:16:52.111939Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-04-07T14:16:52.518042Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 2} TUpsertActor finished in 0.405835s, errors=0 2025-04-07T14:16:52.518146Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:703:2587] with tag# 2 2025-04-07T14:16:52.521554Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-04-07T14:16:52.529786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:745:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:52.529880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:52.746773Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-04-07T14:16:52.757781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:811:2674], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:52.757880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:52.766631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:16:52.811494Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-07T14:16:53.022796Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-04-07T14:16:53.023037Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:741:2625], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-07T14:16:53.034668Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:741:2625], subTag: 1} TUpsertActor finished in 0.011385s, errors=0 2025-04-07T14:16:53.034970Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-07T14:16:53.035226Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:741:2625], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-07T14:16:53.092217Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:741:2625], subTag: 3} TUpsertActor finished in 0.056640s, errors=0 2025-04-07T14:16:53.092305Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:902:2747] with tag# 3 >> KqpOlapScheme::InsertAddInsertDrop [GOOD] >> KqpOlapScheme::InsertDropAddColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-04-07T14:16:26.373617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574946341362913:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:26.373704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:26.512116609 598206 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:26.512238597 598206 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:27.375478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:27.519875Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27383: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27383 } ] 2025-04-07T14:16:27.527841Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27383: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27383 2025-04-07T14:16:28.376386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:28.775738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:28.775966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574954931297857:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012f3/r3tmp/tmpfIAtMg/pdisk_1.dat 2025-04-07T14:16:28.870606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574954931297857:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:28.942683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574954931297857:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 27383, node 1 2025-04-07T14:16:28.968346Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:28.968371Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:28.969213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:16:28.971169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:16:28.972861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:28.973461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:28.973612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:28.974841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:28.975435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:28.975463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:28.975512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:28.975559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19605 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:29.191290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:29.196263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:29.197542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:29.199622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:29.199805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:29.199823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:16:29.200521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:29.201281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:29.201306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:29.202407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:29.205172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035389250, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:29.205202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:29.205455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:29.206716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:29.206889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:29.206942Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:29.207014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:29.207050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:29.207091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:29.208998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:29.209042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:29.209058Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:29.209119Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:16:29.304421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:29.304602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:29.306557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:29.306703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:29.306874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:29.306956Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:29.307381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:29.307417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:29.307432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:29.307600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:29.307640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:29.307659Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:29.310850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035389355, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:29.310904Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035389355, at schemeshar ... 
.223803Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.223851Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.223904Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.223936Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.223995Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224022Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224101Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224129Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224183Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224215Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224286Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224323Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224401Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224430Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224482Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224512Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224588Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224617Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224682Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224715Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224806Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224857Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224913Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.224946Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225000Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225032Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225100Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225138Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225183Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225215Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225299Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225325Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225374Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225407Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225445Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225483Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225517Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225562Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225597Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-07T14:16:52.225675Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225707Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225745Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225782Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225818Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225908Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225936Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.225983Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226014Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226064Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226092Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226145Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226175Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226225Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226260Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226308Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226336Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226397Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226430Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226487Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226544Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226614Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226657Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226719Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226773Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226810Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226868Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226930Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.226980Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227021Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227062Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227111Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227176Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227215Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227255Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227299Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227337Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227376Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227424Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-07T14:16:52.227452Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227502Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227532Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227597Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227629Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227681Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227720Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227802Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227831Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227878Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.227917Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228023Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228056Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228110Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228148Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228188Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228234Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228275Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228314Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228357Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228398Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228439Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228478Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228525Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228589Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228626Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228711Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228740Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228810Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228881Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.228941Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229019Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229074Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229152Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229205Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229241Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229318Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229348Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229415Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-07T14:16:52.229461Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229516Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229568Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229605Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229645Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229684Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229731Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229765Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229807Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229914Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.229948Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.230035Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:52.230076Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection >> YdbYqlClient::TestTzTypesFullStack >> Yq_1::Basic_TaggedLiteral [GOOD] >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::DeleteTableWithDeletedIndex >> TGRpcNewCoordinationClient::SessionMethods >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryBadRequest >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-04-07T14:16:28.814290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574958209825529:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:28.814583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:29.002279795 599072 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:29.002491187 599072 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:29.816231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:29.999389Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4353: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4353 } ] 2025-04-07T14:16:30.006316Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4353: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4353 2025-04-07T14:16:30.816858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:31.167575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:31.168525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.248318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.325593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.382371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012e7/r3tmp/tmp3kLgcj/pdisk_1.dat 2025-04-07T14:16:31.443233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.504621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574971094727765:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 4353, node 1 2025-04-07T14:16:31.561330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:31.561917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:31.562080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:31.563106Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:31.563108Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:31.563553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:31.563923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:31.563961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:31.564012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:31.564064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-07T14:16:31.825930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:16:31.825978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:31.826778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:31.826800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:31.826808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:31.826933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:31.838144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:31.849436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:31.850757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:31.852734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:31.852915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:31.852935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:16:31.853898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:31.854559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:31.854583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:31.855850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:31.859172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035391903, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:31.859208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:31.859451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:31.861414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:31.861645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:31.861739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:31.861854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:31.861903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:31.861944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:31.863972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:31.864017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:31.864041Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:31.864115Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:31.864511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:31.864873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:31.866476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:31.866603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:31.866771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:31.866826Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:31.868113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:31.8681 ... ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704520Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704559Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704606Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704649Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704731Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704843Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704881Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704954Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.704989Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705040Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705079Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705127Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705196Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705229Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705286Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705343Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705390Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705437Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705478Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705522Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705570Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705609Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705650Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.705693Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709676Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709709Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709759Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709793Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709852Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709879Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709955Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.709987Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710047Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710099Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710143Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710195Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710233Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710275Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710314Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710355Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710396Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710438Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710477Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710517Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710558Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710626Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710712Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710756Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710879Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.710963Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.711006Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:16:54.711092Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> YdbYqlClient::RetryOperationAsync >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-04-07T14:16:52.057458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:52.057664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:52.057815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001452/r3tmp/tmpCBTcX9/pdisk_1.dat 2025-04-07T14:16:52.337568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:52.368463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:52.416279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:52.416414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:52.427648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:52.537302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:52.849472Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-04-07T14:16:52.851022Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-07T14:16:52.900733Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 1} TUpsertActor finished in 0.049426s, errors=0 2025-04-07T14:16:52.901206Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-07T14:16:52.901305Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:711:2595] with id# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-07T14:16:52.902037Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:702:2586], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-07T14:16:52.902163Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:714:2598] 2025-04-07T14:16:52.902227Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Bootstrap called, sample# 0 2025-04-07T14:16:52.902257Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Connect to# 72075186224037888 called 2025-04-07T14:16:52.902775Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2025-04-07T14:16:52.907363Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} finished in 0.004545s, read# 1000 2025-04-07T14:16:52.907657Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:714:2598] with chunkSize# 0 finished: 0 { DurationMs: 4 OperationsOK: 1000 OperationsError: 0 } 2025-04-07T14:16:52.907754Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:717:2601] 2025-04-07T14:16:52.907792Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 2} Bootstrap called, sample# 0 2025-04-07T14:16:52.907813Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 2} Connect to# 72075186224037888 called 2025-04-07T14:16:52.908006Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:53.097427Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 2} finished in 0.189375s, read# 1000 2025-04-07T14:16:53.097579Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:717:2601] with chunkSize# 1 finished: 0 { DurationMs: 189 OperationsOK: 1000 OperationsError: 0 } 2025-04-07T14:16:53.097688Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:720:2604] 2025-04-07T14:16:53.097734Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 3} Bootstrap called, sample# 0 2025-04-07T14:16:53.097764Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 3} Connect to# 72075186224037888 called 2025-04-07T14:16:53.098038Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:53.119643Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 3} finished in 0.021558s, read# 1000 2025-04-07T14:16:53.119746Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:720:2604] with chunkSize# 10 finished: 0 { DurationMs: 21 OperationsOK: 1000 OperationsError: 0 } 2025-04-07T14:16:53.119841Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:723:2607] 2025-04-07T14:16:53.119877Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 4} Bootstrap called, sample# 1000 2025-04-07T14:16:53.119900Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 4} Connect to# 72075186224037888 called 2025-04-07T14:16:53.120084Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:53.122334Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 4} finished in 0.001746s, sampled# 1000, iter finished# 1, oks# 1000 2025-04-07T14:16:53.122417Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:702:2586], subTag: 3} received keyCount# 1000 2025-04-07T14:16:53.122534Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:702:2586], subTag: 3} started read actor with id# [1:726:2610] 2025-04-07T14:16:53.122575Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:711:2595], subTag: 5} Bootstrap called, will read keys# 1000 2025-04-07T14:16:53.443885Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:702:2586], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-07T14:16:53.444097Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 321 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 34\n" } 2025-04-07T14:16:53.444262Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 0.542819s with report: { DurationMs: 4 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 189 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 21 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 321 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 34\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-07T14:16:53.444575Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:711:2595] with tag# 3 2025-04-07T14:16:56.255177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:56.255514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:56.255585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001452/r3tmp/tmpR94ELF/pdisk_1.dat 2025-04-07T14:16:56.476607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.497698Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:56.543022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.543145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.554207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:56.663335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.922216Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-04-07T14:16:56.922442Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-07T14:16:56.944223Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:702:2586], subTag: 1} TUpsertActor finished in 0.021593s, errors=0 2025-04-07T14:16:56.944627Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-07T14:16:56.944716Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:711:2595] with id# {Tag: 0, parent: [2:702:2586], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-07T14:16:56.945690Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:702:2586], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-07T14:16:56.945824Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:714:2598] 2025-04-07T14:16:56.945948Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 1} Bootstrap called, sample# 0 2025-04-07T14:16:56.945989Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 1} Connect to# 72075186224037888 called 2025-04-07T14:16:56.946238Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2025-04-07T14:16:56.946890Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 1} finished in 0.000608s, read# 10 2025-04-07T14:16:56.947054Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:714:2598] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-07T14:16:56.947189Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:717:2601] 2025-04-07T14:16:56.947229Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 2} Bootstrap called, sample# 0 2025-04-07T14:16:56.947263Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 2} Connect to# 72075186224037888 called 2025-04-07T14:16:56.947397Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:56.948812Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 2} finished in 0.001389s, read# 10 2025-04-07T14:16:56.948876Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:717:2601] with chunkSize# 1 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2025-04-07T14:16:56.948931Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:720:2604] 2025-04-07T14:16:56.948952Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 3} Bootstrap called, sample# 0 2025-04-07T14:16:56.948968Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 3} Connect to# 72075186224037888 called 2025-04-07T14:16:56.949089Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:56.949449Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 3} finished in 0.000331s, read# 10 2025-04-07T14:16:56.949513Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:720:2604] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-07T14:16:56.949573Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:723:2607] 2025-04-07T14:16:56.949599Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 4} Bootstrap called, sample# 10 2025-04-07T14:16:56.949622Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 4} Connect to# 72075186224037888 called 2025-04-07T14:16:56.949720Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-07T14:16:56.949973Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:711:2595], subTag: 4} finished in 0.000201s, sampled# 10, iter finished# 1, oks# 10 2025-04-07T14:16:56.950049Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:702:2586], subTag: 3} received keyCount# 10 2025-04-07T14:16:56.950161Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:702:2586], subTag: 3} started read actor with id# [2:726:2610] 2025-04-07T14:16:56.950219Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:711:2595], subTag: 5} Bootstrap called, will read keys# 10 2025-04-07T14:16:57.193549Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:702:2586], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-07T14:16:57.193734Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 243 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 21\n" } 2025-04-07T14:16:57.193899Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:702:2586], subTag: 3} finished in 0.249037s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 243 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 21\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-07T14:16:57.193998Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:711:2595] with tag# 3 >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> Yq_1::DescribeQuery [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> YdbYqlClient::TestDecimal1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-04-07T14:16:29.368278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574959009615206:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:29.368369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0407 14:16:29.516211149 599383 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:29.516362963 599383 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:30.370050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:30.515097Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13184: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13184 } ] 2025-04-07T14:16:30.527107Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13184: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13184 2025-04-07T14:16:31.370475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:31.714152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:31.715518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574967599550148:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.786164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574967599550148:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.838390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574967599550148:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:31.914115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574967599550148:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012db/r3tmp/tmpmjCano/pdisk_1.dat 2025-04-07T14:16:32.014392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574967599550148:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 13184, node 1 2025-04-07T14:16:32.068972Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:32.069325Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:32.086170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:16:32.087819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:16:32.089624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:32.090292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:32.090483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:32.091897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:32.092557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:32.092593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:32.092679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:32.092757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:32.334673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:32.340439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:32.342191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:32.344214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:32.344457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:32.344479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:16:32.345388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:32.345782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:32.345812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:32.346737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:32.349104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035392393, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:32.349152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:32.349396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:32.350811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:32.350917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:32.350957Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:32.351027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:32.351064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:32.351105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:32.352414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:32.352447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:32.352459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:32.352508Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:32.431146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:32.431370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:32.433336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:32.433513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:32.433690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:32.433763Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:32.434419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:32.434502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:32.434511Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:32.434673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:32.434691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:32.434697Z node 1 :FLAT_TX ... 5-04-07T14:16:56.287496Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-07T14:16:56.287568Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:56.287583Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-07T14:16:56.287601Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-07T14:16:56.287601Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646926 2025-04-07T14:16:56.287747Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575076225456598 RawX2: 4503616807242633 } } DstEndpoint { ActorId { RawX1: 7490575076225456599 RawX2: 4503616807242634 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7490575076225456599 RawX2: 4503616807242634 } } DstEndpoint { ActorId { RawX1: 7490575076225456593 RawX2: 4503616807242330 } } InMemory: true } 2025-04-07T14:16:56.287757Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7490575076225456598:2953] 2025-04-07T14:16:56.287813Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:56.288288Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. Recv TEvReadResult from ShardID=72075186224037895, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-07T14:16:56.288308Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. Taken 0 locks 2025-04-07T14:16:56.288319Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. new data for read #0 seqno = 1 finished = 1 2025-04-07T14:16:56.288339Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-04-07T14:16:56.288354Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:56.288371Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-07T14:16:56.288386Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. 
enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-07T14:16:56.288410Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 1 freeSpace: 8386365 2025-04-07T14:16:56.288427Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. returned 1 rows; processed 1 rows 2025-04-07T14:16:56.288466Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. dropping batch for read #0 2025-04-07T14:16:56.288477Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. effective maxinflight 1024 sorted 0 2025-04-07T14:16:56.288488Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-07T14:16:56.288502Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1, CA Id [4:7490575076225456598:2953]. returned async data processed rows 1 left freeSpace 8386365 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-07T14:16:56.288730Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:56.288749Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:56.288755Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-07T14:16:56.288781Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:16:56.288793Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 2. Finish input channelId: 1, from: [4:7490575076225456598:2953] 2025-04-07T14:16:56.288836Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:56.289016Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:16:56.289032Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-07T14:16:56.289055Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-07T14:16:56.289070Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1. Tasks execution finished 2025-04-07T14:16:56.289079Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456598:2953], TxId: 281474976715791, task: 1. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:56.289162Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 1. pass away 2025-04-07T14:16:56.289252Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715791;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:56.289412Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:16:56.289457Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-07T14:16:56.289465Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 2. Tasks execution finished 2025-04-07T14:16:56.289475Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7490575076225456599:2954], TxId: 281474976715791, task: 2. Ctx: { TraceId : 01jr89s1xy2fja8n9ve7zcd3ty. SessionId : ydb://session/3?node_id=4&id=MTk4ZmZlMjQtMTFlZTUxYTUtNTBjMWZhNGItNmNmYzYxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:16:56.289547Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715791, task: 2. pass away 2025-04-07T14:16:56.289616Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715791;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:16:56.401989Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:18171: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:18171 2025-04-07T14:16:57.398787Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> Yq_1::Basic_EmptyDict [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-04-07T14:16:47.509452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:47.509689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:47.509848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001475/r3tmp/tmpY0l6tF/pdisk_1.dat 2025-04-07T14:16:47.765311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:47.796826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:47.844463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:47.844565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:47.855664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:47.965614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:48.254615Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-04-07T14:16:48.255748Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-07T14:16:48.279340Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:702:2586], subTag: 1} TUpsertActor finished in 0.023387s, errors=0 2025-04-07T14:16:48.279571Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-04-07T14:16:48.279633Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-04-07T14:16:48.280367Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-07T14:16:48.280471Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 3} started fullscan actor# [1:714:2598] 2025-04-07T14:16:48.280524Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Bootstrap called, sample# 100 2025-04-07T14:16:48.280556Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Connect to# 72075186224037888 called 2025-04-07T14:16:48.281120Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-04-07T14:16:48.281820Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:711:2595], subTag: 1} finished in 0.000604s, sampled# 100, iter finished# 1, oks# 100 2025-04-07T14:16:48.281942Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 3} received keyCount# 100 2025-04-07T14:16:48.282083Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:702:2586], subTag: 3} started# 10 actors each with inflight# 1 2025-04-07T14:16:48.282130Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 2} Bootstrap called 2025-04-07T14:16:48.282162Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282197Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 3} Bootstrap called 2025-04-07T14:16:48.282218Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282238Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 4} Bootstrap called 2025-04-07T14:16:48.282249Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282262Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 5} Bootstrap called 2025-04-07T14:16:48.282273Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282285Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 6} Bootstrap called 2025-04-07T14:16:48.282296Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282307Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 7} Bootstrap called 2025-04-07T14:16:48.282337Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282358Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 8} Bootstrap called 2025-04-07T14:16:48.282374Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282386Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 9} Bootstrap called 2025-04-07T14:16:48.282396Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282411Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 10} Bootstrap called 2025-04-07T14:16:48.282422Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.282435Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 11} Bootstrap called 2025-04-07T14:16:48.282453Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-07T14:16:48.283507Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 2} session: ydb://session/3?node_id=1&id=YjEzNzQ5ZDEtNTQ0ZmQ5NWUtM2EyM2Q2OTgtMjRiNzdkZWE= 2025-04-07T14:16:48.285248Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 3} session: ydb://session/3?node_id=1&id=OWY1ZjVkMDAtZmY4YzMyY2YtMmUzYjU1ZjQtNTdmOWQ3Nzk= 2025-04-07T14:16:48.285368Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 4} session: ydb://session/3?node_id=1&id=NjViMzJiMGYtYmY4MzYzLWUzZmU5ZWFhLTMwZDdhMDU= 2025-04-07T14:16:48.286232Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 5} session: ydb://session/3?node_id=1&id=NGEzYzY1MzMtYmM2ZDYzOTMtMTEzMGFlZGUtZWExZGU2NGM= 2025-04-07T14:16:48.287018Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 6} session: ydb://session/3?node_id=1&id=NzIwMzJjNTAtYjNjMGQ0MzItMTI3ZDYyMzEtZjMzODI1NmI= 2025-04-07T14:16:48.288525Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 7} session: ydb://session/3?node_id=1&id=ZGMzMTU3NWUtYTljMjM4NGItNGM3Mzg2YTctYzAyMWVkNWI= 2025-04-07T14:16:48.288580Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 8} session: ydb://session/3?node_id=1&id=OGQ5MDI3YjgtMzZiYjdlM2MtMTIzMzIwNWUtNzQ0NTU4ZGY= 2025-04-07T14:16:48.289374Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 9} session: ydb://session/3?node_id=1&id=ODRkZWE0MTEtOGY0ZTExY2ItNjI1NzAwZDEtOTI2MmRlZjQ= 2025-04-07T14:16:48.290145Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 10} session: ydb://session/3?node_id=1&id=MjEwOGFiNzMtMzA3ODFjNmEtN2IzMjc2YjEtN2FjZmQ1NTA= 2025-04-07T14:16:48.290905Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:711:2595], subTag: 11} session: ydb://session/3?node_id=1&id=ZDZjYzUwZDQtMmExMWNmYTktYmViOGQ1N2MtN2EzMDQ5MjI= 2025-04-07T14:16:48.294007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:769:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:771:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:773:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:738:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.294979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:48.295015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Re ... UyNWEtZjEwYzk0Zi1mNTk4MDgzYi00MDFkMDc2Ng== 2025-04-07T14:16:55.825819Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 3} session: ydb://session/3?node_id=2&id=MjEzYjc4MjMtYTczOGEzNWUtZTUxMDlkNjktNjVkMTE1YTc= 2025-04-07T14:16:55.826641Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 4} session: ydb://session/3?node_id=2&id=MzhmYTQ4MzktZTQwNTM5MDAtYTNmOGMxYzgtYzc1YTAxMTg= 2025-04-07T14:16:55.827619Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 5} session: ydb://session/3?node_id=2&id=YWE0MjRkMjEtMTIxNDI4Mi00NTk3MzhlNy1kNzkzNjI3Nw== 2025-04-07T14:16:55.828565Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 6} session: ydb://session/3?node_id=2&id=N2NkMjcxOTAtOGRhM2MyNzgtODQ3N2Q4Ny0yMGRlY2Y1ZA== 2025-04-07T14:16:55.829694Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 7} session: ydb://session/3?node_id=2&id=OGRkMThjNjAtNDBiMzUwMWItOGVmMmYzNzItM2I5YjQ1YWU= 2025-04-07T14:16:55.830688Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 8} session: ydb://session/3?node_id=2&id=ZDAxY2ZlMzAtMjRjN2EyNzMtN2RmMGZiMmMtOTQzNmNmNTk= 2025-04-07T14:16:55.831614Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 9} session: ydb://session/3?node_id=2&id=MmU3MDgwNjYtOTUwY2JkNzUtZDQyM2NkMC04YjdhNmQ5Yw== 2025-04-07T14:16:55.832552Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 10} session: ydb://session/3?node_id=2&id=ZmRjMzc2YmMtZDQ3MjBiNWQtMzM0YmU3NDctMjNjM2UwYmI= 2025-04-07T14:16:55.833328Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 11} session: ydb://session/3?node_id=2&id=NGZkODM4ZWItZDA2ODJkN2EtZTFlOTAxZC02NzdhMWE5ZQ== 2025-04-07T14:16:55.835991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:738:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:770:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:771:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:773:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:776:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:778:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.836621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:780:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.837195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:791:2669], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.837905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.838144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:804:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.842607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:56.025821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:793:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.025921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:794:2672], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.025987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:795:2673], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:796:2674], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:797:2675], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:798:2676], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:799:2677], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:803:2681], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:805:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.026406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:825:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:56.382831Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 4} finished in 0.556144s, errors=0 2025-04-07T14:16:56.382959Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 4 { Tag: 4 DurationMs: 556 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:56.699153Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 10} finished in 0.866552s, errors=0 2025-04-07T14:16:56.699405Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 10 { Tag: 10 DurationMs: 866 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:56.970706Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 7} finished in 1.140964s, errors=0 2025-04-07T14:16:56.970927Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1140 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:57.350841Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 11} finished in 1.517466s, errors=0 2025-04-07T14:16:57.351131Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1517 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:57.792372Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 9} finished in 1.960702s, errors=0 2025-04-07T14:16:57.792566Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 9 { Tag: 9 DurationMs: 1960 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:58.228937Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 6} finished in 2.400318s, errors=0 2025-04-07T14:16:58.229206Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 6 { Tag: 6 DurationMs: 2400 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:58.684553Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 8} finished in 2.853812s, errors=0 2025-04-07T14:16:58.684868Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 8 { Tag: 8 DurationMs: 2853 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:59.175149Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 5} finished in 3.347487s, errors=0 2025-04-07T14:16:59.175397Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 5 { Tag: 5 DurationMs: 3347 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:16:59.657778Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 3} finished in 3.831902s, errors=0 2025-04-07T14:16:59.658048Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 3 { Tag: 3 DurationMs: 3831 OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:17:00.232935Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:711:2595], subTag: 2} finished in 4.407935s, errors=0 2025-04-07T14:17:00.233291Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished: 2 { Tag: 2 DurationMs: 4407 
OperationsOK: 100 OperationsError: 0 } 2025-04-07T14:17:00.233341Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:702:2586], subTag: 3} finished in 4.410223s, oks# 1000, errors# 0 2025-04-07T14:17:00.233556Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:711:2595] with tag# 3 >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey >> TYqlDateTimeTests::SimpleUpsertSelect |96.7%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-04-07T14:16:20.067445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574921299440157:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.067513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:20.220962Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11466: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11466 } ] E0407 14:16:20.221973707 595888 dns_resolver.cc:162] no server name supplied in dns URI E0407 14:16:20.222139682 595888 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-07T14:16:21.071584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:21.231092Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11466: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11466 } ] 2025-04-07T14:16:21.235997Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11466: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:11466 2025-04-07T14:16:22.072164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00130a/r3tmp/tmpGfMaff/pdisk_1.dat 2025-04-07T14:16:22.472625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:16:22.474222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7490574929889375276:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:22.525506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 TServer::EnableGrpc on GrpcPort 11466, node 1 2025-04-07T14:16:22.526177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:22.526509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:22.527782Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:22.528235Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:22.528296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.529120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.529172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.529248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:22.529328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.801810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:22.815062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:22.817275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2025-04-07T14:16:22.817524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.817712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.817792Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:22.830759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:22.830860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:22.830896Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:22.831339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:22.831377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:22.831389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: 
[OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:22.831877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-07T14:16:22.831911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:22.831913Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-07T14:16:22.831923Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-07T14:16:22.831929Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:22.831935Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-07T14:16:22.833543Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-07T14:16:22.833548Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-07T14:16:22.833561Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:22.833564Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.833569Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-07T14:16:22.833571Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-07T14:16:22.834546Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-07T14:16:22.834568Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:22.834574Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-07T14:16:22.834677Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-07T14:16:22.834688Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:22.834693Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-07T14:16:22.835881Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-07T14:16:22.835905Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:22.835910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-07T14:16:22.841188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035382887, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:22.841244Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715657:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035382887, at schemeshard: 72057594046644480 2025-04-07T14:16:22.841349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:22.841917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:22.842127Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-07T14:16:22.842157Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.842164Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-07T14:16:22.842199Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2025-04-07T14:16:22.842212Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.842217Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-07T14:16:22.843018Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-07T14:16:22.843030Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:22.843034Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-07T14:16:22.843319Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-07T14:16:22.843342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.843347Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-07T14:16:22.843757Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-07T14:16:22.843778Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:22.843790Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-07T14:16:22.844281Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-07T14:16:22.844308Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:22.844319Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-07T14:16:22.845937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:22.846085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:22.846209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:22.846308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:22.846356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 ... 
ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154186Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154219Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154291Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154322Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154385Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154418Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154480Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154512Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154576Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154607Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154671Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154704Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154774Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154804Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154862Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154902Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154955Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.154996Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155046Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155084Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155136Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155172Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155222Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155264Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155318Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155360Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155410Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155450Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155503Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155542Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155595Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155635Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155682Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155727Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155771Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155809Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155863Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155893Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.155947Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156004Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156049Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156104Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156137Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156241Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156275Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156339Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156381Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156443Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156494Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156564Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156591Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.156653Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.158750Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.158914Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.158962Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159140Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159183Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159283Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159390Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159498Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159604Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159837Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.159881Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160076Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160115Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160283Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160320Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160489Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160526Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160627Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160759Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160873Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160939Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.160974Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161109Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161229Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161338Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161506Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161550Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161719Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161856Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.161964Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162075Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162185Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162233Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162398Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162551Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162670Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.162828Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163105Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163162Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163276Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163381Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163580Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163620Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163822Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.163866Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164053Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164096Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164265Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164303Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164466Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164504Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164653Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164875Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164927Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.164997Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165052Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165108Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165157Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165211Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165335Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165374Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165427Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165496Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165612Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165646Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165707Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165754Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165818Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165853Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.165927Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166028Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166156Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166221Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166350Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166400Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166498Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166561Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-07T14:17:00.166631Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning |96.7%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> KqpPg::NoSelectFullScan [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> YdbYqlClient::ConnectDbAclIsStrictlyChecked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::NoSelectFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 23403, MsgBus: 2614 2025-04-07T14:15:26.800227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574690650176055:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.800318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002195/r3tmp/tmpaPD1sd/pdisk_1.dat 2025-04-07T14:15:27.159487Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23403, node 1 2025-04-07T14:15:27.226020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.226465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.231417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.282600Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.282623Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.282634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.282752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2614 TClient is connected to server localhost:2614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.878243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:27.889376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:15:29.486161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574703535078437:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.486171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574703535078449:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.486270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.490984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.501793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574703535078451:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 6265, MsgBus: 4651 2025-04-07T14:15:30.517505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574706188488896:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:30.517675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002195/r3tmp/tmp7UhnUR/pdisk_1.dat 2025-04-07T14:15:30.612453Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6265, node 2 2025-04-07T14:15:30.647015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:30.647120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:30.649912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:30.661720Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:30.661737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:30.661742Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:30.661802Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4651 TClient is connected to server localhost:4651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:31.024704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:15:31.031215Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 16 --!syntax_pg CREATE TABLE Pg16 ( key bool PRIMARY KEY, value bool ); 2025-04-07T14:15:33.013631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574719073391419:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.013706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.029386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'false'::bool, 'false'::bool ) 2025-04-07T14:15:33.153970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574719073391522:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.154037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574719073391527:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.154041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:33.156778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:15:33.163720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574719073391529:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'true'::bool, 'true'::bool ) f f t t --!syntax_pg CREATE TABLE Pg16array ( key int2 PRIMARY KEY, value _bool ); 2025-04-07T14:15:33.838848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 21 --!syntax_pg CREATE TABLE Pg21 ( key int2 PRIMARY KEY, value int2 ); 2025-04-07T14:15:34.534176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 --!syntax_pg CREATE TABLE Pg21array ( key int2 PRIMARY KEY, value _int2 ); 2025-04-07T14:15:35.352420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21array ... _client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002195/r3tmp/tmp0HbvVw/pdisk_1.dat 2025-04-07T14:16:54.152351Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:54.190438Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:54.190551Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:54.192371Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10366, node 6 2025-04-07T14:16:54.236138Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:54.236162Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:54.236173Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:54.236322Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23067 TClient is connected to server localhost:23067 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:54.732159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:57.092425Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490575081051230277:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:57.092486Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490575081051230264:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:57.092909Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:57.096335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:57.104534Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490575081051230287:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:57.225370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.591809Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NjQ4YjhmNTMtNDUxMDViM2ItM2NmNzI1MWEtZTEyYjhlY2U=, ActorId: [6:7490575081051230449:2318], ActorState: ExecuteState, TraceId: 01jr89s36j09hgvry3mw76xj45, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:957: ydb/core/kqp/query_data/kqp_query_data.cpp:517: Terminate was called, reason(56): ERROR: invalid byte sequence for encoding "UTF8": 0x00 Trying to start YDB, gRPC: 6262, MsgBus: 64879 2025-04-07T14:16:58.351263Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490575085365294771:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:58.351323Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002195/r3tmp/tmpDeHY9J/pdisk_1.dat 2025-04-07T14:16:58.457386Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6262, node 7 2025-04-07T14:16:58.478647Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:58.478744Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:58.480584Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:58.519075Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:58.519102Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:58.519114Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:58.519248Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64879 TClient is connected to server localhost:64879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:59.048441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:01.558168Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490575098250197286:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:01.558194Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490575098250197291:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:01.558242Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:01.562371Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:01.573177Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490575098250197300:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:17:01.696139Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpOlapScheme::InsertDropAddColumn [GOOD] >> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> 
TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-04-07T14:16:52.711695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575060895841164:2177];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.711767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001887/r3tmp/tmpVw7Wxb/pdisk_1.dat 2025-04-07T14:16:53.031529Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24590, node 1 2025-04-07T14:16:53.090145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.090202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:53.101310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:53.201226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.201249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.201254Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.201316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:53.584546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.589735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.591222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.597890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:53.598283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.598311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:16:53.600546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.600578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:53.601013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:53.602234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.605873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413652, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.605902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:53.606263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:53.608725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.608913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.609137Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:53.609403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:53.609448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:53.609503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:53.612651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:53.612721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:53.612732Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:53.612789Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:56.327396Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575076093501888:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:56.327477Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001887/r3tmp/tmpI3Qkue/pdisk_1.dat 2025-04-07T14:16:56.390400Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20918, node 4 2025-04-07T14:16:56.439122Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.439179Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.443034Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:56.447711Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.447736Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.447742Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.447826Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:56.635262Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.635515Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:56.635534Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.637049Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:56.637161Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:56.637176Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:16:56.637872Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:56.638177Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:56.638195Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:56.639177Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.641531Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035416690, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:56.641549Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:56.641714Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:56.642718Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:56.642801Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:56.642837Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:56.642894Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:56.642914Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:56.642937Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:56.643503Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720 ... 
ARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:04.190025Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:04.191572Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:04.191721Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:04.191768Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:04.191857Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:04.191895Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:04.191941Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:04.192302Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:04.192344Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:04.192365Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:04.192425Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:06.468559Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:06.469053Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:17:06.469711Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:06.469749Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:06.471682Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:06.471878Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:06.472085Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:06.472175Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:06.472525Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:06.472844Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2025-04-07T14:17:06.472879Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:06.472898Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:06.473065Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:06.473093Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:06.473102Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:06.477050Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:06.477241Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:06.477369Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:06.477470Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:06.477493Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:17:06.478892Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:06.535138Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:06.535164Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:06.535432Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:06.535449Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:06.535595Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:06.535609Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:06.536169Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:06.536190Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:06.536214Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:17:06.537339Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:06.540027Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035426588, transactions count in step: 1, at 
schemeshard: 72057594046644480 2025-04-07T14:17:06.540121Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035426588 2025-04-07T14:17:06.540243Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T14:17:06.541579Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:06.541902Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:06.541960Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:06.543364Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:06.543409Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:06.543427Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:06.543662Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:06.543693Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:06.543703Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:06.548263Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1744035426588 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1193 } } 2025-04-07T14:17:06.548523Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976715658 Step: 1744035426588 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 1165 } } 2025-04-07T14:17:06.549697Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744035426588 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 937 } } 2025-04-07T14:17:06.549923Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715658 Step: 1744035426588 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 959 } } 2025-04-07T14:17:06.551472Z node 10 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:06.551687Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:06.552192Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:06.552348Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:06.552377Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:06.552422Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:17:06.554648Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:06.554747Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:06.554801Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> YdbTableBulkUpsert::Types >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> YdbYqlClient::TestReadWrongTable [GOOD] >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 23829, MsgBus: 2893 2025-04-07T14:12:01.471659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573809784458254:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:01.471888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/001414/r3tmp/tmppnLKB4/pdisk_1.dat 2025-04-07T14:12:01.751120Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23829, node 1 2025-04-07T14:12:01.808140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:12:01.808156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:12:01.808161Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:12:01.808234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:01.847312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:01.847408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:01.849062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2893 TClient is connected to server localhost:2893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:02.202418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, TTL = Interval("PT1H") ON created_at); 2025-04-07T14:12:03.593268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573818374393476:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.593359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:03.763972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:12:03.819796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:12:03.820006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:12:03.820310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:12:03.820435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:12:03.820540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:12:03.820689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:12:03.820810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:12:03.820901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:12:03.820993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:12:03.821119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:12:03.821226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:12:03.821340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490573818374393552:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-07T14:12:03.824159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:12:03.824207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:12:03.824318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:12:03.824354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:12:03.824560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:12:03.824603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:12:03.824756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:12:03.824804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:12:03.824882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:12:03.824909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:12:03.824946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:12:03.825014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:12:03.825478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:12:03.825537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:12:03.825774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:12:03.825808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2025-04-07T14:12:03.825944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:12:03.825973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:12:03.826178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:12:03.826210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:12:03.826358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:12:03.826393Z node 1 :TX_COLUMNSHARD WARN: table ... 7:06.123793Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:06.157401Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:06.157543Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:06.160087Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31542, node 14 2025-04-07T14:17:06.224954Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:06.224992Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:06.225008Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:06.225176Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11874 TClient is connected to server localhost:11874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:06.975510Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/TableWithFamily` (Key Uint64 FAMILY family1 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-04-07T14:17:10.869844Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7490575137127880302:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:10.869979Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:10.898629Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:10.972706Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:17:10.972802Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:17:10.973212Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:17:10.973415Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:17:10.973587Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:17:10.973753Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:17:10.973918Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:17:10.974082Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:17:10.974239Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:17:10.974363Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:17:10.974536Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:17:10.974705Z node 14 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[14:7490575137127880349:2308];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:17:10.982772Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:17:10.982871Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:17:10.983050Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:17:10.983094Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:17:10.983390Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:17:10.983423Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:17:10.983617Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:17:10.983673Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:17:10.983776Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:17:10.983808Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:17:10.983866Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:17:10.983901Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:17:10.984561Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:17:10.984623Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:17:10.984954Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:17:10.985000Z node 
14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:17:10.985217Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:17:10.985256Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:17:10.985595Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:17:10.985635Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:17:10.985820Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:17:10.985868Z node 14 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:17:10.988465Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7490575115653043189:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:10.988900Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-04-07T14:16:55.941763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575073092111106:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:55.942035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001880/r3tmp/tmpiIdC5K/pdisk_1.dat 2025-04-07T14:16:56.222678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13378, node 1 2025-04-07T14:16:56.301212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.301327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.305440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:56.318679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.318704Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.318714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.318786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:56.537073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.541159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:56.548294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.550104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:56.550300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:56.550317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:56.551173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:56.551608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:56.551629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:56.552813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.555662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035416599, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:56.555694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:56.555933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:56.557388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:56.557593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:56.557662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:56.557768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:56.557814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:56.557860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:56.559586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:56.559622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:56.559633Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:56.559715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:58.069275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575085977013976:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:58.069276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575085977013968:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:58.069360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:58.071608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:58.071815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:58.071861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T14:16:58.071923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:58.071956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T14:16:58.072056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:58.072129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:16:58.072321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T14:16:58.072549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:58.072577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:16:58.074858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:16:58.075123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:58.075679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:58.075788Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T14:16:58.076018Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:58.076108Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:58.076219Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:58.076410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:16:58.077437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2025-04-07T14:16:58.077489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:58.077540Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:58.077808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:58.077837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:58.077848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:16:58.078029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:58.078061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:58.078069Z node 1 :F ... wYzQ0NDUtMzQ0YjZjYzgtYzM3Nzk4MmI=, ActorId: [7:7490575119320739670:2298], ActorState: ExecuteState, TraceId: 01jr89scmpbr2ska7peka7r366, Create QueryResponse for error on request, msg: 2025-04-07T14:17:08.425964Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490575129105770540:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:08.426052Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001880/r3tmp/tmptYBwl0/pdisk_1.dat 2025-04-07T14:17:08.522016Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:08.542964Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:08.543046Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:08.544997Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19046, node 10 2025-04-07T14:17:08.599539Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:08.599561Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:08.599567Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:08.599671Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:08.797676Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:08.798006Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:08.798032Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:08.799727Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:08.799879Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:08.799898Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:08.800566Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:08.801289Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:08.801315Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:08.802513Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:08.805542Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035428849, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:08.805568Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:08.805771Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:08.807308Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:08.807449Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:08.807511Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:08.807593Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:08.807633Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:08.807667Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:08.808431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:08.808478Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:08.808494Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:08.808559Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:08.837199Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr89sef467af9qmaq9dx5r5r, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60316, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998532s 2025-04-07T14:17:08.843882Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr89sefb2xp8em6p8b7j930g, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60332, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T14:17:11.246117Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr89sgtd5fjn6tr85bvfbsa6, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60346, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), 
timeout# undef 2025-04-07T14:17:11.248574Z node 10 :TX_PROXY ERROR: [ReadTable [10:7490575141990673416:2299] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-04-07T14:17:11.248767Z node 10 :TX_PROXY ERROR: [ReadTable [10:7490575141990673416:2299] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-04-07T14:17:11.249236Z node 10 :READ_TABLE_API NOTICE: [10:7490575141990673415:2299] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-04-07T14:17:11.252982Z node 10 :GRPC_SERVER DEBUG: [0x51a00007ce80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.252984Z node 10 :GRPC_SERVER DEBUG: [0x51a00004f880] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253201Z node 10 :GRPC_SERVER DEBUG: [0x51a00006ea80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253212Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b7080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253363Z node 10 :GRPC_SERVER DEBUG: [0x51a00006e480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253412Z node 10 :GRPC_SERVER DEBUG: [0x51a00007c880] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253506Z node 10 :GRPC_SERVER DEBUG: [0x51a000031280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253579Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a6280] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253661Z node 10 :GRPC_SERVER DEBUG: [0x51a00006d880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253729Z node 10 :GRPC_SERVER DEBUG: [0x51a00006d280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253793Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a2680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253896Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a2080] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.253952Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a1a80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254063Z node 10 :GRPC_SERVER DEBUG: [0x51b000175a80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254117Z node 10 :GRPC_SERVER DEBUG: [0x51a00006c080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254241Z node 10 :GRPC_SERVER DEBUG: [0x51a00006b480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254284Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c4e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254393Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c1280] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254429Z node 10 :GRPC_SERVER DEBUG: [0x51a00003d280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254540Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ca280] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254562Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c9c80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254676Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a5680] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:11.254702Z node 10 :GRPC_SERVER DEBUG: [0x51a00006c680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 
2025-04-07T14:17:11.254833Z node 10 :GRPC_SERVER DEBUG: [0x51a00003e480] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-04-07T14:16:53.500522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575063468078417:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:53.500612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001881/r3tmp/tmpww9Q4h/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15989, node 1 2025-04-07T14:16:53.831424Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.844398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:16:53.844502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:16:53.844519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:53.844842Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.846895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:16:53.849820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:53.850580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.850744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:53.856853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.857939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.857968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.858023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:53.858146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.878531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.878572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.878582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.878676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:53.887932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.888108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:53.895111Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:54.122386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:54.127990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:54.134983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:54.136781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:54.136952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:54.136988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:54.137847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:54.138285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:54.138306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:54.139389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:54.142054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035414184, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:54.142081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:54.142283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:54.143726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:54.143853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:54.143904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:54.143992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:54.144020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:54.144048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:54.145844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:54.145890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:54.145905Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:54.145959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:56.713198Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575074993361614:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:56.713270Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001881/r3tmp/tmpO63rpz/pdisk_1.dat 2025-04-07T14:16:56.770986Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9383, node 4 2025-04-07T14:16:56.823588Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.823653Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-07T14:16:56.826208Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:56.833540Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.833573Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.833580Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.833678Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:57.022118Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.022398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:57.022432Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.023876Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:57.024027Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:57.024047Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:16:57.024644Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId ... 
15660 2025-04-07T14:17:11.011072Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.011090Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2025-04-07T14:17:11.011291Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.011315Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.011324Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T14:17:11.011468Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.011495Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.011505Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T14:17:11.011603Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.011628Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.011637Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2025-04-07T14:17:11.011754Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.011775Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.011787Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2025-04-07T14:17:11.017603Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035431061, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:11.017662Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035431061, at schemeshard: 72057594046644480 2025-04-07T14:17:11.017764Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2025-04-07T14:17:11.017869Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035431061, at schemeshard: 72057594046644480 2025-04-07T14:17:11.017906Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2025-04-07T14:17:11.017959Z node 13 
:FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035431061, at schemeshard: 72057594046644480 2025-04-07T14:17:11.017996Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2025-04-07T14:17:11.018050Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1744035431061 2025-04-07T14:17:11.018090Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2025-04-07T14:17:11.019716Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.020256Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.020339Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:17:11.020434Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2025-04-07T14:17:11.020613Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2025-04-07T14:17:11.020684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2025-04-07T14:17:11.020852Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:3 ProgressState 2025-04-07T14:17:11.020899Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2025-04-07T14:17:11.020993Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2025-04-07T14:17:11.021035Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2025-04-07T14:17:11.021062Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:17:11.021101Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-07T14:17:11.021115Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2025-04-07T14:17:11.021133Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2025-04-07T14:17:11.021152Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2025-04-07T14:17:11.022513Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.022555Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.022578Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-07T14:17:11.022766Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.022787Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 
2025-04-07T14:17:11.022797Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:11.022950Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.022970Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.022980Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:11.023108Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.023134Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.023144Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T14:17:11.023269Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:11.023301Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:11.023310Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2025-04-07T14:17:11.023348Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2025-04-07T14:17:11.025962Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490575135088992708:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:17:11.078255Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:11.078422Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:11.080436Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:11.153359Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89sgjpasm275k5gm6w258m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTM3ZjFmZjUtNTdjZTM0NjctOGMxMjNlYTQtMTI2ZjMwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:11.157777Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89sgjpasm275k5gm6w258m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTM3ZjFmZjUtNTdjZTM0NjctOGMxMjNlYTQtMTI2ZjMwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:11.229990Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89sgqs31gn111ctg3n1ef2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTM3ZjFmZjUtNTdjZTM0NjctOGMxMjNlYTQtMTI2ZjMwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:11.234290Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89sgqs31gn111ctg3n1ef2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTM3ZjFmZjUtNTdjZTM0NjctOGMxMjNlYTQtMTI2ZjMwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2025-04-07T14:16:52.794841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575059979163045:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.795470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001886/r3tmp/tmpd0g0KB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15477, node 1 2025-04-07T14:16:53.101323Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.101356Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.106477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:16:53.106549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:16:53.106555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:53.108980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:16:53.120289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:53.121284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.121453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:53.124229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.124938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.124961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.125000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:53.125062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.149610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.149724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:53.158777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:53.197066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.197088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.197095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.197191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:65408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:53.573935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.580268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.581606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.584900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:53.585087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.585118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:53.587021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:53.588306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.598001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.598037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:53.598309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:53.599844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.599959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.600034Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:53.600119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:53.600156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:53.600188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:53.602386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:53.602438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:53.602450Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:53.602504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:56.229665Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575078834723929:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:56.229740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001886/r3tmp/tmppQsscS/pdisk_1.dat 2025-04-07T14:16:56.308084Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:56.333293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.333368Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.335853Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16685, node 4 2025-04-07T14:16:56.374019Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.374044Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.374051Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.374152Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:56.589903Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.590243Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:56.590282Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:56.592088Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:56.592282Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:56.592301Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:16:56.593176Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txI ... hId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:03.211223Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:03.211512Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:03.211535Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:03.213272Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:03.213427Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:03.213445Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:03.214294Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:03.214743Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:03.214768Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:03.215795Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:03.218073Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035423263, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:03.218097Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:03.218274Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:03.219412Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:03.219545Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:03.219590Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:03.219668Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:03.219706Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:03.219742Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:03.220144Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:03.220180Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:03.220194Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:17:03.220242Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-07T14:17:05.835940Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:17:05.843107Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:17:05.850244Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {SUCCESS, 0} 2025-04-07T14:17:05.860896Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:17:05.869222Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:17:07.156431Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490575122094482965:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:07.156496Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001886/r3tmp/tmphdLw58/pdisk_1.dat 2025-04-07T14:17:07.245076Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:07.274390Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:07.274491Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:07.277109Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8324, node 13 2025-04-07T14:17:07.320680Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:07.320699Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:07.320707Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:07.320824Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26328 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:07.553485Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.553825Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:07.553856Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.555959Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:07.556170Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:07.556192Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:07.557120Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:07.557803Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:07.557825Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:07.559136Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.562019Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035427610, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:07.562049Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:07.562278Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:07.563877Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:07.564048Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.564109Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:07.564188Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:07.564228Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:07.564278Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:07.564966Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:07.565010Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:07.565026Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:07.565090Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> YdbTableBulkUpsert::Nulls >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> YdbYqlClient::SecurityTokenAuth >> YdbYqlClient::TestReadTableMultiShardWholeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] Test command err: 2025-04-07T14:16:56.520873Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575077191842520:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:56.520958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00187c/r3tmp/tmpTA0Hri/pdisk_1.dat 2025-04-07T14:16:56.759779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61996, node 1 2025-04-07T14:16:56.825097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.825116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.825124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.825227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:56.848741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.848873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.851809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:57.005632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.010833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:57.012012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.013884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:57.014074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:57.014094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:16:57.014870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:57.015409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:57.015437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:57.016807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.019799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035417068, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:57.019824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:57.020079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:57.021322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:57.021476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:57.021524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:57.021623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:57.021663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:57.021700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:57.023510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:16:57.023564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:16:57.023580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:57.023637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:16:57.061069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.061203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:16:57.061492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:57.061513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.062801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2025-04-07T14:16:57.062970Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:57.063144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:57.063209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet# 72057594046644480 2025-04-07T14:16:57.063504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:16:57.063669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:57.063708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:57.063722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:57.063921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:57.063946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:57.063952Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:16:57.066728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:16:57.066812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:16:57.124886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:16:57.126073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:57.128266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035417173, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:57.128296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T14:16:57.128357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T14:16:57.129415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:57.129598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:57.129654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:16:57.129707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:16:57.129737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:16:57.129826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 
2025-04-07T14:16:57.130287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:57.130333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:57.130345Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:16:57.130499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:16:57.130520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:16:57.130548Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:16:57.130577Z node 1 : ... D INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T14:17:11.063856Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715662:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:11.063945Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 2 -> 3 2025-04-07T14:17:11.075578Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 3 -> 128 2025-04-07T14:17:11.076930Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715662:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:11.079636Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035431124, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:11.079673Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715662:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T14:17:11.079763Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 128 -> 240 2025-04-07T14:17:11.081471Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.081733Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.081794Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-04-07T14:17:11.081869Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-07T14:17:11.081920Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-07T14:17:11.082029Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-04-07T14:17:11.082684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-07T14:17:11.082734Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046644480, txId: 281474976715662 2025-04-07T14:17:11.082749Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2025-04-07T14:17:11.082903Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-07T14:17:11.082915Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-07T14:17:11.082937Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 4 2025-04-07T14:17:11.082963Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-04-07T14:17:11.247945Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/node2, operationId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.248127Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:11.248147Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.248201Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-04-07T14:17:11.248265Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:0 2025-04-07T14:17:11.248278Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715663, publications: 2, subscribers: 0 2025-04-07T14:17:11.251072Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: user_a@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /Root/node2, add access: +(SR):user_b@builtin 2025-04-07T14:17:11.251221Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.251564Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.253387Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-04-07T14:17:11.253436Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715663 2025-04-07T14:17:11.253454Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715663, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:11.253715Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-04-07T14:17:11.253754Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715663 2025-04-07T14:17:11.253765Z node 13 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046644480, txId: 281474976715663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2025-04-07T14:17:11.253799Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715663, subscribers: 0 2025-04-07T14:17:11.272445Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/node3, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.272627Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715664:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:11.272652Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.272728Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2025-04-07T14:17:11.272810Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2025-04-07T14:17:11.272844Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 2, subscribers: 0 2025-04-07T14:17:11.274539Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715664, database: /Root, subject: user_a@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /Root/node3, add access: +(SR):user_b@builtin, add access: +(UR):user_b@builtin 2025-04-07T14:17:11.274683Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.275017Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.275774Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:17:11.275846Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:17:11.275863Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:11.276093Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:17:11.276117Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:17:11.276132Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 15 2025-04-07T14:17:11.276171Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 0 2025-04-07T14:17:11.295475Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/node4, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.295640Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 
2025-04-07T14:17:11.295658Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.295715Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-07T14:17:11.295801Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-07T14:17:11.295807Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 0 2025-04-07T14:17:11.297629Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: user_a@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /Root/node4, add access: +(SR):user_b@builtin, add access: +(UR):user_b@builtin, add access: +(ER):user_b@builtin 2025-04-07T14:17:11.297787Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.298080Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.298743Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:17:11.298777Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:17:11.298790Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:11.298949Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-07T14:17:11.299000Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-07T14:17:11.299023Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 16 2025-04-07T14:17:11.299053Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::CopyTables >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateExternalTablet >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> THiveTest::TestCreateExternalTablet [GOOD] >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> YdbYqlClient::TestDescribeDirectory [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] ------- [TM] 
{asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-04-07T14:17:00.446503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575095882162993:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:00.447162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001877/r3tmp/tmpqQhc8Z/pdisk_1.dat 2025-04-07T14:17:00.693169Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14176, node 1 2025-04-07T14:17:00.774160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:00.774189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:00.774204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:00.774318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:00.780505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:00.780677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:00.783810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:01.011877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:01.016848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:01.025220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:01.027373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:01.027586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:01.027614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:01.028599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:01.029275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:01.029294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:01.030682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:01.034125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035421079, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:01.034162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:01.034450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:01.036202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:01.036374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:01.036430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:01.036518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:01.036564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:01.036616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:01.038971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:01.039024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:01.039040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:01.039109Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:02.599396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575104472098574:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:02.599397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575104472098569:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:02.599469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:02.601642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:02.601808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:02.601830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T14:17:02.601874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:02.601883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T14:17:02.601938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:02.602002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:02.602209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T14:17:02.602431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:02.602463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:02.604543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:02.604872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:02.605363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:02.605463Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T14:17:02.605706Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:02.605822Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:02.605947Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:02.606145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:02.607190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2025-04-07T14:17:02.607242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:02.607264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:02.607430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:02.607474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:02.607483Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:02.607605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:02.607650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:02.607676Z node 1 : ... ode 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.457194Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.457287Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:15.457527Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:15.458090Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:15.458132Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:15.458149Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:15.458348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:15.458370Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:15.458380Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:17:15.460201Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035435506, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.460233Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035435506, at schemeshard: 72057594046644480 2025-04-07T14:17:15.460321Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 128 -> 240 2025-04-07T14:17:15.461628Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.461792Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.461839Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:15.461911Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:15.461948Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:17:15.461991Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-07T14:17:15.462382Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:15.462422Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:15.462435Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:15.462582Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:15.462606Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:15.462616Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:15.462647Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T14:17:15.470810Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7490575158276846097:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:15.470887Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:15.488815Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo/Test, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.489223Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 1 -> 2 2025-04-07T14:17:15.489737Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:15.489758Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.491149Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo/Test 2025-04-07T14:17:15.491335Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.491479Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.491540Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:15.491775Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:17:15.492177Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:15.492218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:15.492232Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2025-04-07T14:17:15.492353Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:15.492369Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:15.492375Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2025-04-07T14:17:15.494962Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:15.495004Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T14:17:15.495956Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715659:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:15.551039Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715659:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:15.551058Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:15.551098Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 281474976715659:0 3 -> 128 2025-04-07T14:17:15.552390Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715659:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:15.555460Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035435597, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.555509Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715659:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035435597 2025-04-07T14:17:15.555631Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 129 2025-04-07T14:17:15.557574Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.557875Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.557934Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:15.559572Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:15.559628Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:15.559649Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T14:17:15.559941Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:15.560007Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:15.560024Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:17:15.562334Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715659 Step: 1744035435597 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1480 } } 2025-04-07T14:17:15.562723Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:15.562754Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.562774Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 129 -> 240 2025-04-07T14:17:15.563854Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:15.563931Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:17:15.563991Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715659:0 >> TYqlDateTimeTests::SimpleOperations [GOOD] >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: 2025-04-07T14:16:59.100825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575088858283533:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:59.100896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001878/r3tmp/tmpWlouC9/pdisk_1.dat 2025-04-07T14:16:59.336015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15861, node 1 2025-04-07T14:16:59.405591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:59.405613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:59.405624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:59.405734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:59.425743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:59.425878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:59.428281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:59.602407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.612547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:59.613806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.615605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:59.615781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:59.615799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:16:59.616692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:59.617047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:59.617068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:59.618168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.620693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035419665, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:59.620715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:59.620988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:59.622173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:59.622303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:59.622340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:59.622420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:59.622449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:59.622477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:59.623799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:59.623830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:59.623843Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:59.623938Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:18019 2025-04-07T14:16:59.800053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.800451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:59.800476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.803105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-04-07T14:16:59.809518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035419854, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:59.810892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:16:59.810939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T14:16:59.811664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T14:16:59.815323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.815803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:59.815826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:16:59.817113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-04-07T14:17:00.319714Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575093705249266:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:00.319762Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:17:00.332519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:00.332638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:00.335061Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:17:00.335807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:00.733007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035420778, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:00.735118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:17:00.735284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-07T14:17:00.736246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-07T14:17:01.461166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:01.462499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:01.462535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:01.462717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976710660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-04-07T14:17:01.462872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:01.462926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-07T14:17:01.463571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:01.465747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, ope ... 
e) 2025-04-07T14:17:13.201467Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:13.468314Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:13.468649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:13.468720Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:13.470359Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:13.470570Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:13.470590Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:13.471252Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:13.475826Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:13.475860Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:13.477598Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:13.480902Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035433525, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:13.480939Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:13.481151Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:13.482737Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:13.482909Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:13.482999Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:13.483106Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:13.483166Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:13.483227Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:13.483832Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:13.483878Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:13.483894Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:13.483956Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:16.437654Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.438180Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:17:16.438737Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:16.438777Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.440683Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2025-04-07T14:17:16.440897Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:16.441082Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:16.441178Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:16.441496Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:16.441813Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:16.441844Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:16.441858Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:16.442002Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:16.442019Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:16.442025Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:16.445198Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:16.445272Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:17:16.446385Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:16.501103Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:16.501130Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:16.501206Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:17:16.502984Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:16.505794Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035436549, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:16.505866Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035436549 2025-04-07T14:17:16.505973Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T14:17:16.507307Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:16.507525Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2025-04-07T14:17:16.507572Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:16.508460Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:16.508490Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:16.508500Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:16.508622Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:16.508636Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:16.508642Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:16.510207Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744035436549 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1062 } } 2025-04-07T14:17:16.510561Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:16.510603Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.510630Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:17:16.511800Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:16.511892Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:16.511943Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> YdbYqlClient::SecurityTokenAuth [GOOD] >> YdbYqlClient::RetryOperationTemplate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::SimpleOperations [GOOD] Test command err: 2025-04-07T14:16:52.724690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575061001756288:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.725538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018d4/r3tmp/tmpDCNkzn/pdisk_1.dat 2025-04-07T14:16:53.034072Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18959, node 1 2025-04-07T14:16:53.104568Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.104678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:53.144208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:53.201168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.201193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.201198Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.201272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:53.574399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.580633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.582276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.584768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:53.584972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.585004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:53.587032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:53.588807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.595635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.595679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:53.595983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:53.598457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.598650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.598719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:53.598836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:53.598895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:53.598947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:53.606333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:53.606405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:53.606432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:53.606523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:16345 2025-04-07T14:16:53.829932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.830309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.830337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.833088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-07T14:16:53.833240Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.833481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.833539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.833559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 waiting... 2025-04-07T14:16:53.834812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.834857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.834872Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:53.835088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.835131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.835140Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:16:53.835345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:16:53.836297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.836320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:16:53.837566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.840057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413883, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.840083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-07T14:16:53.840272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:16:53.841591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.841801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.841833Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:16:53.841885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:16:53.841911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:16:53.841939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 
2025-04-07T14:16:53.842770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.842796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.842808Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:16:53.843092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.843111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.843120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:16:53.843150Z node 1 :FLA ... :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2025-04-07T14:17:14.947381Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035434988, at schemeshard: 72057594046644480 2025-04-07T14:17:14.947423Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2025-04-07T14:17:14.947479Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1744035434988 2025-04-07T14:17:14.947525Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2025-04-07T14:17:14.949114Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:14.949621Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:14.949687Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-04-07T14:17:14.949782Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2025-04-07T14:17:14.949932Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2025-04-07T14:17:14.949979Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2025-04-07T14:17:14.950053Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:3 ProgressState 2025-04-07T14:17:14.950097Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2025-04-07T14:17:14.950177Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2025-04-07T14:17:14.950221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2025-04-07T14:17:14.950248Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-07T14:17:14.950274Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-07T14:17:14.950291Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 281474976715660:2 2025-04-07T14:17:14.950300Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2025-04-07T14:17:14.950319Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2025-04-07T14:17:14.951948Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:14.952007Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:14.952026Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-07T14:17:14.952234Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:14.952264Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:14.952275Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:14.952408Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:14.952433Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:14.952443Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:14.952558Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:14.952611Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:14.952631Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T14:17:14.953487Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-04-07T14:17:14.953518Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2025-04-07T14:17:14.953528Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2025-04-07T14:17:14.953568Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2025-04-07T14:17:14.955201Z node 13 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolCreatorActor] ActorId: [13:7490575152104669585:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:17:15.023805Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:15.024066Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:15.026385Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:15.130281Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89smdn1msvce1m1tda0485, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:15.230773Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89smm1506ve8fwnqzts6gh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:15.633570Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89smq7dsyzt4knnz84299t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:15.640375Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89smq7dsyzt4knnz84299t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.038170Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr89sn40c9qtgc5psyzt9a52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.045705Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89sn40c9qtgc5psyzt9a52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.135606Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr89sngp1g08s0ynw1ph1hxh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:17:16.228058Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89snkbcf8203zmvtthjtdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.324574Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr89snpbb8svfn108e9er2nb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.427006Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr89sns9dd1drg6k62h7pnxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.540818Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr89snwf3amhkjrxdy24zmts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.782257Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7490575139219766431:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:16.782360Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:17:16.822025Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr89sp00662kayf6p7sz48tg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.825182Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr89sp00662kayf6p7sz48tg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDQwNjU2ZWQtZjkwMmJkZDMtOWM0N2U2NDctZGVlOTI2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> TYqlDecimalTests::NegativeValues [GOOD] >> TYqlDecimalTests::DecimalKey >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> TGRpcYdbTest::RemoveNotExistedDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-04-07T14:16:52.714603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575058663587586:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.714970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00189c/r3tmp/tmppfVLzH/pdisk_1.dat 2025-04-07T14:16:53.035049Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61329, node 1 2025-04-07T14:16:53.116546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.116748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:53.123699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:53.197071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.197107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.197116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.197213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:53.573365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.585915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.587033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.588693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:53.588824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.588848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:16:53.590232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.590254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:53.591251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.592310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:53.594635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.594687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:53.595161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:53.596849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.596990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.597032Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:53.597092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:53.597127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:53.597157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:53.599802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:53.599841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:53.599851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:53.599901Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:63121 2025-04-07T14:16:53.809954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.813635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:16:53.813862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.813889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.816836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-07T14:16:53.817077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.817292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.817396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 waiting... 2025-04-07T14:16:53.818259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.818318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.818334Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:16:53.818538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:16:53.818571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:16:53.818582Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:16:53.818789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:16:53.834065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:16:53.834429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:16:53.834599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:16:53.834767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 
2025-04-07T14:16:53.834800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:16:53.841842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:16:53.883582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:16:53.903853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:16:53.904024Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-07T14:16:53.913534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:16:53.913900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:16:53.914201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:16:53.914309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:16:53.914391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:16:53.914477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490575062958555766:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... 
-04-07T14:17:16.743363Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:16.743412Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2025-04-07T14:17:16.743517Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2025-04-07T14:17:16.743559Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2025-04-07T14:17:16.743584Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:17:16.743625Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2025-04-07T14:17:16.743647Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2025-04-07T14:17:16.743657Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2025-04-07T14:17:16.743673Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2025-04-07T14:17:16.745147Z node 10 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:17:16.745196Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:16.745230Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:16.745245Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:17:16.745443Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:16.745493Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:16.745504Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:16.745655Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:16.745696Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:16.745706Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:16.745832Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:16.745855Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 
2025-04-07T14:17:16.745866Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:16.745961Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:16.745983Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:16.746008Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T14:17:16.746043Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:17:16.747775Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490575161093639151:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:17:16.846500Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:16.846689Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:16.848942Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:16.972934Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:17:16.973090Z node 10 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2025-04-07T14:17:16.975441Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:17:16.977741Z node 10 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1744035437025 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 7490575148208736641 RawX2: 42949675324 } } Step: 1744035437025 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:17:16.977779Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:17:16.977875Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:17:16.977894Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:17:16.977912Z node 10 :TX_DATASHARD DEBUG: Found ready operation [1744035437025:281474976715661] in PlanQueue unit at 72075186224037888 2025-04-07T14:17:16.978059Z node 10 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035437025:281474976715661 keys extracted: 0 2025-04-07T14:17:16.978356Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:17:16.979659Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035437025 txid# 281474976715661} 2025-04-07T14:17:16.979689Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035437025} 2025-04-07T14:17:16.979723Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:17:16.979769Z node 10 :TX_DATASHARD DEBUG: Complete [1744035437025 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [10:7490575161093639260:2824], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:17:16.979805Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:17:16.980450Z node 10 :KQP_EXECUTER ERROR: 
TxId: 281474976715662. Ctx: { TraceId: 01jr89sp5mafey925zwhsrq50n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2EyYWExZDEtNmZiMWE5N2ItZDJkMmEyYTAtZmNmM2ExZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:16.982884Z node 10 :TX_DATASHARD INFO: Start scan, at: [10:7490575161093639289:2128], tablet: [10:7490575161093639007:2302], scanId: 3, table: /Root/LogsX, gen: 1, deadline: 2025-04-07T14:27:16.982443Z 2025-04-07T14:17:16.983668Z node 10 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [10:7490575161093639289:2128], scanId: 3, table: /Root/LogsX, gen: 1, tablet: [10:7490575161093639007:2302], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-04-07T14:17:16.983686Z node 10 :TX_DATASHARD DEBUG: Wakeup driver at: [10:7490575161093639289:2128] 2025-04-07T14:17:16.985349Z node 10 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-04-07T14:17:16.985368Z node 10 :TX_DATASHARD DEBUG: TableRanges is over, at: [10:7490575161093639289:2128], scanId: 3, table: /Root/LogsX 2025-04-07T14:17:16.985389Z node 10 :TX_DATASHARD DEBUG: Finish scan, at: [10:7490575161093639289:2128], scanId: 3, table: /Root/LogsX, reason: 0, abortEvent: 2025-04-07T14:17:16.985410Z node 10 :TX_DATASHARD DEBUG: Send ScanData, from: [10:7490575161093639289:2128], to: [10:7490575161093639284:2321], scanId: 3, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-04-07T14:17:16.985515Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-07T14:17:16.985584Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:17:16.985609Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:17:16.985634Z node 10 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:17:16.985663Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:17:16.994251Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035437025, txId: 281474976715661] shutting down 2025-04-07T14:17:17.405578Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89speh3gbvpbq932v4dqfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MjA1OGYzZS03MjYzMDBiZi04YTZlYmI1NS0xNWQwMjk3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8016;columns=9; 2025-04-07T14:17:17.450593Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8984;columns=10; 2025-04-07T14:17:17.460142Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> YdbTableBulkUpsertOlap::UpsertCsvBug >> ColumnShardTiers::DSConfigs [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> YdbYqlClient::CopyTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-04-07T14:12:27.262315Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.264631Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.264803Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.265322Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.266134Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.266167Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.266660Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.266703Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.266785Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.266974Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.267518Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 
2025-04-07T14:12:27.267549Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.267650Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.267770Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:19:2063] 2025-04-07T14:12:27.267790Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:19:2063] 2025-04-07T14:12:27.274707Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.274737Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.275936Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276051Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276135Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276208Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276283Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276345Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276439Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:27:2072] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.276477Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.276642Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:28:2073] 2025-04-07T14:12:27.276702Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:28:2073] 2025-04-07T14:12:27.277948Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.278069Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.287338Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.287434Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.287489Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.287601Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.287630Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.287674Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.292594Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.294645Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.294860Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 
2025-04-07T14:12:27.295060Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.295398Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.295693Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.295730Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.295790Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-07T14:12:27.295876Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-07T14:12:27.295905Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-07T14:12:27.296026Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:49:2089] 2025-04-07T14:12:27.296054Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:49:2089] 2025-04-07T14:12:27.296521Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:49:2089] 2025-04-07T14:12:27.296677Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.296806Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.296854Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.299741Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-07T14:12:27.300069Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.300180Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.300220Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-07T14:12:27.300292Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:19:2063] 2025-04-07T14:12:27.301258Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:27.301341Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-07T14:12:27.301367Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 
2025-04-07T14:12:27.308846Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:19:2063] 2025-04-07T14:12:27.308931Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:19:2063] 2025-04-07T14:12:27.309215Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:28:2073] 2025-04-07T14:12:27.309548Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.310294Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-07T14:12:27.310332Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.310438Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-07T14:12:27.310637Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-07T14:12:27.310681Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:45:2088] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.310738Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.022929s 2025-04-07T14:12:27.310978Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.311091Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-07T14:12:27.311165Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:12:27.311195Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-07T14:12:27.311285Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:28:2073] 2025-04-07T14:12:27.311317Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:28:2073] 2025-04-07T14:12:27.311359Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-07T14:12:27.311389Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-04-07T14:12:27.311527Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: ... 
aderTablet: [148:270:2260] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:17:16.085289Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594037927937 CurrentLeader: [148:263:2256] CurrentLeaderTablet: [148:270:2260] CurrentGeneration: 2 CurrentStep: 0} 2025-04-07T14:17:16.085355Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [148:263:2256] CurrentLeaderTablet: [148:270:2260] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-07T14:17:16.085390Z node 148 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-04-07T14:17:16.085485Z node 148 :TABLET_RESOLVER DEBUG: SelectForward node 148 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [148:263:2256] 2025-04-07T14:17:16.085594Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [148:260:2255] 2025-04-07T14:17:16.085675Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [148:260:2255] 2025-04-07T14:17:16.085815Z node 148 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [148:260:2255] 2025-04-07T14:17:16.085989Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [148:260:2255] 2025-04-07T14:17:16.086058Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [148:260:2255] 2025-04-07T14:17:16.086113Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [148:260:2255] 2025-04-07T14:17:16.086170Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [148:260:2255] 2025-04-07T14:17:16.086218Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [148:260:2255] 2025-04-07T14:17:16.086289Z node 148 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [148:259:2254] EventType# 268697601 2025-04-07T14:17:16.086516Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-07T14:17:16.086599Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:17:16.087296Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1145b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-04-07T14:17:16.087404Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:17:16.087591Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [148:303:2283] 2025-04-07T14:17:16.087629Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [148:303:2283] 2025-04-07T14:17:16.087706Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:17:16.087758Z node 148 :TABLET_RESOLVER DEBUG: SelectForward node 148 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: 
[148:92:2120] 2025-04-07T14:17:16.087842Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [148:303:2283] 2025-04-07T14:17:16.087879Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [148:303:2283] 2025-04-07T14:17:16.087919Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [148:303:2283] 2025-04-07T14:17:16.088007Z node 148 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [148:303:2283] 2025-04-07T14:17:16.088148Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [148:303:2283] 2025-04-07T14:17:16.088182Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [148:303:2283] 2025-04-07T14:17:16.088208Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [148:303:2283] 2025-04-07T14:17:16.088251Z node 148 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [148:270:2260] EventType# 268637702 2025-04-07T14:17:16.088383Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-07T14:17:16.088458Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:17:16.088623Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:17:16.088721Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:17:16.088976Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-07T14:17:16.089065Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:17:16.089431Z node 148 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004950528}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-07T14:17:16.089544Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-07T14:17:16.089634Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:17:16.100593Z node 148 :BS_PROXY_PUT INFO: [210b5ea57fa83861] bootstrap ActorId# [148:306:2286] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:703:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-07T14:17:16.100713Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] Id# [72057594037927937:2:4:0:0:703:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-07T14:17:16.100749Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] restore Id# [72057594037927937:2:4:0:0:703:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-07T14:17:16.100800Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] partPlacement record 
partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG33 2025-04-07T14:17:16.100827Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG32 2025-04-07T14:17:16.100917Z node 148 :BS_PROXY DEBUG: Send to queueActorId# [148:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:703:1] FDS# 703 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-07T14:17:16.101885Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:703:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85535 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-07T14:17:16.101967Z node 148 :BS_PROXY_PUT DEBUG: [210b5ea57fa83861] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-07T14:17:16.102014Z node 148 :BS_PROXY_PUT INFO: [210b5ea57fa83861] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-07T14:17:16.102135Z node 148 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-07T14:17:16.102213Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-07T14:17:16.102457Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:17:16.102539Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:17:16.102602Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:17:16.102639Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:17:16.102676Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.102715Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.102752Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.103054Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [148:310:2289] 2025-04-07T14:17:16.103113Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [148:310:2289] 2025-04-07T14:17:16.103234Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:17:16.103389Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:17:16.103520Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-07T14:17:16.103590Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup 
TabletID: 72075186224037888 Cookie: 1} 2025-04-07T14:17:16.103624Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-07T14:17:16.103675Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.103732Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.103764Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-07T14:17:16.103861Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-07T14:17:16.103925Z node 148 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-07T14:17:16.104047Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [148:310:2289] 2025-04-07T14:17:16.104105Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [148:310:2289] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-04-07T14:16:00.215072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:00.215261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:00.215386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015ba/r3tmp/tmpX5n8WE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13026, node 1 TClient is connected to server localhost:5330 2025-04-07T14:16:00.584234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:00.611318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:00.613946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:00.613985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:00.614006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:00.614159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:00.658410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:00.658512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:00.669811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT tier1 (TYPE TIER) WITH tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=0 SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 2025-04-07T14:16:13.235446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:724:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.235572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2604], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.236102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.239486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:16:13.251600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:738:2607], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:16:13.487156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-07T14:16:14.177956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.497112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 incorrect snapshot 2025-04-07T14:16:15.206802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T14:16:15.802769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.203832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 incorrect snapshot REQUEST=CREATE OBJECT tier1 (TYPE TIER) WITH tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;RESULT=;EXPECTATION=1 incorrect snapshot SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: 
{"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}};expectation=1 2025-04-07T14:16:31.440366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:16:31.440413Z node 1 :IMPORT WARN: Table profi ... 
t":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}};expectation=0 incorrect snapshot SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.342868Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.342933Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:17:18.342973Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037891 2025-04-07T14:17:18.343045Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037891 2025-04-07T14:17:18.343096Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037891 2025-04-07T14:17:18.343130Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037891 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.343365Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.343385Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037892 2025-04-07T14:17:18.343409Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037892 2025-04-07T14:17:18.343442Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037892 2025-04-07T14:17:18.343465Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037892 2025-04-07T14:17:18.343485Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037892 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.343707Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.343729Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037893 2025-04-07T14:17:18.343777Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037893 2025-04-07T14:17:18.343814Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037893 2025-04-07T14:17:18.343837Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037893 2025-04-07T14:17:18.343856Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037893 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: 
{"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.344078Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.344099Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037894 2025-04-07T14:17:18.344171Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037894 2025-04-07T14:17:18.344207Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037894 2025-04-07T14:17:18.344232Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037894 2025-04-07T14:17:18.344251Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037894 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=2 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.452547Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.452599Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:17:19.452640Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.452776Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.452800Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037892 2025-04-07T14:17:19.452827Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037892 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.452882Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.452905Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037893 2025-04-07T14:17:19.452932Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037893 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.452983Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.453006Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037894 2025-04-07T14:17:19.453065Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037894 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=2 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 E0407 14:17:20.550439255 586154 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2025-04-07T14:17:20.550228996+00:00"} >> YdbQueryService::TestForbidExecuteWithoutAttach ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2025-04-07T14:16:00.265191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:16:00.265393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:16:00.265518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015c4/r3tmp/tmp6j9GnP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5362, node 1 TClient is connected to server localhost:5235 2025-04-07T14:16:00.654377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:16:00.687854Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:00.690761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:00.690800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:00.690826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:00.690989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:00.735435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:00.735542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:00.746499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT tier1 (TYPE TIER) WITH tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=0 SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 2025-04-07T14:16:13.483993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:726:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.484238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.486964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-07T14:16:13.629117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:842:2677], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.629227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.629583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:847:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:13.633654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T14:16:13.811518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:849:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } incorrect snapshot 2025-04-07T14:16:14.310277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.603244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-07T14:16:15.193976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T14:16:15.814284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.217748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 incorrect snapshot REQUEST=CREATE OBJECT tier1 (TYPE TIER) WITH tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;RESULT=;EXPECTATION=1 incorrect snapshot SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc2" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: 
{"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAcc ... 
"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}};expectation=0 incorrect snapshot SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: 
{"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=2 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.551311Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.551369Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:17:18.551415Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037891 2025-04-07T14:17:18.551496Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037891 2025-04-07T14:17:18.551555Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037891 2025-04-07T14:17:18.551596Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037891 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.551859Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.551882Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take 
configs: snapshots secrets at tablet 72075186224037892 2025-04-07T14:17:18.551906Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037892 2025-04-07T14:17:18.551945Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037892 2025-04-07T14:17:18.552001Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037892 2025-04-07T14:17:18.552027Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037892 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.552298Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.552324Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037894 2025-04-07T14:17:18.552348Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037894 2025-04-07T14:17:18.552384Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037894 2025-04-07T14:17:18.552411Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037894 2025-04-07T14:17:18.552432Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037894 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 2025-04-07T14:17:18.552657Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:18.552694Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037893 2025-04-07T14:17:18.552771Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037893 2025-04-07T14:17:18.552811Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037893 2025-04-07T14:17:18.552847Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037893 2025-04-07T14:17:18.552873Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037893 REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=2 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.632694Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.632745Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 
72075186224037891 2025-04-07T14:17:19.632789Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.632851Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.632873Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037892 2025-04-07T14:17:19.632897Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037892 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.632942Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.632962Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037894 2025-04-07T14:17:19.632986Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037894 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:19.633032Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:19.633051Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037893 2025-04-07T14:17:19.633093Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037893 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=1 SNAPSHOT: {"tiers":{}} SNAPSHOT: {"tiers":{}} TiersCount incorrect: {"tiers":{}};expectation=2 >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CopyTables [GOOD] Test command err: 2025-04-07T14:17:04.677692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575109755438786:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:04.677854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186b/r3tmp/tmpp7YAaL/pdisk_1.dat 2025-04-07T14:17:04.960507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18997, node 1 2025-04-07T14:17:05.028051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:05.028156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:05.030430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:05.030489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:05.030501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:05.030589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:05.031465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6314 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:05.272590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:05.276481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:05.277525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:05.279179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:05.279420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:05.279441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:05.280285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:05.280933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:05.280957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:05.281863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:05.284102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035425328, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:05.284125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:05.284298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:05.285552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:05.285676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:05.285736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:05.285810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:05.285877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:05.285927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:05.292653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:05.292740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:05.292751Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:05.292842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:05.316738Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:37846 Call 2025-04-07T14:17:05.327382Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:37854 2025-04-07T14:17:07.112546Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:60050 Call Call 2025-04-07T14:17:07.129097Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS INFO: User has no permission to perform query on this database, database: /Root, user: 
test_user@builtin, from ip: ipv6:[::1]:60070 2025-04-07T14:17:07.144418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.144677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:07.144703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.144766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:17:07.144847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:17:07.144867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:17:07.146534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +(ConnDB):test_user@builtin:- 2025-04-07T14:17:07.146774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:07.147012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.147746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:07.147793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:07.147812Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:07.147907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2025-04-07T14:17:08.514191Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575129413209771:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:08.514255Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186b/r3tmp/tmpvXJeIR/pdisk_1.dat 2025-04-07T14:17:08.585445Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:08.618626Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:08.618714Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7032, node 4 2025-04-07T14:17:08.621928Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:08.656718Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:08.656743Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T14:17:08.656749Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:08.656847Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046 ... P TABLE, path: Root/Omited-Indexes-Table-8 2025-04-07T14:17:19.580725Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439622, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.584078Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715680, done: 0, blocked: 1 2025-04-07T14:17:19.590378Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715680:0 2025-04-07T14:17:19.600374Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037904 not found 2025-04-07T14:17:19.601128Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.602222Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.602377Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715681:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.603900Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715681, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-04-07T14:17:19.612643Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439657, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.615649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715681, done: 0, blocked: 1 2025-04-07T14:17:19.621936Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715681:0 2025-04-07T14:17:19.634159Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.634334Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715682:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.636257Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-07T14:17:19.637190Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715682, database: /Root, subject: , 
status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-04-07T14:17:19.638338Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.646580Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439692, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.650544Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715682, done: 0, blocked: 1 2025-04-07T14:17:19.655700Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715682:0 2025-04-07T14:17:19.665799Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-07T14:17:19.667095Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.669625Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-3, pathId: 0, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.669769Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715683:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.671434Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715683, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-3 2025-04-07T14:17:19.682797Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439727, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.689131Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715683, done: 0, blocked: 1 2025-04-07T14:17:19.693094Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715683:0 2025-04-07T14:17:19.702478Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2025-04-07T14:17:19.702895Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-4, pathId: 0, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.703029Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715684:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.704213Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.705435Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715684, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-4 2025-04-07T14:17:19.715298Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439762, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.718191Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715684, done: 0, blocked: 1 2025-04-07T14:17:19.722253Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715684:0 2025-04-07T14:17:19.731958Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-5, pathId: 0, opId: 
281474976715685:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.732107Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715685:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.733162Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037891 not found 2025-04-07T14:17:19.734011Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715685, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-5 2025-04-07T14:17:19.734815Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.747411Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439790, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.752743Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715685, done: 0, blocked: 1 2025-04-07T14:17:19.759182Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2025-04-07T14:17:19.768548Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037895 not found 2025-04-07T14:17:19.770340Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.772001Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-6, pathId: 0, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.772141Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.773853Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-6 2025-04-07T14:17:19.783385Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439825, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.786115Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715686, done: 0, blocked: 1 2025-04-07T14:17:19.790478Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-04-07T14:17:19.790563Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715686, publications: 2, subscribers: 1 2025-04-07T14:17:19.790994Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715686, subscribers: 1 2025-04-07T14:17:19.800002Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-7, pathId: 0, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.800140Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715687:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.800987Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.801161Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037894 not found 2025-04-07T14:17:19.802265Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715687, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-7 2025-04-07T14:17:19.811297Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439860, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.818300Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715687, done: 0, blocked: 1 2025-04-07T14:17:19.824469Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-04-07T14:17:19.832621Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037892 not found 2025-04-07T14:17:19.834155Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:19.834326Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-8, pathId: 0, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.834472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715688:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.836178Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715688, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-8 2025-04-07T14:17:19.844868Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035439888, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:19.846896Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715688, done: 0, blocked: 1 2025-04-07T14:17:19.852283Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715688:0 2025-04-07T14:17:19.858868Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037893 not found 2025-04-07T14:17:19.860717Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> YdbOlapStore::LogGrepNonExisting >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> YdbYqlClient::RenameTables [GOOD] >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationSync >> YdbYqlClient::BuildInfo >> TTableProfileTests::UseDefaultProfile >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-04-07T14:17:06.899102Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575121206202583:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:06.899159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001866/r3tmp/tmp5pDCXD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15040, node 1 2025-04-07T14:17:07.198217Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:07.198632Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:07.208312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:17:07.208442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:17:07.208452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:07.211476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:17:07.213883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:07.214548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:07.214695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:07.216437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.217054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:07.217099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.217154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:07.217219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.234756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:07.234777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:07.234782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:07.234879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:07.254104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:07.254261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:07.257271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11424 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:07.458146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.463542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:07.464993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.466893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:07.467077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:07.467098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:07.467963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:07.468622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:07.468658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:07.470014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.473413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035427519, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:07.473445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:07.473669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:07.475260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:07.475430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.475487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:07.475593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:07.475643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:07.475692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:07.477808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:07.477856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:07.477876Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:07.477935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:11424 2025-04-07T14:17:07.662691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" StoragePolicy: "default" } CompactionPolicies { Name: "default" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSna ... 
hard: 72057594046644480 2025-04-07T14:17:22.030039Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:17:22.030495Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:22.030524Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.032391Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:22.032612Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.032821Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.032923Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:22.033269Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:22.033524Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:22.033561Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:22.033577Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:22.033756Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:22.033785Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:22.033801Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:22.040288Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:22.040361Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:17:22.041673Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:22.096981Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:22.097007Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:22.097068Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:17:22.098460Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 
2025-04-07T14:17:22.101063Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442149, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.101110Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035442149 2025-04-07T14:17:22.101208Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T14:17:22.102372Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.102620Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.102663Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:22.103439Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:22.103490Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:22.103507Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:22.103699Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:22.103730Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:22.103740Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:22.105667Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744035442149 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1175 } } 2025-04-07T14:17:22.105853Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:22.105879Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.105905Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:17:22.107088Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:22.107185Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:22.107248Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:17:22.125104Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Test, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.125621Z node 10 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:22.125659Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.127748Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: Root/Test 2025-04-07T14:17:22.127893Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterTable, at tablet# 72057594046644480 2025-04-07T14:17:22.127917Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T14:17:22.128178Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:17:22.129821Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:22.133591Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: PREPARED TxId: 281474976715659 MinStep: 1744035442150 MaxStep: 18446744073709551615 PrepareArriveTime: 1744035442131292 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 144 } } 2025-04-07T14:17:22.133612Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:22.133684Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2025-04-07T14:17:22.136202Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:22.139398Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442184, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.139465Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 HandleReply TEvOperationPlan, operationId: 281474976715659:0, stepId: 1744035442184, at schemeshard: 72057594046644480 2025-04-07T14:17:22.139660Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 129 2025-04-07T14:17:22.141244Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.141464Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.141522Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:22.142421Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.142484Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.142502Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2025-04-07T14:17:22.144251Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715659 Step: 1744035442184 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1048 } } 2025-04-07T14:17:22.144428Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:22.144461Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.144483Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 129 -> 240 2025-04-07T14:17:22.145752Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:22.145854Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:17:22.145906Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-04-07T14:16:57.403508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575079515312864:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:57.403618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00187b/r3tmp/tmp5f54tP/pdisk_1.dat 2025-04-07T14:16:57.682987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28508, node 1 2025-04-07T14:16:57.755711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:57.755840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:57.758379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:57.766496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:57.766515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:57.766553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:57.766645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:57.990123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.995249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:57.996637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.998453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:57.998653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:57.998671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:57.999500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:58.000090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:58.000110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:58.001172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:58.003929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035418048, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:58.003962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:58.004213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:58.005618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:58.005763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:58.005815Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:58.005898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:58.005934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:58.005973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:58.007787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:58.007862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:58.007887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:58.007958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-04-07T14:17:02.403792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575079515312864:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:02.403869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: 
Sending retry attempt 5 of 5 2025-04-07T14:17:03.809770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575105285117751:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:03.809792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575105285117743:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:03.809898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:03.812273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:03.812490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:03.812516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T14:17:03.812590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:03.812609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-07T14:17:03.812709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:03.812780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:03.813038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2025-04-07T14:17:03.813245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:03.813277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:03.815242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:03.815508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:03.815941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:03.816016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2025-04-07T14:17:03.816180Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:03.816293Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:03.816410Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:03.816597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:03.817583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2025-04-07T14:17:03.817620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:03.817634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:03.817825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle ... KeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:17:22.175209Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724761:3537] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:17:22.175236Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724761:3537] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-04-07T14:17:22.175395Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724761:3537] txid# 281474976715672 HANDLE EvClientConnected 2025-04-07T14:17:22.175511Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.175604Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:22.177313Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-04-07T14:17:22.177379Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724761:3537] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-04-07T14:17:22.177418Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724761:3537] txid# 281474976715672 SEND to# [10:7490575187675724760:2354] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-04-07T14:17:22.178374Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.178382Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.178485Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.178485Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.186058Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442233, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.188784Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2025-04-07T14:17:22.191796Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.191800Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.191894Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.191895Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.192951Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-04-07T14:17:22.199916Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 
01jr89svgq5mtmyz50ctakmf25, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:45774, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T14:17:22.200060Z node 10 :TX_PROXY DEBUG: actor# [10:7490575170495853637:2135] Handle TEvProposeTransaction 2025-04-07T14:17:22.200079Z node 10 :TX_PROXY DEBUG: actor# [10:7490575170495853637:2135] TxId# 281474976715673 ProcessProposeTransaction 2025-04-07T14:17:22.200109Z node 10 :TX_PROXY DEBUG: actor# [10:7490575170495853637:2135] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [10:7490575187675724840:3611] 2025-04-07T14:17:22.201124Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2025-04-07T14:17:22.202374Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:45774" 2025-04-07T14:17:22.202421Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:17:22.203028Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:17:22.203117Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:17:22.203179Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-04-07T14:17:22.203399Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:17:22.203508Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 HANDLE EvClientConnected 2025-04-07T14:17:22.203687Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.203785Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:22.205333Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-04-07T14:17:22.205435Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-04-07T14:17:22.205473Z node 10 :TX_PROXY DEBUG: Actor# [10:7490575187675724840:3611] txid# 281474976715673 SEND to# [10:7490575187675724839:2358] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-04-07T14:17:22.206985Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.207070Z node 10 :GRPC_SERVER DEBUG: Can't update 
SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.207172Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.207220Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.214675Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442261, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.217443Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-04-07T14:17:22.221238Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.221342Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.221395Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:17:22.221439Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:17:22.222173Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-04-07T14:17:22.226698Z node 10 :GRPC_SERVER DEBUG: [0x51a000016880] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.226710Z node 10 :GRPC_SERVER DEBUG: [0x51a00001d480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.226904Z node 10 :GRPC_SERVER DEBUG: [0x51a00006f680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.226962Z node 10 :GRPC_SERVER DEBUG: [0x51a00001ce80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227050Z node 10 :GRPC_SERVER DEBUG: [0x51a000148880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227161Z node 10 :GRPC_SERVER DEBUG: [0x51a000015c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227163Z node 10 :GRPC_SERVER DEBUG: [0x51a000016280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227288Z node 10 :GRPC_SERVER DEBUG: [0x51a00011e880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227361Z node 10 :GRPC_SERVER DEBUG: [0x51a00006f080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227411Z node 10 :GRPC_SERVER DEBUG: [0x51a000069080] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227505Z node 10 :GRPC_SERVER DEBUG: [0x51a00016da80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227537Z node 10 :GRPC_SERVER DEBUG: [0x51a00016ec80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227667Z node 10 :GRPC_SERVER DEBUG: [0x51a000015680] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227682Z node 10 :GRPC_SERVER DEBUG: [0x51b000189580] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227817Z node 10 :GRPC_SERVER DEBUG: [0x51a000149480] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227858Z node 10 :GRPC_SERVER DEBUG: [0x51a000068480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.227952Z node 10 
:GRPC_SERVER DEBUG: [0x51a000185480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228032Z node 10 :GRPC_SERVER DEBUG: [0x51a00016e080] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228094Z node 10 :GRPC_SERVER DEBUG: [0x51a000016e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228183Z node 10 :GRPC_SERVER DEBUG: [0x51a00001aa80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228194Z node 10 :GRPC_SERVER DEBUG: [0x51a00016c880] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228320Z node 10 :GRPC_SERVER DEBUG: [0x51a00014ca80] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228363Z node 10 :GRPC_SERVER DEBUG: [0x51a000068a80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.228511Z node 10 :GRPC_SERVER DEBUG: [0x51a00001c280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:22.231705Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-07T14:17:22.232759Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpExtractPredicateLookup::PointJoin-EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::SqlInJoin+EnableKqpDataQueryStreamLookup >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbS3Internal::TestS3Listing >> TYqlDecimalTests::DecimalKey [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-04-07T14:17:02.119070Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575102542669003:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:02.119186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001872/r3tmp/tmpjc6tg7/pdisk_1.dat 2025-04-07T14:17:02.401993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7045, node 1 2025-04-07T14:17:02.474258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:02.474373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:02.476579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:02.476597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:02.476630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:02.476728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:02.477278Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:02.695706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:02.700037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:02.701102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:02.702789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:02.702936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:02.702954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:02.704233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:02.704480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:02.704507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:02.705777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:02.708692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035422752, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:02.708719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:02.708934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:02.710291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:02.710406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:02.710457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:02.710533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:02.710569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:02.710620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:02.712188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:02.712223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:02.712243Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:02.712302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:04.307791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:04.308420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:04.309089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:04.309125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:04.311216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-07T14:17:04.311378Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:04.311587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:04.311684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:04.312006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:04.312274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:04.312321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:04.312338Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:04.312557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:04.312622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:04.312633Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:04.316596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:04.316716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:17:04.318303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:04.373877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:04.373906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:04.373984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:17:04.375879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:04.379148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035424425, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:04.379196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035424425 2025-04-07T14:17:04.379320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:17:04.380864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:04.381137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:04.381191Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:04.382847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:04.382877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:04.382887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:04.383037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:04.383056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:04.383066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: ... 81474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T14:17:22.662319Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.662357Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.662374Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T14:17:22.665088Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442709, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.665130Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035442709, at schemeshard: 72057594046644480 2025-04-07T14:17:22.665221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T14:17:22.665319Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035442709, at schemeshard: 72057594046644480 2025-04-07T14:17:22.665361Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2025-04-07T14:17:22.665406Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035442709, at schemeshard: 72057594046644480 2025-04-07T14:17:22.665442Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2025-04-07T14:17:22.665499Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1744035442709 2025-04-07T14:17:22.665543Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2025-04-07T14:17:22.666929Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.667376Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.667450Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2025-04-07T14:17:22.667534Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2025-04-07T14:17:22.667700Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2025-04-07T14:17:22.667749Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2025-04-07T14:17:22.667827Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:22.667864Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2025-04-07T14:17:22.667941Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2025-04-07T14:17:22.667976Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2025-04-07T14:17:22.668014Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:17:22.668040Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2025-04-07T14:17:22.668059Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2025-04-07T14:17:22.668070Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2025-04-07T14:17:22.668087Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2025-04-07T14:17:22.669547Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.669588Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.669605Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:17:22.669782Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.669804Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.669816Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:22.669912Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.669943Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.669956Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:22.670047Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.670067Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.670079Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:22.670195Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:22.670240Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:22.670250Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T14:17:22.670284Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:17:22.672076Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490575187129359580:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:17:22.741615Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:22.741762Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:22.743464Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:22.858328Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89svytbtnwzscgf92j8pg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:22.965542Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89sw5h75wwpkz8x4fws1yn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.071921Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89sw8tcxyzgffz9xj0xh7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.160467Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr89swc31p9mqdbr1afrf6e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.267096Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr89swewcm7dye225qrja273, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.384756Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr89swj76bb056a349wtccf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.493448Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr89swnx02pz7mh8md65aazg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:17:23.680180Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr89swsafa462zrwwr58q8rv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:23.931854Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr89swz81e0dcx36jpedrtxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:24.203369Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr89sx7150w0bmpx9qtsm206, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGU5NGU2NWQtOTRlODc2Zi03MDhlZjRiYi01MGM4NDI2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::ReadTablePg >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwriteCachingPolicy |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> KqpTx::SnapshotROInteractive1 >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::UseTableProfilePreset >> TPersQueueTest::CacheHead [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> ColumnShardTiers::DSConfigsStub [GOOD] >> KqpPg::TableDeleteAllData [GOOD] >> KqpPg::TableDeleteWhere >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbYqlClient::AlterTableAddIndex >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2025-04-07T14:15:52.618452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:52.618760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:52.618959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00164f/r3tmp/tmpNYBTcw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18283, node 1 TClient is connected to server localhost:29634 2025-04-07T14:15:53.383828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:53.435352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:53.443469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:53.443533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:53.443572Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:53.443781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:53.490559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:53.490713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:53.502365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:53.653412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-07T14:15:53.762034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:53.762256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:53.762483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:53.762560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:53.762627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:53.762701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:53.762777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:53.762894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:53.762976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:53.763060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:53.763142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:53.763204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:710:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:53.776457Z node 1 :TX_TIERING INFO: fline=manager.cpp:41;event=start_subscribing_metadata; 2025-04-07T14:15:53.778184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:53.778283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:53.778385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:53.778416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:53.778608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:53.778655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:53.778784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:15:53.778835Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:15:53.778929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:15:53.778973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:15:53.779025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:15:53.779062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:15:53.779585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:15:53.779647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:15:53.779816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:15:53.779847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:15:53.779986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:15:53.780014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:15:53.780151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:15:53.780180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:15:53.780276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:15:53.780304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:15:53.801970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:53.802060Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:53.802239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:53.802325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:53.802391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:53.802451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:53.802546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:713:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normal ... } } `;RESULT=;EXPECTATION=1 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} 2025-04-07T14:16:53.279057Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:16:53.279115Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037888 2025-04-07T14:16:53.279156Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037888 2025-04-07T14:16:53.279239Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 2025-04-07T14:16:53.279311Z node 1 :TX_TIERING DEBUG: manager.cpp:98 :Tier 'tier1' started at tablet 72075186224037888 2025-04-07T14:16:53.279345Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037888 2025-04-07T14:16:53.279380Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037888 2025-04-07T14:16:53.279410Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037888 2025-04-07T14:16:53.279438Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037888 2025-04-07T14:16:53.279456Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037888 2025-04-07T14:16:53.279498Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:16:53.279518Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037889 2025-04-07T14:16:53.279542Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037889 
2025-04-07T14:16:53.279569Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 2025-04-07T14:16:53.279601Z node 1 :TX_TIERING DEBUG: manager.cpp:98 :Tier 'tier1' started at tablet 72075186224037889 2025-04-07T14:16:53.279620Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037889 2025-04-07T14:16:53.279644Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037889 2025-04-07T14:16:53.279663Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037889 2025-04-07T14:16:53.279682Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037889 2025-04-07T14:16:53.279701Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037889 2025-04-07T14:16:53.279733Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:16:53.279754Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:16:53.279773Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037891 2025-04-07T14:16:53.279806Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 2025-04-07T14:16:53.279834Z node 1 :TX_TIERING DEBUG: manager.cpp:98 :Tier 'tier1' started at tablet 72075186224037891 2025-04-07T14:16:53.279853Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037891 2025-04-07T14:16:53.279877Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037891 2025-04-07T14:16:53.279894Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037891 2025-04-07T14:16:53.279913Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037891 2025-04-07T14:16:53.279932Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037891 2025-04-07T14:16:53.279978Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:16:53.279999Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037890 2025-04-07T14:16:53.280062Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier1' at tablet 72075186224037890 2025-04-07T14:16:53.280091Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 2025-04-07T14:16:53.280122Z node 1 :TX_TIERING DEBUG: manager.cpp:98 :Tier 'tier1' started at tablet 72075186224037890 2025-04-07T14:16:53.280141Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037890 2025-04-07T14:16:53.280166Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier1' at tablet 72075186224037890 2025-04-07T14:16:53.280184Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier1' at tablet 72075186224037890 2025-04-07T14:16:53.280203Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037890 2025-04-07T14:16:53.280221Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037890 FINISHED_REQUEST=ALTER OBJECT tier1 (TYPE TIER) SET tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: 
"secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2025-04-07T14:17:03.995032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715757:0, at schemeshard: 72057594046644480 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"tiers":{"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} TiersCount incorrect: {"tiers":{"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}};expectation=0 2025-04-07T14:17:15.867002Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:15.867077Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037888 2025-04-07T14:17:15.867146Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 2025-04-07T14:17:15.867201Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037888 2025-04-07T14:17:15.867269Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037888 2025-04-07T14:17:15.867307Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037888 2025-04-07T14:17:15.867366Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:15.867392Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037889 2025-04-07T14:17:15.867422Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 2025-04-07T14:17:15.867451Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037889 2025-04-07T14:17:15.867478Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037889 2025-04-07T14:17:15.867498Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037889 2025-04-07T14:17:15.867534Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:15.867556Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:17:15.867584Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 2025-04-07T14:17:15.867610Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037891 2025-04-07T14:17:15.867634Z node 1 :TX_TIERING DEBUG: manager.cpp:143 :Take config for tier 'tier2' at tablet 72075186224037891 2025-04-07T14:17:15.867661Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037891 2025-04-07T14:17:15.867693Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:15.867716Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037890 2025-04-07T14:17:15.867779Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 2025-04-07T14:17:15.867811Z node 1 :TX_TIERING DEBUG: manager.cpp:79 :Restarting tier 'tier2' at tablet 72075186224037890 2025-04-07T14:17:15.867838Z node 1 :TX_TIERING DEBUG: manager.cpp:143 
:Take config for tier 'tier2' at tablet 72075186224037890 2025-04-07T14:17:15.867861Z node 1 :TX_TIERING DEBUG: manager.cpp:145 :Ignore tier 'tier2' at tablet 72075186224037890 FINISHED_REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier2(TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier2(TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"tiers":{}} 2025-04-07T14:17:27.659115Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:27.659182Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037888 2025-04-07T14:17:27.659273Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037888 2025-04-07T14:17:27.659345Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:27.659378Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037889 2025-04-07T14:17:27.659419Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037889 2025-04-07T14:17:27.659470Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:27.659490Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037891 2025-04-07T14:17:27.659518Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037891 2025-04-07T14:17:27.659554Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:49;event=TEvRefreshSubscriberData;snapshot=configs; 2025-04-07T14:17:27.659576Z node 1 :TX_TIERING INFO: manager.cpp:123 :Take configs: snapshots secrets at tablet 72075186224037890 2025-04-07T14:17:27.659633Z node 1 :TX_TIERING DEBUG: manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037890 FINISHED_REQUEST=DROP OBJECT tier2(TYPE TIER);EXPECTATION=1;WAITING=1 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:17:28.343545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:17:28.343670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:28.343708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:17:28.343751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-04-07T14:17:28.344713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:17:28.344760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:17:28.344855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:28.346054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:17:28.412372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:17:28.412432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:28.423159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:17:28.423302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:17:28.423471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:17:28.430661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:17:28.430841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:28.433840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:28.434157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:17:28.439389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:28.446688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:28.446781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:28.446901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:17:28.446948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:28.447067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:17:28.447256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.454344Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:17:28.544976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:17:28.546585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.547545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:17:28.549242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:17:28.549314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.552234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:28.555066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:17:28.555276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.555432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:17:28.555523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:17:28.555578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:17:28.557825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.557880Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:17:28.557906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:17:28.559161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.559201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.559240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:28.559317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:17:28.563194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:17:28.564653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:17:28.565748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:17:28.566630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:28.566730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-07T14:17:28.566768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:28.567024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:17:28.567084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:28.567324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:28.567415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:17:28.569296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:28.569344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:28.569471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:28.569498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:17:28.569676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:28.569718Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:17:28.569786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:17:28.569808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:28.569844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:17:28.569879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:28.569905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:17:28.569926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:17:28.569975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:17:28.570006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:17:28.570027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:17:28.571491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:28.571584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:28.571617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:17:28.571651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:17:28.571704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:28.571804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 548 TxId: 104 Status: OK 2025-04-07T14:17:29.921921Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-07T14:17:29.921959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-07T14:17:29.922002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:17:29.923118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:17:29.923277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:17:29.923314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:17:29.923579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:17:29.923607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:17:29.923633Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:17:29.958372Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:29.958509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 380 RawX2: 8589936941 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:29.958563Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-07T14:17:29.958642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:17:30.007769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-07T14:17:30.007949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-07T14:17:30.008039Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-07T14:17:30.008094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, 
left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:17:30.008134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T14:17:30.008317Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-07T14:17:30.008480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:17:30.008543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:17:30.011304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:17:30.011728Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:30.011775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:17:30.011947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:17:30.012165Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:30.012203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:17:30.012247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T14:17:30.012311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:17:30.012352Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:17:30.012453Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:17:30.012489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:17:30.012538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:17:30.012579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:17:30.012620Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:17:30.012650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:17:30.012807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:17:30.012850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-07T14:17:30.012882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:17:30.012915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:17:30.014258Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:17:30.014352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:17:30.014385Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:17:30.014423Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:17:30.014462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:17:30.014826Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:17:30.014884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:17:30.014905Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:17:30.014933Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:17:30.014957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:17:30.015011Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-07T14:17:30.015053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-04-07T14:17:30.019894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:17:30.020095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:17:30.020182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:17:30.020226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:542:2478] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-04-07T14:17:30.030508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:17:30.030744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:17:30.030924Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-04-07T14:17:30.035369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:30.035545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:17:30.035809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:17:30.035849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:17:30.036222Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:17:30.036312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:17:30.036347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:640:2565] TestWaitNotification: OK eventTxId 105 >> TGRpcYdbTest::ReadTablePg [GOOD] >> TGRpcYdbTest::OperationTimeout >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbTableBulkUpsert::Timeout [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Timeout [GOOD] Test command err: 2025-04-07T14:17:10.936495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575139089509368:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:10.936606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001863/r3tmp/tmpPTJalU/pdisk_1.dat 2025-04-07T14:17:11.212045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5195, node 1 2025-04-07T14:17:11.231268Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:11.231293Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:11.255788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:11.256538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:11.256767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], 
at schemeshard: 72057594046644480 2025-04-07T14:17:11.260318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.260759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:11.260780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:11.260793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:11.260899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:11.261101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.261141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.261235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:11.261324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.289307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:11.289420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:11.292373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:11.479526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.483771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:11.484852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.486555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:11.486730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:11.486766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:11.487572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:11.488071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:11.488094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:11.489235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:11.492094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035431537, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:11.492127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:11.492365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:11.493730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:11.493882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:11.493970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:11.494058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:11.494102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:11.494146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:11.495705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:11.495753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:11.495765Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:11.495832Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:13.108980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Types, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:13.109796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:13.110589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:13.110613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:13.113606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Types 2025-04-07T14:17:13.113838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:13.114073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:13.114207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:13.114486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:13.114902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:13.114934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:13.114947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:13.115121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:13.115149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:13.115179Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:13.120087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:13.120180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:17:13.122404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:13.185693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:13.185741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:13.185803Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:17:13.200244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:13.205336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035433252, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:13.205429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035433252 2025-04-07T14:17:13.205618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:17:13.207831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:1 ... Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 20 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 814 } } 2025-04-07T14:17:30.510962Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037908 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 19 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037908 CpuTimeUsec: 875 } } 2025-04-07T14:17:30.511130Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037899 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037899 CpuTimeUsec: 648 } } 2025-04-07T14:17:30.511280Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037895 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 16 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037895 CpuTimeUsec: 910 } } 2025-04-07T14:17:30.511431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037905 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 16 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037905 CpuTimeUsec: 701 } } 2025-04-07T14:17:30.511587Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037898 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 5 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037898 CpuTimeUsec: 6034 } } 2025-04-07T14:17:30.512500Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037911 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 9 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 
72075186224037911 CpuTimeUsec: 861 } } 2025-04-07T14:17:30.512745Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.512913Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513032Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513128Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513325Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513497Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513711Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513847Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.513934Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514049Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514182Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514312Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514436Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514549Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514639Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514758Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514840Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.514968Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515168Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 
2025-04-07T14:17:30.515271Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515373Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515491Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515576Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515761Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.515940Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.516120Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.516266Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.516367Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.519056Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.519335Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037897 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 15 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037897 CpuTimeUsec: 793 } } 2025-04-07T14:17:30.520557Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037901 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037901 CpuTimeUsec: 789 } } 2025-04-07T14:17:30.522046Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037903 Status: COMPLETE TxId: 281474976715658 Step: 1744035450514 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037903 CpuTimeUsec: 913 } } 2025-04-07T14:17:30.522822Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.523478Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:30.524105Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 
72057594046644480 2025-04-07T14:17:30.524165Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:30.524256Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:17:30.527802Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:30.527909Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:30.527976Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 1 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 2 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 4 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 8 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 16 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 32 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 64 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 128 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 256 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 512 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 1024 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 2048 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
: Error: Bulk upsert to table '/Root/ui32'Deadline exceeded 8192 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 16384 usec >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-04-07T14:17:15.338330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575158391052134:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:15.338431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00185c/r3tmp/tmpa2Q1hj/pdisk_1.dat 2025-04-07T14:17:15.639565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25399, node 1 2025-04-07T14:17:15.666871Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.667014Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.670021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:15.670836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.670972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:15.684983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.685719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.685765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.685826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:15.685900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.693307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:15.693408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:15.695622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:15.696525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:15.696555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:15.696563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:15.696640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:15.923556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.928507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:15.929646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.932090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:15.932237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:15.932250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:15.933422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:15.933955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:15.933977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:15.935165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.938149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035435982, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.938175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:15.938426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:15.939877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.940008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.940053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:15.940135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:15.940157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:15.940194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:15.943859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:15.943905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:15.943925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:15.944015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:15.975590Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr89sne67pah0tw7bwats2em, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33680, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.997101s 2025-04-07T14:17:15.999730Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr89snehds46w3fcqa39fc2b, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33694, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T14:17:17.706868Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jr89sq4a42x1qzzwevbz20gt, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37986, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), 
timeout# undef 2025-04-07T14:17:17.707552Z node 1 :TX_PROXY DEBUG: actor# [1:7490575158391052353:2137] Handle TEvProposeTransaction 2025-04-07T14:17:17.707570Z node 1 :TX_PROXY DEBUG: actor# [1:7490575158391052353:2137] TxId# 281474976710658 ProcessProposeTransaction 2025-04-07T14:17:17.707624Z node 1 :TX_PROXY DEBUG: actor# [1:7490575158391052353:2137] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7490575166980987720:2618] 2025-04-07T14:17:17.777445Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:37986" 2025-04-07T14:17:17.777741Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:17:17.777808Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:17:17.777946Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:17:17.778114Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:17:17.778165Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-07T14:17:17.778284Z node 1 :TX_PROXY DEBUG: Actor# [1:7490575166980987720:2618] txid# 281474976710658 HANDLE EvClientConnected 2025-04-07T14:17:17.778603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.779242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:17.780217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:17.780241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.782140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:17.782309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.782512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard D ... 
ode 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-04-07T14:17:31.396020Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-04-07T14:17:31.396030Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-07T14:17:31.396160Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-04-07T14:17:31.396180Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:17:31.396189Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-07T14:17:31.396198Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-07T14:17:31.396206Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-07T14:17:31.396231Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-07T14:17:31.396376Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7490575228001792641:2122], Recipient [10:7490575223706824388:2309]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:17:31.396399Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:17:31.396420Z node 10 :READ_TABLE_API DEBUG: [10:7490575228001792623:2364] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-04-07T14:17:31.396457Z node 10 :READ_TABLE_API DEBUG: [10:7490575228001792623:2364] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-04-07T14:17:31.396574Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-04-07T14:17:31.396794Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-04-07T14:17:31.396843Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-04-07T14:17:31.396862Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2025-04-07T14:17:31.396907Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-07T14:17:31.396921Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-04-07T14:17:31.396936Z node 10 :READ_TABLE_API DEBUG: [10:7490575228001792623:2364] got stream part, size: 75, RU required: 128 rate limiter absent 2025-04-07T14:17:31.396993Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7490575223706824388:2309], Recipient [10:7490575223706824388:2309]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:17:31.397017Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:17:31.397041Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-07T14:17:31.397059Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-07T14:17:31.397074Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 
72075186224037897 for ReadTableScan 2025-04-07T14:17:31.397084Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-07T14:17:31.397099Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-04-07T14:17:31.397114Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-07T14:17:31.397125Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-04-07T14:17:31.397134Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-04-07T14:17:31.397142Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-07T14:17:31.397157Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayComplete 2025-04-07T14:17:31.397170Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-04-07T14:17:31.397180Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-04-07T14:17:31.397188Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-04-07T14:17:31.397216Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-07T14:17:31.397226Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-04-07T14:17:31.397237Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-04-07T14:17:31.397247Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:17:31.397255Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-07T14:17:31.397262Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-07T14:17:31.397270Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-07T14:17:31.397274Z node 10 :READ_TABLE_API DEBUG: [10:7490575228001792623:2364] Starting inactivity timer for 600.000000s with tag 3 2025-04-07T14:17:31.397295Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-07T14:17:31.397307Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-07T14:17:31.397324Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 1 ms, propose latency: 2 ms, status: COMPLETE 2025-04-07T14:17:31.397364Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-07T14:17:31.397518Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7490575228001792624:2364], Recipient [10:7490575223706824388:2309]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-04-07T14:17:31.397541Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-07T14:17:31.397551Z node 10 :TX_DATASHARD DEBUG: Got 
TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-04-07T14:17:31.397569Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-04-07T14:17:31.397627Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7490575228001792624:2364], Recipient [10:7490575223706824388:2309]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-04-07T14:17:31.397644Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-07T14:17:31.397709Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7490575228001792624:2364], Recipient [10:7490575223706824388:2309]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744035451438 TxId: 281474976715680 2025-04-07T14:17:31.399508Z node 10 :READ_TABLE_API NOTICE: [10:7490575228001792623:2364] Finish grpc stream, status: 400000 2025-04-07T14:17:31.402835Z node 10 :GRPC_SERVER DEBUG: [0x51a0000eac80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.402838Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ea680] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403026Z node 10 :GRPC_SERVER DEBUG: [0x51a00002ee80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403055Z node 10 :GRPC_SERVER DEBUG: [0x51a0000eb280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403202Z node 10 :GRPC_SERVER DEBUG: [0x51a00002be80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403211Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ea080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403359Z node 10 :GRPC_SERVER DEBUG: [0x51a000050a80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403364Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e9a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403487Z node 10 :GRPC_SERVER DEBUG: [0x51a000137a80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403487Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403618Z node 10 :GRPC_SERVER DEBUG: [0x51a00002dc80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403618Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e8280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403795Z node 10 :GRPC_SERVER DEBUG: [0x51b00016d580] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403798Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3880] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403950Z node 10 :GRPC_SERVER DEBUG: [0x51a0000de680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.403976Z node 10 :GRPC_SERVER DEBUG: [0x51a000023480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404097Z node 10 :GRPC_SERVER DEBUG: [0x51a000025280] received request Name# 
CmsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404129Z node 10 :GRPC_SERVER DEBUG: [0x51a0000de080] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404244Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e9480] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404253Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d2c80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404367Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d1480] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404368Z node 10 :GRPC_SERVER DEBUG: [0x51a00002b880] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404506Z node 10 :GRPC_SERVER DEBUG: [0x51a00004c280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T14:17:31.404508Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c4280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> KqpTx::SnapshotROInteractive1 [GOOD] >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> ColumnBuildTest::AlreadyExists >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbS3Internal::BadRequests >> KqpImmediateEffects::UpsertDuplicates >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 9228, MsgBus: 11052 2025-04-07T14:17:28.507537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575214607248669:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:28.507665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001137/r3tmp/tmpolNHri/pdisk_1.dat 2025-04-07T14:17:28.853391Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9228, node 1 2025-04-07T14:17:28.879474Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:28.879514Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:28.881699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:28.881804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:28.883458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:29.007666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:29.007879Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:29.007899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:29.008009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11052 TClient is connected to server localhost:11052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:29.624842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:29.667493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:29.807798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:29.930068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:29.980400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:31.290970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575227492152280:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:31.299119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:31.721884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.751697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.779941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.803739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.830794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.896191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.943918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575227492152777:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:31.943999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:31.944263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575227492152783:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:31.950198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:31.960235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575227492152785:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> KqpTx::BeginTransactionBadMode >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-04-07T14:16:52.717248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575058649202307:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.717419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018b5/r3tmp/tmpjjIplg/pdisk_1.dat 2025-04-07T14:16:53.054600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:53.078906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:53.078987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20432, node 1 2025-04-07T14:16:53.092327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:53.095073Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.095113Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:53.096407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:16:53.097244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.097391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:53.109184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.111081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.111121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.111345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:53.111434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.197109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:53.197130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:53.197136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:53.197215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16131 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:53.573388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.579047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:53.580487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.583578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-04-07T14:16:53.584293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:53.584396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:16:53.587014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:53.587198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:53.588725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:53.595074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035413638, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:53.595152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:53.595454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:53.597840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:53.597995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:53.598041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:53.598112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2025-04-07T14:16:53.598138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:53.598172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:53.601629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:53.601676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:53.601699Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:53.601769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:16:55.175213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575071534105202:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.175303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:55.590066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:16:55.590500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Double, at schemeshard: 72057594046644480 2025-04-07T14:16:55.592540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Double, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:16:56.751226Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575075490034901:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:56.751295Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018b5/r3tmp/tmpclrGNe/pdisk_1.dat 2025-04-07T14:16:56.830938Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:56.854587Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:56.854642Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:56.858151Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16857, node 4 2025-04-07T14:16:56.896619Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:56.896640Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:56.896647Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:16:56.896754Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:57.055136Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:57.055509Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:57.055546Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, su ... MESHARD INFO: MkDir::TPropose operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:32.979244Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:32.979412Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:17:32.980774Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.980808Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.980822Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:17:32.980981Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.980996Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.981003Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:17:32.981066Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.981080Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.981087Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T14:17:32.981148Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.981164Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.981170Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T14:17:32.981227Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.981242Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.981249Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T14:17:32.983872Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035453027, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:32.983909Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035453027, at schemeshard: 72057594046644480 2025-04-07T14:17:32.984019Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T14:17:32.984153Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035453027, at schemeshard: 72057594046644480 2025-04-07T14:17:32.984224Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2025-04-07T14:17:32.984339Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035453027, at schemeshard: 72057594046644480 2025-04-07T14:17:32.984391Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2025-04-07T14:17:32.984461Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1744035453027 2025-04-07T14:17:32.984510Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2025-04-07T14:17:32.985921Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:32.986327Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:32.986379Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2025-04-07T14:17:32.986444Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2025-04-07T14:17:32.986557Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2025-04-07T14:17:32.986591Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2025-04-07T14:17:32.986681Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:32.986727Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2025-04-07T14:17:32.986792Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2025-04-07T14:17:32.986825Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2025-04-07T14:17:32.986845Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:17:32.986869Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2025-04-07T14:17:32.986885Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2025-04-07T14:17:32.986894Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 
2025-04-07T14:17:32.986906Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2025-04-07T14:17:32.988610Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.988644Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.988657Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:17:32.988855Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.988877Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.988888Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:32.989090Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.989130Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.989150Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:32.989326Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.989355Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.989366Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:32.989470Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:32.989491Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:32.989506Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T14:17:32.989542Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:17:32.991206Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7490575230811042047:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:17:33.093188Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:33.093372Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:33.095471Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:33.200278Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89t61acyzhhs6dmy966yds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjhkN2VkNTQtMTg5ZjZjYzUtY2RkOGJiZDgtNGNlOWNlNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:33.517010Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89t68xangcxbf3tvcv794n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjhkN2VkNTQtMTg5ZjZjYzUtY2RkOGJiZDgtNGNlOWNlNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] Test command err: 2025-04-07T14:17:16.009386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575163786520581:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:16.009500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001857/r3tmp/tmpI9y0pi/pdisk_1.dat 2025-04-07T14:17:16.284163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28364, node 1 2025-04-07T14:17:16.302952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:16.303122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:16.321215Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:16.321254Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:16.350072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:16.350166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:16.350882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:16.352385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:16.352403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:16.352448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:16.352838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.366611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:16.390418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:16.390440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:16.390455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:16.390578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15952 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:16.615028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.620378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:16.621607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.623320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:16.623525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:16.623546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:16.624582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:16.624933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:16.624953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:16.626172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:16.629032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035436675, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:16.629062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:16.629286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:16.630609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:16.630753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:16.630818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:16.630894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:16.630934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:16.630993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:16.633122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:16.633167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:16.633185Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:16.633275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744042636202479 Nodes { NodeId: 1024 Host: "localhost" Port: 14171 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744042636202479 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 
2025-04-07T14:17:19.547688Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575176527053682:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:19.547756Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001857/r3tmp/tmpzhxL2D/pdisk_1.dat 2025-04-07T14:17:19.636188Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:19.665020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:19.665085Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:19.666932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15308, node 4 2025-04-07T14:17:19.735974Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:19.736011Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:19.736019Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:19.736135Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:19.938778Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:19.939085Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:19.939112Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:19.941370Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:19.941541Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 Prog ... ted: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:27.323967Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.324329Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:27.324359Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.326129Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:27.326297Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:27.326318Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:27.327825Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:27.327848Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:27.328134Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:27.329173Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.332309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035447378, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:27.332342Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:27.332575Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:27.333917Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:27.334059Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:27.334108Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:27.334176Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:27.334207Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:27.334239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:27.335487Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:27.335532Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:27.335548Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:17:27.335605Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744042646970860 Nodes { NodeId: 1024 Host: "localhost" Port: 24449 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744042646970860 } Nodes { NodeId: 10 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 11 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 12 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-07T14:17:30.686950Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490575223053795256:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:30.687046Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001857/r3tmp/tmpoUiCQL/pdisk_1.dat 2025-04-07T14:17:30.800006Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:30.835825Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:30.835899Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:30.837922Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29745, node 13 2025-04-07T14:17:30.890482Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:30.890505Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:30.890513Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:30.890595Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:31.147959Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.148291Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:31.148321Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.149969Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:31.150122Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:31.150141Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:31.150788Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:31.151304Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:31.151325Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:31.152434Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.154907Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035451200, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:31.154936Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:31.155136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:31.156430Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:31.156554Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:31.156600Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:31.156694Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:31.156730Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:31.156787Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:31.157262Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:31.157304Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:31.157321Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:17:31.157379Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744042650796530 Nodes { NodeId: 1024 Host: "localhost" Port: 8629 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744042650796530 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> KqpImmediateEffects::Delete >> KqpEffects::InsertAbort_Params_Success >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> ColumnBuildTest::AlreadyExists [GOOD] >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:17:31.937979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:17:31.938091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:31.938133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:17:31.938166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:17:31.938209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:17:31.938234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:17:31.938276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:31.938547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:17:31.995877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-07T14:17:31.995935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:32.004794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:17:32.004944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:17:32.005123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:17:32.009801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:17:32.009967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:32.010500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:32.010698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:17:32.012295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:32.013470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:32.013530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:32.013672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:17:32.013712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:32.013751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:17:32.013894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.019917Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:17:32.112629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:17:32.112859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.113077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:17:32.113297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:17:32.113338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.115515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:32.115643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-04-07T14:17:32.115813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.115878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:17:32.115912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:17:32.115957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:17:32.117762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.117804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:17:32.117839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:17:32.119344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.119381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.119431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:32.119494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:17:32.122751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:17:32.124205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:17:32.124362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:17:32.125144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:32.125268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:32.125321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:32.125522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:17:32.125570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:32.125685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:32.125745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:17:32.127351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:32.127402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:32.127529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:32.127556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:17:32.127740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:32.127778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:17:32.127849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:17:32.127870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:32.127907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:17:32.127950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:32.127979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:17:32.128040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:17:32.128097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:17:32.128151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:17:32.128203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:17:32.129611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:32.129696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:32.129721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:17:32.129747Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:17:32.129791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:32.129889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.748766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2044:3911], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.753989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2045:3912], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.759071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2046:3913], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.764136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2047:3914], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.768876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2048:3915], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.774202Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2049:3916], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.780218Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2050:3917], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.786402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2051:3918], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.792542Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2052:3919], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.798492Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 268830210, Sender [1:2053:3920], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.803971Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3921], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.809323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2055:3922], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.815101Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3923], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.821158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3924], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.827063Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3925], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.832167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3926], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.837801Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3927], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.844499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3928], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.851150Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3929], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult 
\'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.858141Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3930], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.864170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3931], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.868983Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3932], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-07T14:17:36.873955Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3933], Recipient [1:744:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:17:34.790242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:17:34.790326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:34.790359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:17:34.790381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:17:34.790422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:17:34.790447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:17:34.790487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:17:34.790729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:17:34.844178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:17:34.844223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:34.852584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
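For reference, each TEvLocalMKQL trace above carries the same MiniKQL program, differing only in the Uint64 key literal (27 through 50 in this run); with the string escapes expanded, the Program.Text field of the last message reads approximately:

 (
 (let key '('('key (Uint64 '50))))
 (let select '('key))
 (return (AsList
 (SetResult 'Result (SelectRow '__user__Table key select))
 ))
 )

that is, each request asks the datashard to read the 'key' column of a single row of __user__Table and return it under the label 'Result.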
2025-04-07T14:17:34.852724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:17:34.852896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:17:34.858397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:17:34.858561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:34.859113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:34.859314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:17:34.861335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:34.862516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:34.862573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:34.862696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:17:34.862746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:34.862794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:17:34.862947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.869532Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:17:34.985067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:17:34.985291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.985488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:17:34.985716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:17:34.985787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.987944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:34.988079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:17:34.988241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.988295Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:17:34.988329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:17:34.988359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:17:34.989971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.990014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:17:34.990043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:17:34.991313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.991349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.991389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:34.991436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:17:34.994536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:17:34.996090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:17:34.996278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:17:34.997140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:17:34.997286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:34.997344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:34.997564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:17:34.997601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:17:34.997732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:34.997795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:17:34.999382Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:17:34.999434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:17:34.999585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:17:34.999615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:17:34.999846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:17:34.999884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:17:34.999961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:17:35.000000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:35.000035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:17:35.000070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:17:35.000107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:17:35.000143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:17:35.000191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:17:35.000242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:17:35.000269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:17:35.002047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:35.002150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:17:35.002187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:17:35.002221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:17:35.002281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:17:35.002367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
47Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:783:2662] TestWaitNotification: OK eventTxId 105 2025-04-07T14:17:37.177520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-04-07T14:17:37.177747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 275us result status StatusSuccess 2025-04-07T14:17:37.178123Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-07T14:17:37.179757Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2025-04-07T14:17:37.182247Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T14:17:37.182341Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1136:3011], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:17:37.182506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-04-07T14:17:37.182579Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-04-07T14:17:37.182672Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1136:3011], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:17:37.184439Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-07T14:17:37.184496Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-07T14:17:37.186291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" 
DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-04-07T14:17:37.186445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-04-07T14:17:37.186684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-04-07T14:17:37.190015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-07T14:17:37.190178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-04-07T14:17:37.190342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-04-07T14:17:37.190408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-07T14:17:37.190468Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-04-07T14:17:37.190561Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1136:3011], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-07T14:17:37.191171Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1136:3011] 2025-04-07T14:17:37.191413Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, 
reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbS3Internal::BadRequests [GOOD] >> YdbScripting::BasicV0 >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> KqpWrite::CastValues ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-04-07T14:17:21.086345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575184644144153:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:21.086503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001850/r3tmp/tmp8JlkLM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12893, node 1 2025-04-07T14:17:21.394269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:17:21.394376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:17:21.394391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:21.400021Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:21.400041Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:21.413443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:17:21.428781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:21.429263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:21.429417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:21.429786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:21.429883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:21.430923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:21.430940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:21.430945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:21.431063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:21.431586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:21.432016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:21.432048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:21.432141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:21.432200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.433203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:21.649219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.657503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:21.658817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.660550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:21.660700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:21.660714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:21.661792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:21.661930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:21.661948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:21.662951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.665316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035441708, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:21.665342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:21.665501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:21.666594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:21.666738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:21.666800Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:21.666900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:21.666934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:21.666964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:21.668771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:21.668813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:21.668830Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:21.668891Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:23.289683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/OlapStore, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.289918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:23.290015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/OlapStore/OlapTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T14:17:23.290568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 1 -> 2 2025-04-07T14:17:23.291148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:23.291185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-07T14:17:23.292972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/OlapStore/OlapTable 2025-04-07T14:17:23.293154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:23.293400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:23.293496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:23.293788Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:23.294005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:23.294565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:23.294606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:23.294622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:23.294821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:23.294850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:23.294859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:23.295016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2025-04-07T14:17:23.295057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:23.295072Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2025-04-07T14:17:23.299980Z node 1 :FLAT_TX ... INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:37.530579Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:37.530831Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710759 Step: 1744035457570 OrderId: 281474976710759 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 817 } } 2025-04-07T14:17:37.532789Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T14:17:37.532831Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T14:17:37.532851Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-07T14:17:37.533029Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T14:17:37.533094Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T14:17:37.533119Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 3 2025-04-07T14:17:37.533389Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710759 2025-04-07T14:17:37.533436Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2025-04-07T14:17:37.533457Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 4 2025-04-07T14:17:37.533669Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:37.533720Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.533753Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-07T14:17:37.536550Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2025-04-07T14:17:37.536636Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 
2025-04-07T14:17:37.539596Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976710759 Step: 1744035457570 OrderId: 281474976710759 ExecLatency: 7 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1063 } } 2025-04-07T14:17:37.539798Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:37.539831Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2025-04-07T14:17:37.539855Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2025-04-07T14:17:37.540936Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2025-04-07T14:17:37.541014Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2025-04-07T14:17:37.541057Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-07T14:17:37.541171Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2025-04-07T14:17:37.541191Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:2 2025-04-07T14:17:37.542543Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-07T14:17:37.544724Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715662, at schemeshard: 72057594046644480 2025-04-07T14:17:37.545878Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/Test 2025-04-07T14:17:37.545997Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:37.547374Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/Test 2025-04-07T14:17:37.547461Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-07T14:17:37.547617Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-07T14:17:37.548832Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2025-04-07T14:17:37.550132Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035457598, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:37.550166Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1744035457598 2025-04-07T14:17:37.550181Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-07T14:17:37.551226Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2025-04-07T14:17:37.551292Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-07T14:17:37.551326Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 281474976710760:0 2025-04-07T14:17:37.552338Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-07T14:17:37.583472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Test, pathId: , opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.583912Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:37.583934Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.585615Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Test 2025-04-07T14:17:37.585790Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715663:0 ProgressState, operation type: TxAlterTable, at tablet# 72057594046644480 2025-04-07T14:17:37.585817Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:0 2 -> 3 2025-04-07T14:17:37.586075Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715663, at schemeshard: 72057594046644480 2025-04-07T14:17:37.587130Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715663:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:37.591049Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715663:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: PREPARED TxId: 281474976715663 MinStep: 1744035457571 MaxStep: 18446744073709551615 PrepareArriveTime: 1744035457588686 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 184 } } 2025-04-07T14:17:37.591069Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:37.591114Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:0 3 -> 128 2025-04-07T14:17:37.592261Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715663:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:37.595097Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035457640, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:37.595139Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715663:0 HandleReply TEvOperationPlan, operationId: 281474976715663:0, stepId: 1744035457640, at schemeshard: 72057594046644480 2025-04-07T14:17:37.595430Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:0 128 -> 129 2025-04-07T14:17:37.596692Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:37.596931Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:37.596991Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715663:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:37.597314Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-04-07T14:17:37.597348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715663 2025-04-07T14:17:37.597364Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T14:17:37.599338Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 Step: 1744035457640 OrderId: 281474976715663 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1475 } } 2025-04-07T14:17:37.599511Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715663:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:37.599543Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.599568Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:0 129 -> 240 2025-04-07T14:17:37.600693Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715663:0 ProgressState 2025-04-07T14:17:37.600772Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-04-07T14:17:37.600818Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:0 >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00183f/r3tmp/tmppmZJWq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61333, node 1 TClient is connected to server localhost:19396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:17:27.435499Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575211138207526:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:27.435567Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00183f/r3tmp/tmphmxng6/pdisk_1.dat 2025-04-07T14:17:27.516427Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:27.542562Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:27.542641Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:27.544704Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8381, node 4 2025-04-07T14:17:27.614577Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:27.614598Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:27.614605Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:27.614702Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:27.799495Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.799876Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:27.799909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.801483Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:27.801661Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:27.801679Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:27.802491Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:27.803011Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:27.803034Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:27.804358Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.809306Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035447854, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:27.809337Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:27.809557Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:27.811010Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:27.811150Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:27.811212Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:27.811277Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:27.811311Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:27.811355Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:27.811920Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:27.811967Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:27.811981Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:27.812054Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:29.818712Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490575219728143110:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:29.818815Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:30.023927Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:30.024432Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:17:30.024952Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:30.024979Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:30.026671Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:30.026880Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:30.027018Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:30.027120Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:30.027422Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:30.027922Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:30.027968Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:30.028015Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:30.028240Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:30.028267Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:30.028285Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:30.032855Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:30.032923Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:17:30.035252Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:30.092536Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:30.092558Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:30.092632Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2025-04-07T14:17:30.094083Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:30.096614Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035450143, tra ... ) 2025-04-07T14:17:35.441900Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:35.667548Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:35.667910Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:35.667938Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:35.669868Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:35.670086Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:35.670101Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:17:35.671497Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:35.671521Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:35.672602Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:35.673658Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:35.675429Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035455722, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:35.675458Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:35.675674Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:35.677053Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:35.677208Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:35.677257Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:35.677341Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:35.677379Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:35.677416Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:35.677853Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:35.677903Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:35.677925Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:35.677993Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:38.165319Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.165748Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:17:38.166293Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:38.166328Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.168591Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:38.168825Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:38.169042Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:38.169144Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:38.169764Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:38.169810Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:38.169829Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:38.170095Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:38.170156Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:38.170170Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:38.170340Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:17:38.178555Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:38.178641Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:17:38.180795Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:38.253584Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:38.253612Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:38.253701Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:17:38.256006Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:38.268271Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035458305, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:38.268343Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035458305 2025-04-07T14:17:38.268506Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T14:17:38.273902Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:38.274192Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:38.274243Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:38.276140Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:38.276620Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:38.276647Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:38.276890Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:38.276916Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:38.276926Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:38.278544Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744035458305 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 8 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1295 } } 2025-04-07T14:17:38.279249Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:38.279281Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.279327Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:17:38.282562Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:17:38.282661Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:17:38.282718Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 27508, MsgBus: 15647 2025-04-07T14:17:34.645342Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575240035118872:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:34.645432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ece/r3tmp/tmpn0lSho/pdisk_1.dat 2025-04-07T14:17:34.893884Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27508, node 1 2025-04-07T14:17:34.952215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:34.952232Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:34.952239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:34.952348Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:34.999876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:17:35.000015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:35.001628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15647 TClient is connected to server localhost:15647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:35.385274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:35.400613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:35.508061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:35.627880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:35.693607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:37.205929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575252920022459:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.206041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.402932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.425516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.447479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.468635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.490805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.513884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:37.544333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575252920022949:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.544419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.544577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575252920022954:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.547841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:37.554966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575252920022956:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:38.394892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::UpsertExistingKey ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::OperationCancelAfter [GOOD] Test command err: 2025-04-07T14:17:19.951287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575177613847986:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:19.951351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001851/r3tmp/tmpvGc3P0/pdisk_1.dat 2025-04-07T14:17:20.226115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21742, node 1 2025-04-07T14:17:20.286967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:20.287158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:20.290627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:20.296023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:20.296054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:20.296063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:20.296179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:20.568779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:20.575538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:20.577087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:20.579093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:20.579299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:20.579326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:20.580780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:20.581013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:20.581033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:20.582382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:20.585762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035440630, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:20.585796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:20.586111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:20.593116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:20.593306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:20.593366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:20.593504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:20.593565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:20.593634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:20.595782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:20.595856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:20.595873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:20.596006Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:23.177981Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575193733877716:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:23.178076Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001851/r3tmp/tmpM4UsRY/pdisk_1.dat 2025-04-07T14:17:23.252992Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:23.278483Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:23.278598Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:23.284546Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63028, node 4 2025-04-07T14:17:23.315124Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:23.315145Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:23.315152Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:23.315248Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:23.486359Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.486573Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:23.486596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.490734Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:23.490857Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:23.490867Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:23.492336Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:23.492360Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:23.493390Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:23.495718Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.498997Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035443542, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:23.499028Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:23.499265Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:23.500695Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:23.500833Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:23.500873Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:23.500930Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:23.500991Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:23.501037Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:23.502402Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72 ... 
Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:31.503509Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27462, node 10 2025-04-07T14:17:31.558751Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:31.558781Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:31.558794Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:31.558907Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:31.800766Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.801156Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:31.801186Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:31.803626Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:31.803805Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:31.803826Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:17:31.806909Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:31.806937Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:31.808459Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:31.809454Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:31.811549Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035451858, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:31.811581Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:31.811812Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:31.813362Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:31.813519Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:31.813578Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:31.813664Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:31.813694Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:31.813734Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:31.814422Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:31.814460Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:31.814478Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:31.814561Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1
: Error: Operation timeout. 2025-04-07T14:17:35.438691Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490575242778679511:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:35.438762Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001851/r3tmp/tmpVrFAjf/pdisk_1.dat 2025-04-07T14:17:35.539141Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:35.563425Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:35.563516Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:35.566541Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6445, node 13 2025-04-07T14:17:35.611380Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:35.611401Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:35.611409Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:35.611497Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:35.866347Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:35.866594Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:35.866609Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:35.868208Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:35.868360Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:35.868373Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:17:35.869643Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:35.869912Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:35.869934Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:35.871140Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:35.873955Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035455918, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:35.873986Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:35.874208Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:35.875925Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:35.876082Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:35.876121Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:35.876180Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:35.876208Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:35.876240Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:35.877301Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:35.877333Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:35.877345Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:35.877388Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1
: Error: Operation cancelled. >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TestProgram::YqlKernelStartsWithScalar >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpLocks::TwoPhaseTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "prefix" } Constant { Bytes: "Lorem" } } } Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "prefix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=Lorem;column=G:prefix;};{arguments=[G:string;G:prefix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; string: [ "Lorem ipsum dolor sit amet.", "ipsum dolor sit amet." 
] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict >> KqpPg::V1CreateTable [GOOD] >> KqpPg::TempTablesSessionsIsolation ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-04-07T14:17:23.438805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575190847575285:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:23.438975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001843/r3tmp/tmpIrBwYF/pdisk_1.dat 2025-04-07T14:17:23.731255Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62861, node 1 2025-04-07T14:17:23.797932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:23.797959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:23.797969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:23.798063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:23.804919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:23.805028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:23.815503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:24.088973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.100556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:24.101839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.103581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:24.103779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:24.103800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:24.104882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:24.105534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:24.105557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:24.106962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.110289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035444158, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:24.110319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:24.110548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:24.111908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:24.112065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:24.112127Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:24.112204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:24.112251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:24.112289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:24.114890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:24.114924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:24.114943Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:24.115012Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:27.085339Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575210488146397:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:27.085389Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001843/r3tmp/tmp74zJqz/pdisk_1.dat 2025-04-07T14:17:27.167508Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:27.194196Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:27.194288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:27.196576Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1239, node 4 2025-04-07T14:17:27.236177Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:27.236207Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:27.236213Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:27.236305Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:27.429603Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.429882Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:27.429917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.431524Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:27.431692Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:27.431703Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:27.432961Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:27.432986Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:27.434407Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:27.437365Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:27.439038Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035447483, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:27.439066Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:27.439269Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:27.440734Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:27.440884Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:27.440928Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:27.440994Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:27.441025Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:27.441063Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:27.441610Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdate ... 
athType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:34.260366Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:34.260707Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:34.260740Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:34.262307Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:34.262500Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:34.262523Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:34.263236Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:34.263635Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:34.263655Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:34.264733Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:34.267344Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035454315, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:34.267375Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:34.267616Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:34.268933Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:34.269066Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:34.269102Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:34.269151Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:34.269177Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:34.269206Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:34.269916Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:34.269978Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:34.269993Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:34.270044Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:34.359776Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:34.359966Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:34.360657Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 2025-04-07T14:17:37.604548Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7490575251146754354:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:37.604604Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001843/r3tmp/tmp8NACGv/pdisk_1.dat 2025-04-07T14:17:37.703152Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:37.727386Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:37.727447Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:37.730029Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1894, node 13 2025-04-07T14:17:37.772231Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:37.772250Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:37.772258Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:37.772374Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:38.004034Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.004331Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:38.004354Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.005920Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:38.006089Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:38.006112Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:38.007027Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:38.007440Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:38.007462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:38.008718Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.011653Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035458060, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:38.011694Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:38.011951Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:38.013477Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:38.013638Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:38.013696Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:38.013767Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:38.013796Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:38.013828Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:38.014254Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:38.014283Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:38.014297Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:38.014373Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:38.193662Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:38.193798Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:38.194295Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> YdbScripting::BasicV0 [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> KqpWrite::UpsertNullKey >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus >> KqpTx::CommitPrepared [GOOD] >> KqpWrite::CastValues [GOOD] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::BasicV0 [GOOD] Test command err: 2025-04-07T14:17:21.900282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575182441717218:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:21.900576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001849/r3tmp/tmpN3BZFJ/pdisk_1.dat 2025-04-07T14:17:22.159753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13075, node 1 2025-04-07T14:17:22.236384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:22.236403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:22.236414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:22.236516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:22.249691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:22.249808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:22.254677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:22.429474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.434191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:22.435331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.437009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:22.437194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:22.437210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:22.438163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:22.438447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:22.438473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:22.439522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.442013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442485, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.442035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:22.442226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:22.443372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.443505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.443548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:22.443605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:22.443640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:22.443673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:22.445538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:22.445578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:22.445591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:22.445782Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:25.398910Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575201094384617:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:25.398981Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001849/r3tmp/tmphfEWoq/pdisk_1.dat 2025-04-07T14:17:25.500290Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:25.536253Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:25.536323Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:25.538445Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17750, node 4 2025-04-07T14:17:25.597203Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:25.597220Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:25.597225Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:25.597307Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:25.765318Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:25.765608Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:25.765633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:25.767371Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:25.767641Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:25.767661Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:25.769121Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:25.769835Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:25.769876Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:25.771386Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:25.774467Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035445817, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:25.774503Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:25.774719Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:25.776213Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:25.776342Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:25.776381Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:25.776440Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:25.776467Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:25.776499Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:25.777916Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720 ... 
:TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:42.331067Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.331136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.331157Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:17:42.331388Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.331426Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.331437Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:17:42.331564Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.331578Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.331587Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T14:17:42.331697Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.331726Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.331732Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2025-04-07T14:17:42.331821Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:17:42.331868Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.331879Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.331886Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2025-04-07T14:17:42.336483Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035462379, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:42.336525Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035462379, at schemeshard: 
72057594046644480 2025-04-07T14:17:42.336632Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T14:17:42.336755Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035462379, at schemeshard: 72057594046644480 2025-04-07T14:17:42.336806Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2025-04-07T14:17:42.336879Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035462379, at schemeshard: 72057594046644480 2025-04-07T14:17:42.336949Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2025-04-07T14:17:42.337027Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1744035462379 2025-04-07T14:17:42.337073Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2025-04-07T14:17:42.338816Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:42.339401Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:42.339474Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2025-04-07T14:17:42.339582Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2025-04-07T14:17:42.339761Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2025-04-07T14:17:42.339819Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2025-04-07T14:17:42.339908Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:42.339946Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2025-04-07T14:17:42.340044Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2025-04-07T14:17:42.340080Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2025-04-07T14:17:42.340104Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:17:42.340130Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2025-04-07T14:17:42.340142Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2025-04-07T14:17:42.340151Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2025-04-07T14:17:42.340169Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2025-04-07T14:17:42.342961Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.343028Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.343045Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 
281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-04-07T14:17:42.343243Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.343286Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.343297Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:42.343415Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.343461Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.343472Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:42.343611Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.343656Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.343667Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2025-04-07T14:17:42.343774Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.343810Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.343821Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-04-07T14:17:42.343858Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:17:42.345858Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490575272992064756:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:17:42.435711Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:42.435880Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:42.438275Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:17:42.512566Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr89tcv79z74qgjtm501957t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjdhYzk0OTItMTM4MTliOTQtYzljZmY2ZTYtZGZiYWFjODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:17:42.630714Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr89tfc22k46jae0h26fsp4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NmYwNjY2NmMtZWI4NTA2YzgtNGI4MTQ1ZGUtMTRjZWFkYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:17:42.635726Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035462666, txId: 281474976715662] shutting down >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 3030, MsgBus: 9393 2025-04-07T14:17:35.224452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575246361838994:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:35.224538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001132/r3tmp/tmpGjXkO3/pdisk_1.dat 2025-04-07T14:17:35.518795Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3030, node 1 2025-04-07T14:17:35.559207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:35.559231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:35.559250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:35.559363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:35.602542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:35.602665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:35.604415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9393 TClient is connected to server localhost:9393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:35.973239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:17:35.989825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:36.078866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:36.207458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:36.282371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:37.771286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575254951775303:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.771401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:37.994148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.023050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.045574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.069336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.110644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.177607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:38.234648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575259246743098:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:38.234726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:38.234949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575259246743103:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:38.238186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:38.248483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575259246743105:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 29455, MsgBus: 7859 2025-04-07T14:17:39.635512Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575263543422707:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:39.635612Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001132/r3tmp/tmpZ9gCG8/pdisk_1.dat 2025-04-07T14:17:39.729169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:39.763058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:39.763166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:39.764907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29455, node 2 2025-04-07T14:17:39.814033Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:39.814052Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:39.814060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:39.814154Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7859 TClient is connected to server localhost:7859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:40.224135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.229810Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:40.240980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.292902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:40.434641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.504039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.234650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575276428326309:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.234728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.269682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.300906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.329364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.358149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.385101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.417649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.470223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575276428326800:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.470324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.470419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575276428326805:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.473524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:42.481202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575276428326807:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues [GOOD] Test command err: Trying to start YDB, gRPC: 31339, MsgBus: 30196 2025-04-07T14:17:40.009992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575265227917026:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:40.010066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ebc/r3tmp/tmpbPZ6tz/pdisk_1.dat 2025-04-07T14:17:40.274049Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31339, node 1 2025-04-07T14:17:40.336090Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:40.336118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:40.336127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:40.336206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:40.352524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:40.352626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:40.354455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30196 TClient is connected to server localhost:30196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:40.741910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.764733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.890719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:41.017235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:41.076261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.414055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575273817853317:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.414162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.600752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.622457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.649604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.681614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.706689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.742183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.778981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575273817853808:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.779067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.779265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575273817853813:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:42.782664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:42.793025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575273817853815:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:35.976905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:35.976988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:35.977027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:35.977063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:35.977114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:35.977161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:35.977215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:35.977552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:36.048935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:36.048987Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:36.057885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:36.058010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:36.058131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:36.067904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:36.068067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:36.068460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:36.068757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-04-07T14:13:36.072790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:36.074631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:36.074696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:36.074857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:36.074903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:36.074941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:36.075068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:36.081426Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:36.186479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:36.186646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.186804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:36.186995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:36.187051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.188883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:36.189003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:36.189147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.189188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:36.189219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:36.189255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:36.190751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.190804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:36.190832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:36.192220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.192253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.192319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:36.192356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:36.194891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:36.196113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:36.196249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:36.196875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:36.196989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:36.197028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:36.197203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:36.197251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:36.197389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:36.197453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:36.198831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:36.198861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:36.198996Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:36.199027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:36.199254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:36.199282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:36.199355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:36.199389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:36.199420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:36.199482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:36.199510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:36.199532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:36.199577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:36.199609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:36.199644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... tionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:43.615987Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:17:43.616273Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 287us result status StatusSuccess 2025-04-07T14:17:43.617055Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:17:43.628133Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1072:2811] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:17:43.628251Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:988:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:17:43.628410Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1072:2811] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035463587952 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035463587952 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744035463587952 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:17:43.633710Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1072:2811] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-07T14:17:43.633804Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:988:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpEffects::InsertRevert_Literal_Success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 20177, MsgBus: 31303 2025-04-07T14:17:40.583795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575264041353093:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:40.584019Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb5/r3tmp/tmpDPd41I/pdisk_1.dat 2025-04-07T14:17:40.852055Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20177, node 1 2025-04-07T14:17:40.913134Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:40.913157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:40.913186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:40.913272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:40.948111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:40.948218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:40.950022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31303 TClient is connected to server localhost:31303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:41.392389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:41.408836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:41.524925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:41.664208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:41.737882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:43.117721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575276926256668:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:43.117816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:43.338171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.364287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.389572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.424633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.461206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.488632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:43.519047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575276926257159:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:43.519100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575276926257164:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:43.519106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:43.522061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:43.529231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575276926257166:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:44.336744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> KqpWrite::ProjectReplace >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> KqpEffects::InsertAbort_Select_Conflict [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> KqpEffects::UpdateOn_Params ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-04-07T14:17:17.103147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575169068302717:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:17.103639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001856/r3tmp/tmpr8TWi4/pdisk_1.dat 2025-04-07T14:17:17.359068Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65524, node 1 2025-04-07T14:17:17.394943Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:17.395057Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:17.422596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.423341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.423383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.423444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:17.423529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.428692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:17.428824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:17.433267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:17.441269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:17.441306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:17.441321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-07T14:17:17.441425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:17.645339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.651150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:17.652825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.654944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:17.655188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:17.655206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:17.656091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:17.656898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:17.656925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:17.658352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.662035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035437704, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:17.662091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:17.662403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:17.664074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.664241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.664303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:17.664386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:17.664432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:17.664476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:17.666690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:17.666738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:17.666755Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:17.666825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:27473 2025-04-07T14:17:17.870426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.870798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:17.870823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.874065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-07T14:17:17.874224Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.874446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.874506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:17.874524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 waiting... 2025-04-07T14:17:17.875904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.875955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.875972Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:17.876151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.876185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.876201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:17:17.879059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:17.879082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:17:17.879586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:17.881209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.885261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035437928, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:17.885289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-07T14:17:17.885488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:17:17.887151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.887441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.887498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:17:17.887563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:17:17.887631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:17:17.887672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 
2025-04-07T14:17:17.888561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.888614Z node 1 :FLAT_TX_SCHEMESH ... 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:17:41.490456Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:41.490781Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:41.490835Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2025-04-07T14:17:41.492256Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:41.988604Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7490575269793882502:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:41.988687Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:17:41.991594Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:41.991710Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:41.994035Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-07T14:17:42.007179Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:42.304742Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2025-04-07T14:17:42.364107Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2025-04-07T14:17:42.364172Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2025-04-07T14:17:42.365925Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11621 2025-04-07T14:17:42.371346Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035462414, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:42.371394Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715659:0, at tablet# 72057594046644480 2025-04-07T14:17:42.371647Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2025-04-07T14:17:42.373585Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:42.373823Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2025-04-07T14:17:42.373927Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2025-04-07T14:17:42.374071Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2025-04-07T14:17:42.374142Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-07T14:17:42.374371Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-07T14:17:42.375131Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-04-07T14:17:42.375221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-04-07T14:17:42.375243Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2025-04-07T14:17:42.375308Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-07T14:17:42.386654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 
ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:17:42.426652Z node 13 :FLAT_TX_SCHEMESHARD 
NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:42.427410Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, at schemeshard: 72057594046644480 2025-04-07T14:17:42.430152Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/table-1 2025-04-07T14:17:42.437148Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-07T14:17:42.437681Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:17:42.990426Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:17:43.991171Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:17:44.991652Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 11145, MsgBus: 5765 2025-04-07T14:17:37.349554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575253697141592:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:37.349636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ebf/r3tmp/tmpJI0lmS/pdisk_1.dat 2025-04-07T14:17:37.607004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11145, node 1 2025-04-07T14:17:37.697539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:37.697666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:37.699597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:37.716469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:37.716491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:37.716498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:37.716587Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5765 TClient is connected to server localhost:5765 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:38.175656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.207305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:38.219744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.334945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.463274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.516867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:40.125609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575266582045196:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:40.125736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:40.425599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.449797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.471968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.496627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.521146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.551421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:40.590531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575266582045688:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:40.590599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575266582045693:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:40.590630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:40.593736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:40.604700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575266582045695:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6362, MsgBus: 12433 2025-04-07T14:17:42.466082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575274612091374:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:42.466129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ebf/r3tmp/tmp0C7naP/pdisk_1.dat 2025-04-07T14:17:42.553868Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6362, node 2 2025-04-07T14:17:42.593695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:42.593775Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:42.597410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:42.614275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:42.614298Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:42.614306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:42.614404Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12433 TClient is connected to server localhost:12433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:43.014602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.030463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.076575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:43.213222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.276943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:45.027508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575287496994965:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.027815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.044296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.068333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.092222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.121159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.145287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.179036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.217363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575287496995455:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.217437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575287496995460:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.217439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.220110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:45.227481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575287496995462:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:46.166814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.560011Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575291791963242:2487], TxId: 281474976715675, task: 1. Ctx: { TraceId : 01jr89tk1e0v13g27e9wwb581b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDdlMzg0ODgtMmE1NjA1NmMtNTQzZDg0YWUtYmU2N2UxY2Q=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:17:46.560385Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575291791963244:2488], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDdlMzg0ODgtMmE1NjA1NmMtNTQzZDg0YWUtYmU2N2UxY2Q=. TraceId : 01jr89tk1e0v13g27e9wwb581b. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490575291791963239:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:17:46.567845Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDdlMzg0ODgtMmE1NjA1NmMtNTQzZDg0YWUtYmU2N2UxY2Q=, ActorId: [2:7490575291791963067:2459], ActorState: ExecuteState, TraceId: 01jr89tk1e0v13g27e9wwb581b, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 10238, MsgBus: 5724 2025-04-07T14:17:37.049411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575251524780662:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:37.049494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec7/r3tmp/tmplKPKHd/pdisk_1.dat 2025-04-07T14:17:37.287127Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10238, node 1 2025-04-07T14:17:37.345185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:37.345211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:37.345217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:37.345321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:37.395791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:37.395910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:37.397467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5724 TClient is connected to server localhost:5724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:37.777497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:37.800842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:37.945686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.088179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:38.160786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:39.494111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575260114716959:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:39.494248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:39.694561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.716238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.740588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.761336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.788477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.825178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:39.863099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575260114717451:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:39.863175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:39.863244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575260114717456:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:39.866415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:39.877093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575260114717458:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:40.713243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32668, MsgBus: 6947 2025-04-07T14:17:41.920347Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575269364307596:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:41.920385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ec7/r3tmp/tmpgqvk72/pdisk_1.dat 2025-04-07T14:17:41.997681Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:42.021254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:42.021333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:42.022724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32668, node 2 2025-04-07T14:17:42.060435Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:42.060456Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:42.060464Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:42.060564Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6947 TClient is connected to server localhost:6947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:42.410615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.427627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:42.478572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.620187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.695305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.609385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575282249211187:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.609457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.640213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.671823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.700849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.723354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.751037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.778586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.853488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575282249211685:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.860982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.861288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575282249211690:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.863866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:44.874062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575282249211692:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:45.681402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> KqpWrite::UpsertNullKey [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] >> TPersQueueTest::SetupWriteSession [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 29608, MsgBus: 18966 2025-04-07T14:17:43.898432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575277928714483:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:43.898553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eae/r3tmp/tmpmFYLs7/pdisk_1.dat 2025-04-07T14:17:44.162550Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29608, node 1 2025-04-07T14:17:44.231530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:44.231555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:44.231564Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:44.231665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:44.247610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:44.247750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:44.249370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18966 TClient is connected to server localhost:18966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:44.637579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.655492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:44.668995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.775331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.924884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:45.010498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:46.374010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575290813618066:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.374103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.600458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.623463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.645966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.687782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.710649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.733702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.765710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575290813618556:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.765779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.766006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575290813618561:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.769365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:46.778036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575290813618563:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TestProgram::JsonExistsBinary >> KqpImmediateEffects::InsertDuplicates >> KqpEffects::InsertRevert_Literal_Duplicates >> TestProgram::JsonExistsBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] Test command err: Trying to start YDB, gRPC: 5302, MsgBus: 31394 2025-04-07T14:17:44.486844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575284479039645:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:44.486924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ea4/r3tmp/tmpqT9rhF/pdisk_1.dat 2025-04-07T14:17:44.763980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5302, node 1 2025-04-07T14:17:44.829451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:44.829470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:44.829499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:44.829592Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:44.879702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:44.879836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:44.881126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31394 TClient is connected to server localhost:31394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:45.280606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:45.293792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:45.413195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:45.538685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:45.612172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:46.982449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575293068975970:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:46.982534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:47.236311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.261185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.296532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.324439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.389592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.415632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:47.454469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575297363943762:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:47.454549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:47.454808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575297363943767:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:47.457459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:47.468898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:17:47.469959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575297363943769:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:48.286329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TColumnEngineTestLogs::IndexWriteLoadRead >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::UpdateOn_Literal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=6184;before_rows=0;after_rows=100; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); >> KqpSinkLocks::InvalidateOnCommit >> KqpWrite::ProjectReplace [GOOD] >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> KqpImmediateEffects::UpdateOn >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache >> KqpEffects::InsertAbort_Params_Duplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace [GOOD] Test command err: Trying to start YDB, gRPC: 30759, MsgBus: 27000 2025-04-07T14:17:47.116929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575297067663065:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:47.116995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e63/r3tmp/tmpvFDB9h/pdisk_1.dat 2025-04-07T14:17:47.374275Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30759, node 1 2025-04-07T14:17:47.453816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:47.453943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:47.455646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:47.473847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:47.473878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:47.473885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:47.473979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27000 TClient is connected to server localhost:27000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:47.912800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.930545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:48.034669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:48.140594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:48.212626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.448453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575305657599380:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.448534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.652773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.675346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.694191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.718373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.740468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.766149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.803983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575305657599871:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.804063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.804108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575305657599876:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.807125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:49.814829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575305657599878:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> RetryPolicy::RetryWithBatching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-04-07T14:17:15.232155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575159936358254:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:15.232314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00185d/r3tmp/tmpijnXOA/pdisk_1.dat 2025-04-07T14:17:15.549169Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21585, node 1 2025-04-07T14:17:15.560751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:17:15.571789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.571811Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.590794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:15.591456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.591659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:15.596726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.597127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:15.597252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:15.597379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.597411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.597467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:15.597552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.601660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:15.611957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:15.611994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:15.612003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:15.612104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:15.895134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.900832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:15.902280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.904302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:15.904504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:15.904545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:15.905433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:15.906013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:15.906038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:15.907503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.910833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035435954, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.910874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:15.911139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:15.913159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.913281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.913334Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:15.913432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:15.913479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:15.913521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:15.915494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:15.915545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:15.915559Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:15.915646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:17.446396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575168526293824:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:17.446526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:17.636980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.637487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:17.638063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:17.638099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.645728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2025-04-07T14:17:17.645939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.646123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.646186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:17.646581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:17.647134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.647191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.647208Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:17.647423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.647467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.647476Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:17.651276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:17.651380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:17:17.652881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:17.709232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:17.709255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:17.709341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976710658:0 3 -> 128 2025-04-07T14:17:17.710680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#2 ... cation in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:28.876411Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:17:28.876500Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:28.876544Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:28.876555Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:17:28.877289Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:17:28.877327Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:17:28.877341Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T14:17:28.877397Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-07T14:17:28.878712Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490575215847556910:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:17:28.955323Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:17:28.955483Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:17:28.957380Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-07T14:17:33.224157Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7490575237461365318:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:33.224197Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00185d/r3tmp/tmpNjQQrT/pdisk_1.dat 2025-04-07T14:17:33.322080Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:33.348098Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:33.348171Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:33.350396Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17364, node 10 2025-04-07T14:17:33.391190Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-07T14:17:33.391214Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:33.391222Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:33.391340Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:33.624797Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:33.625177Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:33.625206Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:33.626783Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:33.626913Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:33.626927Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:33.627677Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:33.628260Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:33.628280Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:33.629593Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:33.632768Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035453678, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:33.632798Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:33.633058Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:33.634555Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:33.634733Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:33.634790Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:33.634879Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:33.634922Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:33.634962Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:33.635696Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:33.635739Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:33.635754Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:33.635803Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2025-04-07T14:17:38.224539Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490575237461365318:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:38.224616Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: 
Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2025-04-07T14:17:48.310875Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:17:48.310930Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 3775, MsgBus: 6289 2025-04-07T14:17:42.390058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575276288578306:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:42.390118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb2/r3tmp/tmpZb0G1i/pdisk_1.dat 2025-04-07T14:17:42.664524Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3775, node 1 2025-04-07T14:17:42.706402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:42.706428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:42.706434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:42.706527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:42.725698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:42.725828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:42.727358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6289 TClient is connected to server localhost:6289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:43.143711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:43.165671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.288138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.444827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.517562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.798364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575284878514607:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.798463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.022995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.049441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.079972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.107512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.130494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.160130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:45.195647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575289173482394:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.195714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.195877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575289173482399:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:45.198834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:45.207251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575289173482401:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:45.944441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18580, MsgBus: 10351 2025-04-07T14:17:47.090290Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575294618157037:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:47.090404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000eb2/r3tmp/tmpXYMetO/pdisk_1.dat 2025-04-07T14:17:47.180117Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:47.203552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:47.203631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:47.205270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18580, node 2 2025-04-07T14:17:47.261153Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:47.261181Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:47.261194Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:47.261299Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10351 TClient is connected to server localhost:10351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:47.653029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.670499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:47.719208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.860923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.920910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.595893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575303208093320:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.595973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.632965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.661023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.683439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.706766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.731143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.763891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.805677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575303208093812:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.805770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.805999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575303208093817:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.808613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:49.816316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575303208093819:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:50.814968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.426692Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWJlYWZlNTItOGIxMjQ1OC05NTQyN2E5My04MDBlM2FiNg==, ActorId: [2:7490575307503061427:2461], ActorState: ExecuteState, TraceId: 01jr89tqzp3j1fv9ghntkenqzk, Create QueryResponse for error on request, msg: Error while locks merge >> KqpImmediateEffects::UpdateAfterInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-07T14:12:54.091737Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.091769Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.091787Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-07T14:12:54.092310Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:12:54.092371Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.092398Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.093511Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009568s 2025-04-07T14:12:54.093975Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-07T14:12:54.094001Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.094015Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.094050Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007361s 2025-04-07T14:12:54.094335Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-07T14:12:54.094362Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.094396Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:12:54.094433Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007951s 2025-04-07T14:12:54.107341Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744035174107316 2025-04-07T14:12:54.444835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574038880443770:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.445069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:12:54.467564Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574039425860646:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:12:54.468764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00109d/r3tmp/tmpYufTg2/pdisk_1.dat 2025-04-07T14:12:54.625294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.648502Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:12:54.884905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.885008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.889409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:12:54.889475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:12:54.894725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.901431Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:12:54.902893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:12:54.905051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4360, node 1 2025-04-07T14:12:54.945335Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:54.945351Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:12:55.049992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00109d/r3tmp/yandexgkH4yv.tmp 2025-04-07T14:12:55.050026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00109d/r3tmp/yandexgkH4yv.tmp 2025-04-07T14:12:55.050188Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00109d/r3tmp/yandexgkH4yv.tmp 2025-04-07T14:12:55.050415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:12:55.215771Z INFO: TTestServer started on Port 8011 GrpcPort 4360 TClient is connected to server localhost:8011 
PQClient connected to localhost:4360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:12:55.495329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:12:55.528619Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:12:57.208091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574052310762641:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.208172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574052310762631:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.208281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:12:57.212541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:12:57.226304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574052310762650:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:12:57.537630Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574052310762693:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.537969Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDUxZjM0ZDUtZDljNTlmZTktNzE5N2YzOWMtYjBhOGQ2OWM=, ActorId: [2:7490574052310762619:2280], ActorState: ExecuteState, TraceId: 01jr89hrqpe5xjsppknnq0wda9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.538017Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574051765346726:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:12:57.539496Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGFlNzUxOGQtN2Q3YjUyOWYtOGI5NmE4MjItZGI5NmQxN2Q=, ActorId: [1:7490574051765346685:2302], ActorState: ExecuteState, TraceId: 01jr89hrwfcsm4qvedvkmahqy3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:12:57.540767Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.540801Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:12:57.592001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.709961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:12:57.821213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4360", true, true, 1000); 2025-04-07T14:12:58.189060Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr89hsd28w4j9m11xkrfpmqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc0YzYxOGMtODZkYjNlNTgtNzBlM2JlODEtOTEyNTdlMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to Clu ... 
0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2025-04-07T14:17:51.153438Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2025-04-07T14:17:51.153488Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2025-04-07T14:17:51.154452Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1744035471154 2025-04-07T14:17:51.154782Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-07T14:17:51.159223Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:17:51.159295Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159375Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T14:17:51.159415Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159465Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-07T14:17:51.159492Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159527Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-07T14:17:51.159554Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159609Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-07T14:17:51.159638Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159673Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-07T14:17:51.159698Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-07T14:17:51.159732Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-07T14:17:51.159756Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159786Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-07T14:17:51.159810Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159844Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-07T14:17:51.159868Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159903Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-07T14:17:51.159929Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-07T14:17:51.159962Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-07T14:17:51.159981Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-07T14:17:51.160085Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:17:51.160186Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:17:51.160230Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T14:17:51.160387Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-07T14:17:51.160430Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T14:17:51.160643Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-07T14:17:51.160691Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:17:51.160771Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744035471151 queuesize 0 startOffset 0 2025-04-07T14:17:51.160984Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 6 queued_in_partition_duration_ms: 2 } 2025-04-07T14:17:51.161035Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 1 2025-04-07T14:17:51.161071Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 2 2025-04-07T14:17:51.161093Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 3 2025-04-07T14:17:51.161127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 4 2025-04-07T14:17:51.161156Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 5 2025-04-07T14:17:51.161178Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 6 2025-04-07T14:17:51.161196Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 7 2025-04-07T14:17:51.161223Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 8 2025-04-07T14:17:51.161248Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 9 2025-04-07T14:17:51.161271Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: acknoledged message 10 2025-04-07T14:17:51.161532Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:17:51.161584Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session will now close 2025-04-07T14:17:51.161644Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: aborting 2025-04-07T14:17:51.162556Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:17:51.162585Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session is aborting and will not restart 2025-04-07T14:17:51.162649Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0] Write session: destroy 2025-04-07T14:17:51.162759Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0 grpc read done: success: 0 data: 2025-04-07T14:17:51.162786Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0 grpc read failed 2025-04-07T14:17:51.162834Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0 2025-04-07T14:17:51.162863Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d18dd2c6-4275924-2ffc066-7c5a519_0 is DEAD 2025-04-07T14:17:51.163384Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:17:51.163515Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:17:51.163571Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7490575314619050488:2608] destroyed 2025-04-07T14:17:51.163624Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast >> KqpPg::TableDeleteWhere [GOOD] |96.9%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpSinkTx::SnapshotRO >> KqpImmediateEffects::InsertDuplicates [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingRequest >> KqpEffects::UpdateOn_Literal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-04-07T14:15:30.239078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:30.239373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:30.239510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d3/r3tmp/tmpKkVnBo/pdisk_1.dat 2025-04-07T14:15:30.601048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12940, node 1 2025-04-07T14:15:30.816486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:30.816540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:30.816568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:30.817122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:30.819070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.913454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:30.913589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:30.938381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6107 2025-04-07T14:15:31.613998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.414649Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:34.448706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:34.448792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:34.494349Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:34.502145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:34.703680Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:34.767662Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:34.767774Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:34.798929Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:34.799118Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:34.799339Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:34.799410Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:34.799464Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:34.799516Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:34.799565Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:34.799620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:34.800054Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:34.999318Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:34.999421Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:35.005439Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:35.012919Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:35.013726Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:35.014704Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:35.035065Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:35.035138Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:35.035208Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:35.037207Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:35.037311Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:35.051903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:35.062615Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:35.062787Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:35.084307Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:35.100738Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:35.143336Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:35.408317Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:35.565346Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:36.319131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:36.319255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:36.333556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:36.417791Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:36.417949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:36.418190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:36.418281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:36.418368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:36.418443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:36.418523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:36.418594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:36.418659Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:36.418730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:36.418805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:36.418893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:36.443007Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:36.443114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:36.443252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:36.443301Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:36.443504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:36.443548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:36.443646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... nt = 0, need schemeshards count = 1 2025-04-07T14:17:48.981972Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:17:48.982101Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7214:5385], server id = [2:7215:5386], tablet id = 72075186224037897, status = OK 2025-04-07T14:17:48.982162Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:17:48.982220Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7212:5383], StatRequests.size() = 1 2025-04-07T14:17:49.096891Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzU4MmMzMC05NWU0Zjc1NS05MjY0NjA3YS05ZTAzOTgxZA==, TxId: 2025-04-07T14:17:49.096951Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzU4MmMzMC05NWU0Zjc1NS05MjY0NjA3YS05ZTAzOTgxZA==, TxId: 2025-04-07T14:17:49.097574Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:17:49.111346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:17:49.111411Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:17:49.199695Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:17:49.199777Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:17:49.254973Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7214:5385], schemeshard count = 1 2025-04-07T14:17:50.046661Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:17:50.046776Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:17:50.050132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:17:50.066521Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:17:50.066910Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:17:50.066950Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2025-04-07T14:17:50.080200Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:17:51.091593Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:17:51.091664Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:17:51.091694Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-07T14:17:51.091733Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:17:51.091773Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:17:51.093162Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:17:51.106732Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-07T14:17:51.106874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:17:51.107290Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:17:51.107351Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:17:51.108046Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2025-04-07T14:17:51.108158Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-07T14:17:51.108716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:17:51.124956Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2025-04-07T14:17:51.125078Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:17:51.125229Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:17:51.125755Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7351:5464], server id = [2:7352:5465], tablet id = 72075186224037899, status = OK 2025-04-07T14:17:51.125873Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7351:5464], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:17:51.126880Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:17:51.126968Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:17:51.127225Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:17:51.127414Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:17:51.127704Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:17:51.129432Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7351:5464], server id = [2:7352:5465], tablet id = 72075186224037899 2025-04-07T14:17:51.129480Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:17:51.129942Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:17:51.145743Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7369:5482]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:17:51.145864Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:17:51.145889Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7369:5482], StatRequests.size() = 1 2025-04-07T14:17:51.252557Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWZlZmRjZjQtMjI2OTg4YzItNTJmMThiYmQtZjg1MzczYzk=, TxId: 2025-04-07T14:17:51.252631Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWZlZmRjZjQtMjI2OTg4YzItNTJmMThiYmQtZjg1MzczYzk=, TxId: 2025-04-07T14:17:51.253174Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:17:51.266728Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:17:51.266780Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:17:51.806999Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:17:51.807097Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:17:52.286612Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:17:52.286691Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-07T14:17:52.286758Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:17:53.384438Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:17:53.384649Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:17:53.395615Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:17:53.395751Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:17:53.395788Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:17:53.396412Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:17:53.410083Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:17:53.410457Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:17:53.410516Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:17:53.410823Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:17:53.424370Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:17:53.424559Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:17:53.425098Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7462:5534], server id = [2:7463:5535], tablet id = 72075186224037899, status = OK 2025-04-07T14:17:53.425211Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7462:5534], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:17:53.425743Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:17:53.425814Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:17:53.426060Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:17:53.426225Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:17:53.426483Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:17:53.428462Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7462:5534], server id = [2:7463:5535], tablet id = 72075186224037899 2025-04-07T14:17:53.428500Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:17:53.429439Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:17:53.449669Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2ZmY2IyYTQtZmZkYTk5MDItODA3NmIzN2ItYzZmNmJhZTc=, TxId: 2025-04-07T14:17:53.449730Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2ZmY2IyYTQtZmZkYTk5MDItODA3NmIzN2ItYzZmNmJhZTc=, TxId: 2025-04-07T14:17:53.450173Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:17:53.464479Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:17:53.464551Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2657:3164] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere [GOOD] Test command err: Trying to start YDB, gRPC: 26952, MsgBus: 30718 2025-04-07T14:15:26.934298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574692096843028:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.934443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00213e/r3tmp/tmpBJjUHD/pdisk_1.dat 2025-04-07T14:15:27.211996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.212127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.213952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.227652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26952, node 1 2025-04-07T14:15:27.294307Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.294340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.294347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.294440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30718 TClient is connected to server localhost:30718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.940916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:29.546935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:15:29.658842Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 0'::bytea, 'bytea 0'::bytea ) 2025-04-07T14:15:29.675330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574704981745519:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.675344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574704981745511:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.675465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:29.678856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:29.688887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574704981745525:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 1'::bytea, 'bytea 1'::bytea ) --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 2'::bytea, 'bytea 2'::bytea ) \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 2025-04-07T14:15:30.653215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:15:30.697766Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '0'::int2, '{a0, b10}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '1'::int2, '{a1, b11}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '2'::int2, '{a2, b12}'::_bytea ) {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} 16 2025-04-07T14:15:31.482923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.513317Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'false'::bool, 'false'::bool ) --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'true'::bool, 'true'::bool ) 2025-04-07T14:15:31.931660Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574692096843028:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.931719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; f f t t 18 2025-04-07T14:15:32.061482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.090643Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '0'::"char", '0'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '1'::"char", '1'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '2'::"char", '2'::"char" ) 0 0 1 1 2 2 21 2025-04-07T14:15:32.849913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-07T14:15:32.887192Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 23 2025-04-07T14:15:33.580563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-07T14:15:33.608331Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '0'::int4, '0'::int4 ) --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '1'::int4, '1'::int4 ) 
--!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '2'::int4, '2'::int4 ) 0 0 1 1 2 2 20 2025-04-07T14:15:34.320254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710729:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.350611Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '0'::int8, '0'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '1'::int8, '1'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '2'::int8, '2'::int8 ) 0 0 1 1 2 2 700 2025-04-07T14:15:35.106268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710741:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.136770Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '0.5'::float4, '0.5'::float4 ) --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '1.5'::float4, '1.5'::float4 ) --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '2.5'::float4, '2.5'::float4 ) 0.5 0.5 1.5 1.5 2.5 2.5 701 2025-04-07T14:15:35.886823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710753:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.914566Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '0.5'::float8, '0.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '1.5'::float8, '1.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '2.5'::float8, '2.5'::float8 ) 0.5 0.5 1.5 1.5 2.5 2.5 25 2025-04-07T14:15:36.656445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710765:0, at schemeshard: 72057594046644480 2025-04-07T14:15:36.692428Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 0'::text, 'text 0'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 1'::text, 'text 1'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 2'::text, 'text 2'::text ) text 0 text 0 text 1 text 1 text 2 text 2 1042 2025-04-07T14:15:37.434479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2 ... 
025-04-07T14:17:50.915320Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:50.933989Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715829:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.986248Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 628 2025-04-07T14:17:51.001483Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715831:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.052851Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.067338Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715833:0, at schemeshard: 72057594046644480 601 2025-04-07T14:17:51.122197Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.147954Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715835:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.226301Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715836:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.287078Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 603 2025-04-07T14:17:51.303662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715838:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.355390Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.372927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715840:0, at schemeshard: 72057594046644480 602 2025-04-07T14:17:51.428473Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.445679Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715842:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.499704Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.516332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715844:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.569080Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 604 2025-04-07T14:17:51.584346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715846:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.630691Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.647372Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715848:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.697519Z node 6 :READ_TABLE_API WARN: ForgetAction 
occurred, send TEvPoisonPill 718 2025-04-07T14:17:51.713791Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715850:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.764231Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.780153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715852:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.831211Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 869 2025-04-07T14:17:51.848205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715854:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.910942Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:51.927703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715856:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.980310Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 650 2025-04-07T14:17:51.998239Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715858:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.045943Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.063249Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715860:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.115244Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 829 2025-04-07T14:17:52.130402Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715862:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.183854Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.201109Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715864:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.247661Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 774 2025-04-07T14:17:52.266242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715866:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.314669Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.330859Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715868:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.397317Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2950 2025-04-07T14:17:52.415319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715870:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.493928Z node 6 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715871:0, at schemeshard: 72057594046644480 114 2025-04-07T14:17:52.551099Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.564811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715873:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.624085Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.643076Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715875:0, at schemeshard: 72057594046644480 3802 2025-04-07T14:17:52.700569Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.717875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715877:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.768790Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.789166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715879:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.849698Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 4072 2025-04-07T14:17:52.869778Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715881:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.926156Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:52.940405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715883:0, at schemeshard: 72057594046644480 1422025-04-07T14:17:52.996326Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:53.012293Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715885:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.108745Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:53.148816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715887:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.193110Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 3615 2025-04-07T14:17:53.210931Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715889:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.269743Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:53.287310Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715891:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.342721Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 
3614 2025-04-07T14:17:53.359323Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715893:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.411109Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:53.438484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715895:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.495647Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 22 2025-04-07T14:17:53.512509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715897:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.562728Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-07T14:17:53.585575Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715899:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.643086Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpInplaceUpdate::SingleRowPgNotNull >> TTableProfileTests::OverwriteStoragePolicy [GOOD] |96.9%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-04-07T14:17:03.692919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575107520774688:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:03.693018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186f/r3tmp/tmpGGz13R/pdisk_1.dat 2025-04-07T14:17:03.948844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15118, node 1 2025-04-07T14:17:04.038691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:04.038784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:04.039144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:04.039164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:04.039170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:04.039263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:04.041523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:04.266974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:04.272144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:04.273392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:04.275171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:04.275401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:04.275423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:04.276321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:04.277142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:04.277171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:04.278565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:04.281768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035424327, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:04.281797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:04.282070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:04.283495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:04.283640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:04.283684Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:04.283773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:04.283832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:04.283875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:04.285589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:04.285639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:04.285653Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:04.285715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:04.371872Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:50792) has now valid token of root@builtin 2025-04-07T14:17:04.414609Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-07T14:17:06.988721Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575121725336894:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:06.988825Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186f/r3tmp/tmpOHTLNJ/pdisk_1.dat 2025-04-07T14:17:07.079584Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:07.106227Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:07.106297Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:07.108475Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23819, node 4 2025-04-07T14:17:07.159390Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:07.159409Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:07.159414Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:07.159539Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:07.317108Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.317479Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:07.317519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.319120Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:07.319270Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:07.319285Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:17:07.320040Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:07.320332Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:07.320346Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:07.321411Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:07.324072Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035427372, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:07.324101Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:07.324322Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:07.325631Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:07.325758Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:07.325800Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:07.325855Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:07.325890Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 2 ... N: failed to initialize from file: (empty maybe) 2025-04-07T14:17:45.969878Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:46.266313Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.266717Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:46.266751Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.268714Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:46.268901Z node 22 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:46.268921Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:46.269878Z node 22 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:46.270500Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:46.270524Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:46.271638Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:46.275789Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035466320, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:46.275835Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:46.276116Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:46.278109Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:46.278311Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:46.278382Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:46.278483Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:46.278523Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:46.278572Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:46.279352Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:46.279397Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:46.279416Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:17:46.279480Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:17:46.378160Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:59422) has now valid token of root@builtin 2025-04-07T14:17:46.448604Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-07T14:17:50.738862Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7490575308606496887:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:50.738933Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00186f/r3tmp/tmpfT6YqR/pdisk_1.dat 2025-04-07T14:17:50.866688Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:50.904523Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:50.904613Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:50.907297Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24048, node 25 2025-04-07T14:17:50.962483Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:50.962512Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:50.962521Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:50.962662Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:51.264988Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.265389Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:51.265419Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:51.267696Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:51.267885Z node 25 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:51.267900Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:17:51.269458Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:51.269482Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:51.269815Z node 25 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:51.270848Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.274544Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035471318, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:51.274585Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:51.274896Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:51.276721Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:51.276911Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:51.276984Z node 25 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:51.277110Z node 25 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:51.277174Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:51.277225Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:51.278125Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:51.278169Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:51.278191Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:51.278292Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node 2025-04-07T14:17:51.459724Z node 25 :TICKET_PARSER ERROR: Ticket 1BB8B5ED7EAA65CF3359DCE590C753B83ED51228: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. 
Client certificate failed verification" } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 65394, MsgBus: 25378 2025-04-07T14:17:46.593891Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575290150538731:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:46.593976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e94/r3tmp/tmpgN95Fq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65394, node 1 2025-04-07T14:17:46.843223Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:46.843241Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:46.852874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:46.873161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:46.873185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:46.873205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:46.873305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:46.943431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:46.943511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:46.945296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25378 TClient is connected to server localhost:25378 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:47.273813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.288640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:47.406112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:47.537761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:17:47.598067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:17:48.895107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575298740475042:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:48.895252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.138306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.159971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.189336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.212904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.237372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.302868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.339617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575303035442835:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.339706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.339854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575303035442840:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.343004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:49.351317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575303035442842:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 6067, MsgBus: 10860 2025-04-07T14:17:51.080494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575312435707297:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:51.080609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e94/r3tmp/tmpmxLxbG/pdisk_1.dat 2025-04-07T14:17:51.188811Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:51.208181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:51.208254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:51.209503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6067, node 2 2025-04-07T14:17:51.253361Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:51.253383Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:51.253390Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:51.253480Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10860 TClient is connected to server localhost:10860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:51.637575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.642145Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:51.653737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.723615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:51.880315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.942342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.619838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575321025643599:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.619899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.650240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.678467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.699683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.724328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.749389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.773224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.809129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575321025644093:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.809201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575321025644099:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.809211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.811445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:53.817702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575321025644101:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpEffects::UpdateOn_Select [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates [GOOD] >> KqpEffects::InsertAbort_Params_Conflict >> KqpImmediateEffects::UpdateOn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-04-07T14:17:23.967225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575191283459983:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:23.967339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00183d/r3tmp/tmphXut04/pdisk_1.dat 2025-04-07T14:17:24.273110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26651, node 1 2025-04-07T14:17:24.346502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:24.346652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:24.351443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:24.351466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:24.351473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:24.351574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:24.358082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:24.589090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.593786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:24.595043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.596868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:24.597105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:24.597134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-07T14:17:24.597944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:24.598531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:24.598552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:24.599708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.602496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035444648, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:24.602517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:24.602747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:24.604180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:24.604324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:24.604383Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:24.604454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:24.604497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:24.604542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:24.606269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:24.606314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:24.606324Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:24.606386Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:22521 2025-04-07T14:17:24.816297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.816764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:24.816790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.819798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-07T14:17:24.819952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:24.820155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:24.820261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:24.820277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 waiting... 2025-04-07T14:17:24.821195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.821235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.821254Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:24.821488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.821567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.821604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:17:24.823021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:24.823038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:17:24.824227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:24.824674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.827709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035444872, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:24.827736Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-07T14:17:24.827955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:17:24.829656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:24.829870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:24.829937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:17:24.829995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:17:24.830023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:17:24.830072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T14:17:24.830615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.830666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.830678Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:17:24.830880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.830906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.830916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:17:24.830944Z node 1 :FLA ... 
17:53.835441Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:53.835661Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:53.835683Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:53.837031Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:53.837059Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:53.846952Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:53.847004Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:53.854789Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:53.854825Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:53.854865Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 3 -> 128 2025-04-07T14:17:53.856691Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715664:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:53.898418Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035473930, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:53.898511Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715664:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035473930 2025-04-07T14:17:53.898656Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 129 2025-04-07T14:17:53.901068Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:53.901623Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:53.901729Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:17:53.903006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:17:53.903060Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-07T14:17:53.903089Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 14 2025-04-07T14:17:53.903316Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-07T14:17:53.903342Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 
2025-04-07T14:17:53.903356Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 3 2025-04-07T14:17:53.930497Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037935 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 27 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037935 CpuTimeUsec: 1378 } } 2025-04-07T14:17:53.932168Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037936 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 24 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037936 CpuTimeUsec: 1449 } } 2025-04-07T14:17:53.933773Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037932 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 23 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037932 CpuTimeUsec: 1379 } } 2025-04-07T14:17:53.936250Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037938 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 21 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037938 CpuTimeUsec: 1344 } } 2025-04-07T14:17:53.937943Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037931 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 37 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037931 CpuTimeUsec: 1396 } } 2025-04-07T14:17:53.939430Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037930 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 15 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037930 CpuTimeUsec: 1128 } } 2025-04-07T14:17:53.941021Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037934 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 14 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037934 CpuTimeUsec: 1086 } } 2025-04-07T14:17:53.943097Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037939 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 25 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037939 CpuTimeUsec: 1288 } } 2025-04-07T14:17:53.945026Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037937 Status: COMPLETE TxId: 281474976715664 
Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 24 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037937 CpuTimeUsec: 1311 } } 2025-04-07T14:17:53.953500Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.953832Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.953971Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.954845Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037933 Status: COMPLETE TxId: 281474976715664 Step: 1744035473930 OrderId: 281474976715664 ExecLatency: 1 ProposeLatency: 7 DomainCoordinators: 72075186224037888 TxStats { PerShardStats { ShardId: 72075186224037933 CpuTimeUsec: 1852 } } 2025-04-07T14:17:53.955083Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.955258Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.955964Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.956178Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.956321Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.956457Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.957558Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:17:53.957601Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.957651Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 129 -> 240 2025-04-07T14:17:53.959777Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:0 ProgressState 2025-04-07T14:17:53.959947Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2025-04-07T14:17:53.960049Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 TClient is connected to server localhost:11851 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1744035473930 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapsho... (TRUNCATED) 2025-04-07T14:17:54.354844Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-07T14:17:54.363864Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:17:54.751166Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7490575304343702226:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:54.751324Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::DataValidation >> KqpWrite::InsertRevert |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22504, MsgBus: 23865 2025-04-07T14:17:42.018254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575275585049377:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:42.018325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112c/r3tmp/tmpTD37cA/pdisk_1.dat 2025-04-07T14:17:42.268166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22504, node 1 2025-04-07T14:17:42.339792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:42.339827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:42.339835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:42.339965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:42.363036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:42.363131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:42.364927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23865 TClient is connected to server localhost:23865 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:42.766744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.779141Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:42.792632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:42.895623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.017563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:43.087911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:44.443557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575284174985672:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.443676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.650430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.681554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.709771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.738044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.759216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.784991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:44.830720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575284174986163:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.830797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.830952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575284174986168:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:44.836028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:44.844143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575284174986170:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:46.278040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGQ2ZjMyNDAtMzRjNDhhOGEtZjQ2ZTIwYS0zZmVlZTM2Mw==, ActorId: [1:7490575288469953773:2459], ActorState: ExecuteState, TraceId: 01jr89tjzr19rszye4gfj40j6m, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 26192, MsgBus: 30615 2025-04-07T14:17:49.070365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:17:49.070775Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:17:49.070850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00112c/r3tmp/tmpFyWUkT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26192, node 2 2025-04-07T14:17:49.412776Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:49.413758Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:49.413817Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:49.413857Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:49.414204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:49.460917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:49.461068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:49.472718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30615 TClient is connected to server localhost:30615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:49.676412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.740796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:50.031792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:50.494949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:50.829328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.370055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1726:3345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:51.370236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:51.387147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.618978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:51.894431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.158277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.481760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.780195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.141513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2297:3790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.141628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.141864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2302:3795], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.147060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:53.335986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2304:3797], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:54.087390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:17:54.388054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-07T14:17:54.723516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpPg::TempTablesWithCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 28954, MsgBus: 9224 2025-04-07T14:17:52.311478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575316619195848:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:52.311581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dbf/r3tmp/tmpazkZ2y/pdisk_1.dat 2025-04-07T14:17:52.579970Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28954, node 1 2025-04-07T14:17:52.640967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:52.640992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:52.640997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:52.641072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:52.658276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:52.658352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:52.660194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9224 TClient is connected to server localhost:9224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:53.049133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.064698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.190895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.318502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.379058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:54.785160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575325209132160:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:54.785274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:54.989547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.009735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.030361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.050349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.076227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.110667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.142450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575329504099948:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.142506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.142616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575329504099953:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.145566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:55.155319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575329504099955:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:55.938329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 2534, MsgBus: 8083 2025-04-07T14:17:48.265141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575300758896826:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:48.265241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e47/r3tmp/tmp5niQeK/pdisk_1.dat 2025-04-07T14:17:48.496211Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2534, node 1 2025-04-07T14:17:48.557709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:48.557731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:48.557740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:48.557819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:48.592165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:48.592280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:48.593909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8083 TClient is connected to server localhost:8083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:48.957101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:48.977071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:49.088046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.218255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.269934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:50.594194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575309348833135:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:50.594312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:50.774564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.796912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.818745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.842166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.864405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.896746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:50.937154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575309348833627:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:50.937227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:50.937295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575309348833632:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:50.943337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:50.951354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575309348833634:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 26451, MsgBus: 23343 2025-04-07T14:17:52.665929Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575318832770304:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:52.666010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e47/r3tmp/tmpW9hU3X/pdisk_1.dat 2025-04-07T14:17:52.728226Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26451, node 2 2025-04-07T14:17:52.767697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:52.767777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:52.769287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:52.787084Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:52.787101Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:52.787108Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:52.787188Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23343 TClient is connected to server localhost:23343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:53.169289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.183873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.224196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:53.356764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.410948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.243950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575331717673903:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.244032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.283033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.307119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.329286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.353645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.378181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.443803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.484122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575331717674398:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.484195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.484466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575331717674403:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.487231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:55.495176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575331717674405:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache [GOOD] Test command err: Trying to start YDB, gRPC: 15465, MsgBus: 2056 2025-04-07T14:15:26.859733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574688632231869:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:26.859774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00213d/r3tmp/tmp7N0bTc/pdisk_1.dat 2025-04-07T14:15:27.215782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15465, node 1 2025-04-07T14:15:27.262447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:27.262557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:27.265060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:27.305310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:27.305334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:27.305349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:27.305476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2056 TClient is connected to server localhost:2056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:15:27.907812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:15:29.531107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2025-04-07T14:15:29.724151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2025-04-07T14:15:29.782660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 abcd 2025-04-07T14:15:29.874653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-07T14:15:29.963584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2025-04-07T14:15:30.092405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2025-04-07T14:15:30.227126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2025-04-07T14:15:30.284260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2025-04-07T14:15:30.352999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2025-04-07T14:15:30.438355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-07T14:15:30.528179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2025-04-07T14:15:30.618636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-07T14:15:30.705727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2025-04-07T14:15:30.760179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2025-04-07T14:15:30.842412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-04-07T14:15:30.928134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-07T14:15:31.039779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2025-04-07T14:15:31.146166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2025-04-07T14:15:31.224730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2025-04-07T14:15:31.291617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2025-04-07T14:15:31.349752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-04-07T14:15:31.494167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-07T14:15:31.615237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2025-04-07T14:15:31.757289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-07T14:15:31.855218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-07T14:15:31.859946Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574688632231869:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:15:31.860042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Bulk upsert to table '/Root/Coerce_pgnumeric_17472595041006102391_7644398022171395976'Unable to coerce value for pgnu ... WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:47.389377Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:47.389388Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:47.389547Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18076 TClient is connected to server localhost:18076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:47.891254Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.058170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490575312886141129:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:51.058183Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7490575312886141135:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:51.058287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:51.062029Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:51.071804Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7490575312886141143:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:17:51.174116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-04-07T14:17:51.381938Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found 2025-04-07T14:17:51.396467Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490575312886141455:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:17:51.396882Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YjcyZjcxZDctZjE3ODYzYTEtM2JkZThhMjEtNWM4ZDcxMzE=, ActorId: [6:7490575312886141125:2301], ActorState: ExecuteState, TraceId: 01jr89tr0rdhft7jnbpcmcy61j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:17:51.433432Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7490575312886141466:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:17:51.435395Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YjI5ZGQzNDctZWZmOTRiNjYtOTQ4MGU2NjQtZmRiZGQ4N2E=, ActorId: [6:7490575312886141462:2342], ActorState: ExecuteState, TraceId: 01jr89tr1vczehfpntpxdvv49y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 28862, MsgBus: 13449 2025-04-07T14:17:52.363107Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7490575315825654524:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:52.363201Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00213d/r3tmp/tmp1wfsdT/pdisk_1.dat 2025-04-07T14:17:52.482875Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28862, node 7 2025-04-07T14:17:52.510821Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:52.510919Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:52.512547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:52.548680Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:52.548705Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:52.548713Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:52.548828Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13449 TClient is connected to server localhost:13449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:53.142227Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.054354Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490575333005524346:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.054355Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7490575333005524338:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.054428Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.058177Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:56.066914Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7490575333005524352:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:17:56.151150Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.222866Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-07T14:17:56.241273Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.554543Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.854728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480 2025-04-07T14:17:57.290651Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-07T14:17:57.336218Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-07T14:17:57.358919Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7490575337300492438:2420], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:17:57.360947Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YjMyMTIyNTYtOWU5YTFmNzItODdkOWE4NTEtODZlZjJiOTE=, ActorId: [7:7490575337300492436:2419], ActorState: ExecuteState, TraceId: 01jr89txtyf1dca1x39ke07923, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:17:57.363093Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7490575315825654524:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:57.363152Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete >> KqpSystemView::Sessions |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast [GOOD] Test command err: Trying to start YDB, gRPC: 23576, MsgBus: 18292 2025-04-07T14:17:54.600487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575325651636308:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:54.600559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d52/r3tmp/tmp0BsoBn/pdisk_1.dat 2025-04-07T14:17:54.849312Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23576, node 1 2025-04-07T14:17:54.910763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:54.910792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:54.910798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:54.910901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:54.943674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:54.943776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:54.945476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18292 TClient is connected to server localhost:18292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:55.340921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.357015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.459836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.573553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.653391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.958481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575334241572616:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.958588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.186904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.208924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.232238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.253840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.274662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.301543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.330533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575338536540404:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.330622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575338536540409:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.330675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.333749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:57.340908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575338536540411:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:58.003989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> KqpSysColV1::StreamInnerJoinSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 18653, MsgBus: 23942 2025-04-07T14:17:50.093141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575307679723207:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:50.094630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000df0/r3tmp/tmpv8ygeo/pdisk_1.dat 2025-04-07T14:17:50.349035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18653, node 1 2025-04-07T14:17:50.410877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:50.410920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:50.410935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:50.411027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:50.453669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:50.453813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:50.455699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23942 TClient is connected to server localhost:23942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:50.809989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:50.836797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:50.955373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.071213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.122310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:52.683142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575316269659487:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.683265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.891915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.914171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.936374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.963710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.985212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.008635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:53.043124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575320564627274:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.043205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.043239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575320564627279:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.045677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:53.052436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575320564627281:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 8922, MsgBus: 8594 2025-04-07T14:17:54.795712Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575326986511349:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:54.795836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000df0/r3tmp/tmporjMAm/pdisk_1.dat 2025-04-07T14:17:54.881267Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:54.899589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:54.899696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:54.901265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8922, node 2 2025-04-07T14:17:54.936378Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:54.936406Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:54.936414Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:54.936497Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8594 TClient is connected to server localhost:8594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:55.327572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.343095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.414153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:17:55.561760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.630115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:57.396124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575339871414957:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.396209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.428552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.454615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.477758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.500373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.524859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.550095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.604538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575339871415449:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.604605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.604627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575339871415454:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.607224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:57.614322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575339871415456:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 63487, MsgBus: 30249 2025-04-07T14:17:55.899237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575332180809016:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:55.899551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d44/r3tmp/tmphoKRTA/pdisk_1.dat 2025-04-07T14:17:56.129255Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63487, node 1 2025-04-07T14:17:56.180277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:56.180304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:56.180319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:56.180424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:56.240085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:56.240190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:56.241760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30249 TClient is connected to server localhost:30249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:56.575134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.593960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:56.690862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.822303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.879060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:57.970022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575340770745301:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.970144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:58.180740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.202541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.222920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.245008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.267725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.298457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.330986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575345065713090:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:58.331051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575345065713095:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:58.331055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:58.334531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:58.343320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575345065713097:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:59.001463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkLocks::EmptyRange >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 6105, MsgBus: 31006 2025-04-07T14:17:50.099981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575310384874292:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:50.100070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e1f/r3tmp/tmp1vWNnh/pdisk_1.dat 2025-04-07T14:17:50.352207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6105, node 1 2025-04-07T14:17:50.420951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:50.420988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:50.420995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:50.421087Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:50.448544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:50.448688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:50.450259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31006 TClient is connected to server localhost:31006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:50.847363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:50.869029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:50.999723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.113476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:51.166046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:52.437317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575318974810609:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.437422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.610423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.633407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.657740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.680794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.705938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.773625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:52.808898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575318974811106:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.808944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575318974811111:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.808967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:52.812482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:52.821049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575318974811113:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:53.754135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:17:54.299199Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575327564746210:2490], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=M2QyODFmNjUtZmQ4YmYyN2QtODA5NWVlYTYtZjYyNTA2ZDg=. CustomerSuppliedId : . TraceId : 01jr89tthnbh2aw82eyk20hm81. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T14:17:54.299632Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575327564746211:2491], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=M2QyODFmNjUtZmQ4YmYyN2QtODA5NWVlYTYtZjYyNTA2ZDg=. CustomerSuppliedId : . TraceId : 01jr89tthnbh2aw82eyk20hm81. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7490575327564746207:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:17:54.306747Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2QyODFmNjUtZmQ4YmYyN2QtODA5NWVlYTYtZjYyNTA2ZDg=, ActorId: [1:7490575323269778718:2459], ActorState: ExecuteState, TraceId: 01jr89tthnbh2aw82eyk20hm81, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9262, MsgBus: 16145 2025-04-07T14:17:55.041974Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575331979680338:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:55.042040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e1f/r3tmp/tmpoCHOLO/pdisk_1.dat 2025-04-07T14:17:55.128501Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:55.159951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:55.160029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:55.161609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9262, node 2 2025-04-07T14:17:55.196766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:55.196790Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:55.196800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:55.196874Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16145 TClient is connected to server localhost:16145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:55.533642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.544728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
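The PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION issues in this block ("Duplicated keys found., code: 2012" above and "Conflict with existing key., code: 2012" further down) are the expected behaviour of YQL INSERT: unlike UPSERT or REPLACE, INSERT does not overwrite, so a key that already exists in the table, or that appears twice in the inserted set, fails the statement and aborts the surrounding transaction. A rough YQL sketch of the pattern these KqpImmediateEffects/KqpEffects tests exercise (table name and values are illustrative, not taken from the test source):

    -- Key is the primary key; names and values are illustrative
    UPSERT INTO ConflictDemo (Key, Value) VALUES (1u, "old");   -- seed one row
    INSERT INTO ConflictDemo (Key, Value) VALUES (1u, "new");
    -- fails: Conflict with existing key., code: 2012 (transaction is aborted)
    REPLACE INTO ConflictDemo (Key, Value) VALUES (1u, "new");  -- overwrite semantics, would succeed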
2025-04-07T14:17:55.615336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.758042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.828152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:57.670748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575340569616624:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.670804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.702876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.725608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.760114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.784093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.808216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.839317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.871575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575340569617115:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.871647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.871657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575340569617120:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.874246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:57.881041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575340569617122:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:17:58.719023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.230344Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575349159552322:2497], TxId: 281474976715677, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89tzf5ebtdv6e0cf3411qb. SessionId : ydb://session/3?node_id=2&id=MThjZDYwNmItMTJkMjAyOTUtNThmM2EwZjYtZDhhMWM2MDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:17:59.230549Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575349159552324:2498], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr89tzf5ebtdv6e0cf3411qb. SessionId : ydb://session/3?node_id=2&id=MThjZDYwNmItMTJkMjAyOTUtNThmM2EwZjYtZDhhMWM2MDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490575349159552319:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:17:59.231530Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MThjZDYwNmItMTJkMjAyOTUtNThmM2EwZjYtZDhhMWM2MDQ=, ActorId: [2:7490575344864584724:2459], ActorState: ExecuteState, TraceId: 01jr89tzf5ebtdv6e0cf3411qb, Create QueryResponse for error on request, msg: >> KqpSystemView::NodesRange1 >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> KqpEffects::InsertAbort_Params_Conflict [GOOD] >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> KqpSystemView::PartitionStatsFollower |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk >> YdbTableBulkUpsert::DataValidation [GOOD] >> KqpWrite::InsertRevert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 1604, MsgBus: 18316 2025-04-07T14:17:52.438916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575318081438208:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:52.438999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000db0/r3tmp/tmpU9bTFF/pdisk_1.dat 2025-04-07T14:17:52.728081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1604, node 1 2025-04-07T14:17:52.798296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:52.798405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:52.800270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:52.804765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:52.804783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:52.804789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:52.804862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18316 TClient is connected to server localhost:18316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:53.229951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.253338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.367090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.491171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.547823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:54.969153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575326671374513:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:54.969243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.211269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.234293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.262934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.284467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.306018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.331978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:55.406962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575330966342307:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.407022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.407053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575330966342312:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:55.410184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:55.418528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575330966342314:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:56.425956Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575335261309968:2471], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jr89twppa17hcxdwytwk5gb7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MTQ3MTk2NzEtNTdmYWE5YjUtNjA2MDlkOS1kMDJkZjVhNg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-07T14:17:56.428985Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575335261309970:2472], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr89twppa17hcxdwytwk5gb7. SessionId : ydb://session/3?node_id=1&id=MTQ3MTk2NzEtNTdmYWE5YjUtNjA2MDlkOS1kMDJkZjVhNg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490575335261309965:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:17:56.434307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ3MTk2NzEtNTdmYWE5YjUtNjA2MDlkOS1kMDJkZjVhNg==, ActorId: [1:7490575335261309916:2459], ActorState: ExecuteState, TraceId: 01jr89twppa17hcxdwytwk5gb7, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4611, MsgBus: 9859 2025-04-07T14:17:57.311767Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575337080928421:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:57.311853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000db0/r3tmp/tmpTVNBIJ/pdisk_1.dat 2025-04-07T14:17:57.387769Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4611, node 2 2025-04-07T14:17:57.437782Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:57.437814Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:57.437841Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:57.437942Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:57.440795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:57.440888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:57.442484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9859 TClient is connected to server localhost:9859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:57.733307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:57.741361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:57.791555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:57.939410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:58.000137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:59.525033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575345670864712:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.525141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.557828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.582131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.605535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.628288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.653081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.681762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.718755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575345670865205:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.718824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.718898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575345670865210:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.722116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:59.730553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575345670865212:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:00.618836Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575349965832862:2471], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzhkZTMwY2UtOGY4M2VkYTItNmI5ZWU0MWYtMmUxNjVmNWQ=. TraceId : 01jr89v0w4djhxedf7ww8kstpq. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:18:00.619025Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575349965832864:2472], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzhkZTMwY2UtOGY4M2VkYTItNmI5ZWU0MWYtMmUxNjVmNWQ=. TraceId : 01jr89v0w4djhxedf7ww8kstpq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7490575349965832859:2458], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:18:00.619978Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhkZTMwY2UtOGY4M2VkYTItNmI5ZWU0MWYtMmUxNjVmNWQ=, ActorId: [2:7490575349965832809:2458], ActorState: ExecuteState, TraceId: 01jr89v0w4djhxedf7ww8kstpq, Create QueryResponse for error on request, msg: >> KqpSysColV0::InnerJoinTables |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 24941, MsgBus: 26504 2025-04-07T14:17:57.831600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575341003734202:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:57.831685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cfa/r3tmp/tmprJtvrY/pdisk_1.dat 2025-04-07T14:17:58.097681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24941, node 1 2025-04-07T14:17:58.162005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:58.162034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:58.162048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:58.162129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:58.163278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:58.163374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:58.165044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26504 TClient is connected to server localhost:26504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:58.570502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:58.587317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:58.685267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:58.811794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:58.883131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:59.959874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575349593670512:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:59.959979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:00.182091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.203954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.224125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.246150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.283777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.308245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.339940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575353888638303:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:00.340018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:00.340114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575353888638308:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:00.342923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:00.350567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575353888638310:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:01.178884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DataValidation [GOOD] Test command err: 2025-04-07T14:17:14.824841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575152876612223:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:14.824891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001862/r3tmp/tmpzH1zCk/pdisk_1.dat 2025-04-07T14:17:15.149701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13337, node 1 2025-04-07T14:17:15.155669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:15.156315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.156469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:15.158794Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.158813Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:15.161495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.162460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.162533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.162590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:15.162684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.190841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:15.190869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:15.190884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:15.190975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:15.211895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:15.212003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:15.216161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:15.426179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.430786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:15.431827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.433690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:15.433870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:15.433887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:15.434774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:15.435451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:15.435492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:15.436822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:15.439939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035435485, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:15.439966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:15.440227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:15.441571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:15.441749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:15.441833Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:15.441940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:15.441986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:15.442040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:15.443792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:15.443832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:15.443848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:15.443907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:17.154878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNulls_0x0006, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.155395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:17.155998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:17.156031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:17.157916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNulls_0x0006 
2025-04-07T14:17:17.158086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:17.158273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:17.158383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:17:17.158756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:17.159018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.159054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.159069Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:17.159251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:17.159283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:17.159291Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:17:17.163724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:17.163818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:17:17.165328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:17.221291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:17:17.221319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:17:17.221369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:17:17.222702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:17:17.225334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035437270, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:17.225386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035437270 2025-04-07T14:17:17.225522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2025-04-07T14:17:17.226788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046 ... ingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:58.160755Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.161186Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:58.161221Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.163489Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:58.163664Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:58.163686Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-04-07T14:17:58.165075Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:17:58.165779Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:58.165807Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:17:58.167535Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.171138Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035478213, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:58.171193Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:17:58.171420Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:17:58.172748Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:58.172965Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:58.173035Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:17:58.173134Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:17:58.173177Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:17:58.173236Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:17:58.173864Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:17:58.173917Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:17:58.173958Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:17:58.174042Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:18:00.909326Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestInvalidData, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.910251Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:18:00.911124Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:18:00.911167Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:00.913687Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestInvalidData 2025-04-07T14:18:00.913927Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:18:00.914198Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:18:00.914322Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046644480 2025-04-07T14:18:00.914999Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:18:00.915318Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:18:00.915372Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:18:00.915398Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:18:00.915649Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:18:00.915689Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:18:00.915705Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:18:00.920572Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:18:00.920681Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:18:00.922887Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:18:00.984301Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2025-04-07T14:18:00.984330Z node 13 :FLAT_TX_SCHEMESHARD INFO: 
TEvProposeTransactionResult at tablet: 72057594046644480 2025-04-07T14:18:00.984387Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:18:00.986294Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2025-04-07T14:18:00.990182Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035481034, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:18:00.990234Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1744035481034 2025-04-07T14:18:00.990345Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2025-04-07T14:18:00.992218Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:18:00.992551Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:18:00.992597Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2025-04-07T14:18:00.995136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:18:00.995252Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:18:00.995281Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:18:00.995563Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:18:00.995628Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:18:00.995650Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:18:01.002869Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1744035481034 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1574 } } 2025-04-07T14:18:01.003258Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-07T14:18:01.003305Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.003340Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2025-04-07T14:18:01.005327Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:18:01.005461Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 
2025-04-07T14:18:01.005542Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:31581 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid DyNumber string representation >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> KqpSysColV1::StreamSelectRange >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpSystemView::NodesRange2 >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> KqpSystemView::PartitionStatsSimple >> KqpSysColV1::InnerJoinSelectAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 21657, MsgBus: 11109 2025-04-07T14:17:54.065358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575325785587880:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:54.067058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d7a/r3tmp/tmpQfsFLD/pdisk_1.dat 2025-04-07T14:17:54.309276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21657, node 1 2025-04-07T14:17:54.378198Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:54.378226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:54.378234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:54.378335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:54.405085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:54.405203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:54.406925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11109 TClient is connected to server localhost:11109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:17:54.750096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:54.772304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:54.859047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:54.968986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:55.023174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:56.450011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575334375524173:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.450139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.649400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.669819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.690653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.711980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.734220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.798964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.831751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575334375524670:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.831835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.831880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575334375524675:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:56.834228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:17:56.840492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575334375524677:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:17:57.553180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20808, MsgBus: 27759 2025-04-07T14:17:58.982969Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575343548712909:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:58.983052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d7a/r3tmp/tmph3xb2j/pdisk_1.dat 2025-04-07T14:17:59.063996Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:59.087588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:59.087663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:59.088723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20808, node 2 2025-04-07T14:17:59.119383Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:59.119403Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:59.119415Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:59.119510Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27759 TClient is connected to server localhost:27759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:59.435207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:59.450509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:59.520120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:59.677204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:59.749169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.399400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575356433616498:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.399483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.428789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.468392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.493630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.517070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.540272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.606008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.642341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575356433616992:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.642423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.642543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575356433616997:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.645023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:01.652013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575356433616999:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:02.640644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.282293Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWJiNDA5NTUtMTY4M2EzZjEtYzJmNzk2N2EtN2NiNDc3NDA=, ActorId: [2:7490575365023552162:2496], ActorState: ExecuteState, TraceId: 01jr89v3kyff8qwq6t90mwxr73, Create QueryResponse for error on request, msg: >> KqpSysColV1::StreamInnerJoinSelect [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 61110, MsgBus: 28593 2025-04-07T14:17:59.893803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575348936600253:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:59.893880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017db/r3tmp/tmprVaWIl/pdisk_1.dat 2025-04-07T14:18:00.147700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61110, node 1 2025-04-07T14:18:00.203393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:00.203421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:00.203430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:00.203551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:00.255335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:00.255433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:00.257099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28593 TClient is connected to server localhost:28593 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:00.756079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:00.780977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:00.906471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:18:01.050967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.109965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.197425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575361821503876:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.197536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.402414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.435246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.461378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.490046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.516517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.558567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.635039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575361821504375:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.635154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.635161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575361821504380:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.638206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:02.645769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575361821504382:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:04.214650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035484212, txId: 281474976715671] shutting down >> KqpSysColV1::StreamSelectRowAsterisk >> KqpSysColV1::UpdateAndDelete [GOOD] |97.0%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 17956, MsgBus: 30197 2025-04-07T14:18:00.390271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575351608253246:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:00.390356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017d9/r3tmp/tmpDQuLZV/pdisk_1.dat 2025-04-07T14:18:00.624170Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17956, node 1 2025-04-07T14:18:00.695034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:00.695056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:00.695069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:00.695146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:00.718506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:00.718617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:00.720687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30197 TClient is connected to server localhost:30197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:01.135282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:01.152895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.265231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.395025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.456197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:02.938296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575360198189545:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.938420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.156253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.182065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.212481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.237452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.257510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.285035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.318286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575364493157334:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.318354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.318484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575364493157339:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.321206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:03.328882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575364493157341:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:04.624362Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035484646, txId: 281474976715671] shutting down |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 21199, MsgBus: 18444 2025-04-07T14:17:59.661688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575349199248267:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:59.662168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017e1/r3tmp/tmpDM9aMY/pdisk_1.dat 2025-04-07T14:17:59.949997Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21199, node 1 2025-04-07T14:18:00.039339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:00.039419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:00.041272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:00.154842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:00.154866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:00.154871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:00.154943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18444 TClient is connected to server localhost:18444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:00.722572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:00.761041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:00.872044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.004029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.059371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:02.055264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575362084151730:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.055389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.404542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.429882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.456549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.482299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.506162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.531485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.659688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575362084152227:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.659771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.659991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575362084152232:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.663841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:02.673832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575362084152234:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:04.661451Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575349199248267:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.681768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSystemView::PartitionStatsRange3 >> KqpSysColV1::StreamInnerJoinTables >> KqpSysColV0::InnerJoinTables [GOOD] >> KqpSystemView::Sessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 19970, MsgBus: 64188 2025-04-07T14:18:02.636603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575360109274103:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:02.636695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017c9/r3tmp/tmpEsmZ8F/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19970, node 1 2025-04-07T14:18:02.921415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:02.926466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:03.000839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:03.000958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:03.001687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:03.001710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:03.001717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:03.001807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:03.002291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64188 TClient is connected to server localhost:64188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:18:03.463456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.479599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.580094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.707922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.768357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:05.273138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575372994177691:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.273264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.505575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.547580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.596033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.625060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.652175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.687445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.736763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575372994178187:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.736830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.736918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575372994178192:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.740790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:05.749554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575372994178194:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSysColV1::InnerJoinSelect |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 17207, MsgBus: 30490 2025-04-07T14:18:02.877516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575360416269498:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:02.877566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017c7/r3tmp/tmpIKK4d9/pdisk_1.dat 2025-04-07T14:18:03.182751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17207, node 1 2025-04-07T14:18:03.224846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:03.224869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:03.224879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:03.224992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:03.238820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:03.238932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:03.241047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30490 TClient is connected to server localhost:30490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:03.663818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.691359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:03.800733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.929895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.986194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:05.478648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575373301173097:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.478746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.706806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.729429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.756178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.784892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.823775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.852593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.903942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575373301173591:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.904012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.904139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575373301173596:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.907889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:05.922235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575373301173598:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSysColV1::InnerJoinTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 7858, MsgBus: 11950 2025-04-07T14:17:59.661698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575346839466350:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:59.662325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017f2/r3tmp/tmp6x9jG7/pdisk_1.dat 2025-04-07T14:17:59.953122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7858, node 1 2025-04-07T14:18:00.039607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:00.039683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:00.041382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:00.154832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:00.154848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:00.154853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:00.154955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11950 TClient is connected to server localhost:11950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:00.758791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:00.773334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:00.907317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:01.040695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.100903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.977835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575355429402518:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.977971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.402194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.429229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.454041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.479555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.506277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.537323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.624379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575359724370323:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.624447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575359724370328:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.624451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:02.628620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:02.637218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575359724370330:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 1 ydb-cpp-sdk/2.6.2 2025-04-07T14:18:04.661455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575346839466350:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.661574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:07.361913Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035487357, txId: 281474976710683] shutting down >> KqpSysColV1::StreamSelectRange [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSystemView::NodesRange1 [GOOD] >> KqpSystemView::FailResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 13187, MsgBus: 11195 2025-04-07T14:18:04.197284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575368676668516:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.197345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017c3/r3tmp/tmpjqdjO5/pdisk_1.dat 2025-04-07T14:18:04.448567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13187, node 1 2025-04-07T14:18:04.524379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:04.524399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:04.524410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:04.524508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:04.526697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:04.526795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:04.528430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11195 TClient is connected to server localhost:11195 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:04.961168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:04.988586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:05.101883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:05.240711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:05.329710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.958611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575377266604816:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:06.958733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.173454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.205143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.239134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.265019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.293803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.363155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.441498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575381561572616:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.441556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.441644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575381561572621:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.444279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:07.453744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575381561572623:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:08.440098Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035488475, txId: 281474976710671] shutting down >> KqpSysColV0::UpdateAndDelete >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::PartitionStatsRange2 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 4920, MsgBus: 15167 2025-04-07T14:18:05.045218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575372859300264:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:05.045330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017bb/r3tmp/tmp421e9Z/pdisk_1.dat 2025-04-07T14:18:05.310761Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4920, node 1 2025-04-07T14:18:05.394266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.394354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.395958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:05.395978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:05.395985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:05.396072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:05.396163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15167 TClient is connected to server localhost:15167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:05.879282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:05.908506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.043809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.192304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.273017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:07.677112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575381449236569:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.677200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:07.893845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.918395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.949873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.976822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.005284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.032435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.112747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575385744204364:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.112806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.113011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575385744204369:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.116013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:08.126764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575385744204371:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:09.168251Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035489154, txId: 281474976710671] shutting down >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 29583, MsgBus: 7826 2025-04-07T14:18:05.329395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575373647931880:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:05.329477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017b7/r3tmp/tmp37S2SF/pdisk_1.dat 2025-04-07T14:18:05.626869Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29583, node 1 2025-04-07T14:18:05.632498Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:05.712849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:05.712876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:05.712889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:05.712987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:05.718823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.718930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.720423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7826 TClient is connected to server localhost:7826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:06.230284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:06.262949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.388255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.527426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.601445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.131552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575386532835488:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.131664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.352541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.378152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.443924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.469583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.496166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.527209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.611513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575386532835990:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.611589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.611646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575386532835995:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.615987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:08.634360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575386532835997:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 1735, MsgBus: 10381 2025-04-07T14:18:01.404511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575354842445345:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:01.404631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:01.425860Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575357072951197:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:01.426638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:01.431925Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575354533741720:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:01.436647Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575355337474147:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:01.438562Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:01.447502Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490575356676644480:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017d3/r3tmp/tmpTRqwqh/pdisk_1.dat 2025-04-07T14:18:01.628216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:01.629922Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:01.838540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:01.850846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.850916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.854236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1735, node 1 2025-04-07T14:18:01.920796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.920884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.925555Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:18:01.926455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:01.932235Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:01.932258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:01.932263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:01.932353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:01.936335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.936390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.938741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.938850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.938968Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:18:01.940407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:01.941764Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:18:01.942670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:01.943489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.943543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.945131Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-07T14:18:01.945982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10381 TClient is connected to server localhost:10381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:02.560459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:02.607890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:02.772982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:02.992726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.102605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:04.711031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575367727349072:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:04.711127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.017442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.091221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.131346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.183764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.271645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.375971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.499923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575372022317023:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.499986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.500198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575372022317028:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:05.503537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:05.526638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575372022317030:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:06.416272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575354842445345:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.416329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:06.429512Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575357072951197:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.429585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:06.430391Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490575354533741720:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.430446Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:06.436038Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490575355337474147:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.436094Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:06.446295Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490575356676644480:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.446367Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:07.171168Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035487163, txId: 281474976715671] shutting down 2025-04-07T14:18:07.396977Z node 4 :BS_PROXY_PUT ERROR: [38c4ed8a055de16c] Result# TEvPutResult {Id# [72075186224037911:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:07.398644Z node 5 :BS_PROXY_PUT ERROR: [cfc1c29b09668cd0] Result# TEvPutResult {Id# [72075186224037888:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:07.406860Z node 3 :BS_PROXY_PUT ERROR: [c368464bb7fd0397] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:07.407013Z node 2 :BS_PROXY_PUT ERROR: [802f7a5fbf7e95a2] Result# TEvPutResult {Id# [72075186224037899:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] 
[GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 15638, MsgBus: 14329 2025-04-07T14:18:06.075636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575379451111068:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.077679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017b0/r3tmp/tmph9x6aD/pdisk_1.dat 2025-04-07T14:18:06.376623Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15638, node 1 2025-04-07T14:18:06.432783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:06.432911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:06.435770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:06.444472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:06.444493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:06.444499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:06.444590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14329 TClient is connected to server localhost:14329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:06.945193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.964629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:18:06.974348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:07.085013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:07.224232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:18:07.274591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.742729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575388041047294:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.742862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.980753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.012736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.085143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.114932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.144321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.201708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.234972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575392336015089:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.235019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.235083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575392336015094:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.238070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:09.247677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575392336015096:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:10.109420Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035490141, txId: 281474976715671] shutting down >> KqpSystemView::PartitionStatsRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 18750, MsgBus: 8925 2025-04-07T14:18:00.700436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575350633913448:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:00.700488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00110f/r3tmp/tmptXNe1J/pdisk_1.dat 2025-04-07T14:18:00.970435Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18750, node 1 2025-04-07T14:18:01.032802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:01.032845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:01.032856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:01.032957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:01.056244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.056387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.058281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8925 TClient is connected to server localhost:8925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:01.479725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.501973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:01.620585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.747418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.804116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.204466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575363518817055:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.204591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.402207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.428019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.449805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.471487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.496224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.519393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.553669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575363518817545:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.553735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575363518817550:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.553749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.556548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:03.564200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575363518817552:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 24152, MsgBus: 22523 2025-04-07T14:18:05.683633Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575371859346781:2061];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00110f/r3tmp/tmpnbfTVg/pdisk_1.dat 2025-04-07T14:18:05.764743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:05.826320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.826406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.826723Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:05.840418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24152, node 2 2025-04-07T14:18:05.929284Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:05.929305Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:05.929311Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:05.929508Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22523 TClient is connected to server localhost:22523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:06.359988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.367322Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:18:06.377551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.473078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:06.633718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.717115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.508171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575384744250340:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.508249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.564524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.586922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.620724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.649140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.690699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.723567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.809839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575384744250837:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.809913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.810098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575384744250842:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.813340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:08.824099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575384744250844:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSysColV0::SelectRange >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSystemView::PartitionStatsRanges >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> KqpSysColV1::SelectRange >> KqpSystemView::QueryStatsSimple >> KqpSysColV1::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 13691, MsgBus: 1785 2025-04-07T14:18:07.642900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575381891021696:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:07.646710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017a7/r3tmp/tmpXCQMbq/pdisk_1.dat 2025-04-07T14:18:07.920883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:07.935901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:07.936045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:07.938982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13691, node 1 2025-04-07T14:18:07.997220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:07.997263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:07.997277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:07.997371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1785 TClient is connected to server localhost:1785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:08.494185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:18:08.512691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.660725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.795879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.875444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.144307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575394775925277:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.144388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.323019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.345397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.365388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.385566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.405498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.428201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.463040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575394775925769:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.463103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.463161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575394775925774:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.466082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:10.473073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575394775925776:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:11.455466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035491450, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 26393, MsgBus: 14104 2025-04-07T14:18:07.173153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575380864633348:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:07.173202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017ab/r3tmp/tmprtyTDx/pdisk_1.dat 2025-04-07T14:18:07.469360Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26393, node 1 2025-04-07T14:18:07.539536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:07.539688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:07.541003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:07.554620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:07.554644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:07.554653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:07.554764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14104 TClient is connected to server localhost:14104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:08.030455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:08.043762Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:18:08.059037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.176050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:18:08.318705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.374303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.945693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575389454569659:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.945866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.155454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.180088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.203715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.227347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.251285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.316281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.353345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393749537455:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.353430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.353608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393749537460:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.356745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:10.364026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575393749537462:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:11.358198Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035491394, txId: 281474976710671] shutting down |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 28048, MsgBus: 9265 2025-04-07T14:18:07.664758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575380795838246:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:07.665361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017a6/r3tmp/tmpd5eEYZ/pdisk_1.dat 2025-04-07T14:18:07.937297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28048, node 1 2025-04-07T14:18:08.011376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:08.011660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:08.014380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:08.042175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:08.042203Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:08.042215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:08.042322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9265 TClient is connected to server localhost:9265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:08.548747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:18:08.588346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.706418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.857365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.933001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.313688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393680741750:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.313804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.545744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.568754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.594917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.618063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.638754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.665536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:10.706881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393680742246:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.706969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.707137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393680742251:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.710033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:10.717161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575393680742253:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:11.879172Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035491912, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] >> KqpSystemView::ReadSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 31248, MsgBus: 17246 2025-04-07T14:17:46.869720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575291948383004:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:46.869780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001126/r3tmp/tmpYbBm2j/pdisk_1.dat 2025-04-07T14:17:47.136887Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31248, node 1 2025-04-07T14:17:47.199631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:47.199652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:47.199661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:47.199768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:47.218376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:47.218507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:47.223245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17246 TClient is connected to server localhost:17246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:47.643438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:49.114034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575304833285515:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.114117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575304833285532:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.114163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:49.118487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:49.129513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575304833285542:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:17:49.390349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:49.478873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:17:49.478873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:17:49.479155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:17:49.479426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:17:49.479476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:17:49.479551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:17:49.479651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:17:49.479691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:17:49.479806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:17:49.479888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:17:49.479932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:17:49.480032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:17:49.480047Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:17:49.480117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:17:49.480131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:17:49.480253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:17:49.480257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:17:49.480388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:17:49.480398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:17:49.480487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:17:49.480494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:17:49.480603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:17:49.480652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490575304833285752:2319];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:17:49.480719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490575304833285751:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:17:49.509121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490575304833285748:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:17:49.509173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490575304833285748:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:17:49.509349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490575304833285748:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:17:49.509415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490575304833285748:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstr ... t=skip_indexation;reason=disabled; 2025-04-07T14:17:54.720516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7490575317718192585:2926];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.730115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[1:7490575317718192577:2919];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.730467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[1:7490575317718192577:2919];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.732385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7490575317718192529:2904];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.732875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7490575317718192529:2904];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.736529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7490575317718192600:2937];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.736904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7490575317718192600:2937];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.740495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[1:7490575317718192583:2925];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.740835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[1:7490575317718192583:2925];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:17:54.859158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037895;tx_state=complete;fline=interaction.h:353;batch=Group: [ 2 ] Name: [ 5061756C ] 
;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710665}],"starts":[{"inc":{"count_not_include":1},"id":281474976710665}],"finishes":[]},"p":{"include":1,"pk":"1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710665}]},"p":{"include":2147483647,"pk":"1;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710666}],"finishes":[{"inc":{"count_include":1},"id":281474976710666}]},"p":{"include":0,"pk":"2;Paul;"}}]}; 2025-04-07T14:17:54.865004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 27668, MsgBus: 24039 2025-04-07T14:18:00.942121Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575351673832378:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:00.942208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001126/r3tmp/tmpjT7y8r/pdisk_1.dat 2025-04-07T14:18:01.035666Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:01.057496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:01.057567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:01.059337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27668, node 2 2025-04-07T14:18:01.096015Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:01.096045Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:01.096055Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:01.096175Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24039 TClient is connected to server localhost:24039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:18:01.476808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.898872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575364558734894:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.898985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575364558734902:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.899050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:03.902643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:18:03.910369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575364558734908:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:18:04.026622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:18:04.103064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:04.940773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:05.942146Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575351673832378:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:05.942216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:06.354941Z node 2 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-07T14:18:06.355119Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:18:06.355298Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:18:06.355550Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575377443645426:2939], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [2:7490575377443645102:2939]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[2:7490575377443645426:2939].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:06.368032Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575377443645417:2939], SessionActorId: [2:7490575377443645102:2939], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7490575377443645102:2939]. isRollback=0 2025-04-07T14:18:06.368268Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmVkMTUyN2YtNjI0MmUxMTEtN2FiMmNkODctNjg5ZWZmNmM=, ActorId: [2:7490575377443645102:2939], ActorState: ExecuteState, TraceId: 01jr89v6jne2n7yegwkx7d9yz5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7490575377443645418:2939] from: [2:7490575377443645417:2939] 2025-04-07T14:18:06.368568Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490575377443645418:2939] TxId: 281474976715667. Ctx: { TraceId: 01jr89v6jne2n7yegwkx7d9yz5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmVkMTUyN2YtNjI0MmUxMTEtN2FiMmNkODctNjg5ZWZmNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:06.375897Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmVkMTUyN2YtNjI0MmUxMTEtN2FiMmNkODctNjg5ZWZmNmM=, ActorId: [2:7490575377443645102:2939], ActorState: ExecuteState, TraceId: 01jr89v6jne2n7yegwkx7d9yz5, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 32500, MsgBus: 5738 2025-04-07T14:18:08.296117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575384422006172:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:08.296208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017a5/r3tmp/tmpVvMszf/pdisk_1.dat 2025-04-07T14:18:08.624428Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32500, node 1 2025-04-07T14:18:08.692770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:08.692791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:08.692800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:08.692891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:08.702175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:08.702254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:08.703951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5738 TClient is connected to server localhost:5738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:09.207839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:09.230527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:09.355426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:09.477138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:09.530604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.862268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575393011942488:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:10.862414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.033801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.055394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.078280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.103361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.129554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.157814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.231624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575397306910280:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.231694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575397306910285:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.231700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.234886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:11.242925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575397306910287:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSystemView::FailResolve [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 5398, MsgBus: 25280 2025-04-07T14:18:08.963602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575385844377561:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:08.963664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00179e/r3tmp/tmpBvrmZE/pdisk_1.dat 2025-04-07T14:18:09.253306Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5398, node 1 2025-04-07T14:18:09.306440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:09.306459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:09.306467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:09.306586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:09.315992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:09.316111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:09.317789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25280 TClient is connected to server localhost:25280 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:09.725016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:09.749174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:09.858422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:09.983349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.042327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.433042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575398729281166:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.433175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.607472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.630940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.654226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.675540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.695695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.723073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:11.755695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575398729281659:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.755753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.755799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575398729281664:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:11.759038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:11.766925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575398729281666:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSysColV1::SelectRowById >> KqpSysColV0::InnerJoinSelectAsterisk >> BasicStatistics::Serverless [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 25521, MsgBus: 29291 2025-04-07T14:18:04.906391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575370752546516:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.906493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:04.928254Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575366941395370:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.928333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:04.934149Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575367343808257:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.943631Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575369044493360:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.945167Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490575371051980448:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:04.946087Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:05.181060Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:05.182916Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017bd/r3tmp/tmpsQeQmt/pdisk_1.dat 2025-04-07T14:18:05.504039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.504142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.505814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.505876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.507060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.507137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.507233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:18:05.507269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.510774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.510886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.515026Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-07T14:18:05.515061Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:18:05.515076Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:18:05.515566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:05.515893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:05.516574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:05.516829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:05.519318Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:18:05.519940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:05.536142Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25521, node 1 2025-04-07T14:18:05.560985Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:05.561000Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:05.636873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:05.636911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:05.636920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:05.637052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29291 TClient is connected to server localhost:29291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:18:06.509596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.536046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:18:06.665287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:18:06.816534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:06.959767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:08.638264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575387932417513:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:08.638369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.009769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.072424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.252929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.314848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.363693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.413065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:09.510874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575392227385449:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:09.510970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:09.510970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575392227385454:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:09.514814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:09.532584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575392227385456:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:09.906450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575370752546516:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.906558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:09.928447Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575366941395370:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.928524Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:09.933614Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490575367343808257:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.933788Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:09.941281Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490575369044493360:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.941342Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:09.944877Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490575371051980448:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.944959Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:10.738699Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035490722, txId: 281474976710671] shutting down 2025-04-07T14:18:11.055668Z node 2 :BS_PROXY_PUT ERROR: [ddb2932f285c33c3] Result# TEvPutResult {Id# [72075186224037899:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:11.055661Z node 3 :BS_PROXY_PUT ERROR: [f0460fb0c74cb562] Result# TEvPutResult {Id# [72075186224037897:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:11.055731Z node 4 :BS_PROXY_PUT ERROR: [516b7291a7069872] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:11.059641Z node 5 :BS_PROXY_PUT ERROR: [e73c9e410687e904] Result# TEvPutResult {Id# [72075186224037912:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::FailNavigate >> KqpSystemView::QueryStatsScan ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 27965, MsgBus: 10861 2025-04-07T14:18:10.098881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575393704248722:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:10.098963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00179b/r3tmp/tmpaZUwKP/pdisk_1.dat 2025-04-07T14:18:10.347524Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27965, node 1 2025-04-07T14:18:10.404430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:10.404456Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:10.404463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:10.404559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:10.440581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:10.440695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:10.442515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10861 TClient is connected to server localhost:10861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:10.776258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.793784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:10.922638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:11.037151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.112779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.622209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575402294185038:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:12.622345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.818502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.838742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.860291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.883555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.908875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.943362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.974192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575402294185529:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:12.974255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:12.974293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575402294185534:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.976634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:12.984391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575402294185536:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:13.726138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.806059Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490575406589153185:3644], for# user0@builtin, access# SelectRow 2025-04-07T14:18:13.806162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-07T14:18:13.815677Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE5ZGQyOTctZjk5N2JjZTEtMzYwOTZhMmUtNTQwMGI1Mjk=, ActorId: [1:7490575406589153162:2463], ActorState: ExecuteState, TraceId: 01jr89vdvb65pctp4prdnyf4w4, Create QueryResponse for error on request, msg: 2025-04-07T14:18:13.815999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035493805, txId: 281474976710672] shutting down 2025-04-07T14:18:13.816308Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr89vdvb65pctp4prdnyf4w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5ZGQyOTctZjk5N2JjZTEtMzYwOTZhMmUtNTQwMGI1Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpSystemView::NodesSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-04-07T14:15:37.971815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:37.972140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:37.972248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001fe8/r3tmp/tmpXnInlz/pdisk_1.dat 2025-04-07T14:15:38.302446Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24722, node 1 2025-04-07T14:15:38.512146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:38.512204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:38.512229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:38.512855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:38.514959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.608260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:38.608405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:38.623162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4232 2025-04-07T14:15:39.221945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:42.087875Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:42.122323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.122434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:42.161065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:42.164224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:42.328955Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:42.371672Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:42.371757Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:42.398620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:42.398813Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:42.399005Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:42.399060Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:42.399109Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:42.399166Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:42.399235Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:42.399303Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:42.399686Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:42.600487Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:42.600573Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:42.606130Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:42.612727Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:42.613367Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:42.614068Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:42.631572Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:42.631620Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:42.631678Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:42.633024Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.633126Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:42.643102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:42.653318Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:42.653437Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:42.663111Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:42.678468Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:42.717959Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:42.986587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:43.141749Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:43.732146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.253432Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:44.388531Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:44.388590Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:44.388702Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2487:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:44.390855Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2495:2909] 2025-04-07T14:15:44.391624Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2495:2909], schemeshard id = 72075186224037899 2025-04-07T14:15:45.251341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2615:3186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:15:45.251469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.263434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:15:45.378915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2763:3221], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:15:45.379015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.387182Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2768:3225]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:45.387303Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:45.387380Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T14:15:45.387430Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2771:3228] 2025-04-07T14:15:45.387486Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2771:3228] 2025-04-07T14:15:45.387875Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2772:3034] 2025-04-07T14:15:45.388074Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2771:3228], server id = [2:2772:3034], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:45.388199Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2772:3034], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:45.388246Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:45.388373Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:45.388416Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2768:3225], StatRequests.size() = 1 2025-04-07T14:15:45.401737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2776:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:15:45.401826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:15:45.402072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2781:3237], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:45.406677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-07T14:15:45.520699Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:45.520774Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistic ... 776977Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:7457:5374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:04.777289Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2025-04-07T14:18:04.777330Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:7457:5374], StatRequests.size() = 1 2025-04-07T14:18:05.931309Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:18:05.931386Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:05.931426Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:05.931467Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:06.137420Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7501:5394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:06.137738Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-07T14:18:06.137782Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7501:5394], StatRequests.size() = 1 2025-04-07T14:18:07.057277Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:07.058010Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:07.058338Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:07.095332Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2025-04-07T14:18:07.095410Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:07.095709Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2025-04-07T14:18:07.110324Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:07.385133Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7542:5416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:07.385376Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-07T14:18:07.385416Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7542:5416], StatRequests.size() = 1 2025-04-07T14:18:08.192851Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:08.192940Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:08.192996Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 
2025-04-07T14:18:08.193052Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:08.193518Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:08.221656Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:08.227866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7574:5438], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:08.227993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7584:5443], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:08.228214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.240638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:08.312442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7588:5446], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:08.601889Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7709:5512]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:08.602190Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:08.602484Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-07T14:18:08.602552Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:18:08.602746Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:08.602963Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7709:5512], StatRequests.size() = 1 2025-04-07T14:18:08.750631Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWFkN2RiMmEtZTQzYzg5YmUtNGNhNDBkMDgtZDNkZmQxZTA=, TxId: 2025-04-07T14:18:08.750703Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWFkN2RiMmEtZTQzYzg5YmUtNGNhNDBkMDgtZDNkZmQxZTA=, TxId: 2025-04-07T14:18:08.752228Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:08.769889Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:08.769960Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:08.837711Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7739:5526]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:08.838005Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:08.838051Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7739:5526], StatRequests.size() = 1 2025-04-07T14:18:08.861501Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:18:08.861565Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:18:08.917373Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:2945:3066], schemeshard count = 1 2025-04-07T14:18:09.299084Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T14:18:09.299150Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 203.000000s, at schemeshard: 72075186224037899 2025-04-07T14:18:09.299582Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-07T14:18:09.317534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:10.095525Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7791:5561]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:10.095788Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:10.095831Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7791:5561], StatRequests.size() = 1 2025-04-07T14:18:10.958939Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:10.959080Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2025-04-07T14:18:10.959113Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:10.959154Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-07T14:18:10.959186Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:10.959450Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:10.961975Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:10.974204Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDZhNGNhYjMtZDIxYzJlMmEtMjIwZjdjZTktMjUxODI0NDE=, TxId: 2025-04-07T14:18:10.974270Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDZhNGNhYjMtZDIxYzJlMmEtMjIwZjdjZTktMjUxODI0NDE=, TxId: 2025-04-07T14:18:10.974740Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:10.988457Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:10.988525Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:11.288520Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7862:5602]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:11.288858Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:11.288901Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7862:5602], StatRequests.size() = 1 2025-04-07T14:18:12.500703Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7914:5634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:12.500977Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:12.501015Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7914:5634], StatRequests.size() = 1 2025-04-07T14:18:13.281664Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-07T14:18:13.281912Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:13.282077Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:13.282106Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:18:13.282502Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:13.558667Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7952:5654]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:13.558962Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:13.559008Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7952:5654], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 62069, MsgBus: 26699 2025-04-07T14:18:10.517775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575395159213109:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:10.517864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001795/r3tmp/tmp31umQO/pdisk_1.dat 2025-04-07T14:18:10.765137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62069, node 1 2025-04-07T14:18:10.816952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:10.816982Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:10.817005Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:10.817128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:10.870575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:10.870659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:10.872210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26699 TClient is connected to server localhost:26699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:11.228867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:11.252974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.356230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.470320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.539528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.931894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575403749149422:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:12.931994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:13.145133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.169252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.189876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.214258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.239091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.263071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.301183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575408044117209:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:13.301248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:18:13.301305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575408044117214:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:13.304752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:13.312978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575408044117216:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:14.211344Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035494206, txId: 281474976710671] shutting down >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSystemView::Join >> KqpSysColV0::UpdateAndDelete [GOOD] >> HttpRequest::Probe [GOOD] >> KqpSysColV0::InnerJoinSelect >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogExistingUserId |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 30927, MsgBus: 8100 2025-04-07T14:18:11.891426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575397967649402:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:11.891500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00178a/r3tmp/tmppNMWsp/pdisk_1.dat 2025-04-07T14:18:12.137662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30927, node 1 2025-04-07T14:18:12.205810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:12.205831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:12.205846Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:12.205954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:12.230896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:12.231041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:12.232611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8100 TClient is connected to server localhost:8100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:12.612772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:12.635228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.735128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.847795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.918787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.265671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575410852553033:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.265772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.409917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.431793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.452426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.473588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.495919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.518564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.555612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575410852553526:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.555655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.555927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575410852553531:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.558588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:14.565070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575410852553533:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:15.406708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035495402, txId: 281474976715671] shutting down >> KqpSysColV0::SelectRange [GOOD] >> KqpSinkLocks::OlapUncommittedRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 1399, MsgBus: 10986 2025-04-07T14:18:10.271097Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575395875146946:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:10.271758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001797/r3tmp/tmpSdFL2i/pdisk_1.dat 2025-04-07T14:18:10.524401Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1399, node 1 2025-04-07T14:18:10.602391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:10.602427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:10.602435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:10.602543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:10.629980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:10.630090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:10.632153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10986 TClient is connected to server localhost:10986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:11.016642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.034713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:11.162781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.301889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:11.356107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.782117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575404465083241:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.782234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.981267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.001438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.023571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.046705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.071838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.139031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.175127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575408760051039:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:13.175199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:13.175229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575408760051044:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:13.178042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:13.184865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575408760051046:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:15.271324Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575395875146946:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:15.271403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::SelectRowById |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] >> KqpSystemView::PartitionStatsRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 2025-04-07T14:15:38.766219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:38.766524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:38.766656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001f67/r3tmp/tmp9UUX9z/pdisk_1.dat 2025-04-07T14:15:39.049719Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4091, node 1 2025-04-07T14:15:39.225957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:39.226007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:39.226033Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:39.226511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:39.228326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.320455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.320598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.334586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10863 2025-04-07T14:15:39.907325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:42.808485Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:42.843511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.843625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:42.883963Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:42.888200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:43.073405Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:43.113993Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:43.114074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:43.139134Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:43.139333Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:43.139524Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:43.139587Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:43.139636Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:43.139687Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:43.139729Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:43.139777Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:43.140189Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:43.342534Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:43.342641Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:43.348266Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:43.354435Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:43.355170Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:43.356003Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:43.374463Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:43.374525Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:43.374587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:43.376498Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:43.376597Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.387356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:43.395934Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:43.396044Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:43.404559Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:43.419453Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:43.457483Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:43.668048Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:43.820639Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:44.533505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.533619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:44.549269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:44.693518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:44.693679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:44.693876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:44.693986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:44.694083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:44.694144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:44.694214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:44.694297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:44.694357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:44.694441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:44.694501Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:44.694577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2287:2844];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:44.739018Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:44.739106Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:44.739341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:44.739489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:44.739594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:44.739694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2300:2853];tablet_id=72075186224037900;process=TTxI ... 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:13.007909Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:18:13.007997Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:18:13.690404Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:18:13.690487Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=Nyra 2025-04-07T14:18:13.690531Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:18:14.995802Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:18:14.996074Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:15.065564Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:15.065727Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2025-04-07T14:18:15.065770Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:15.066404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:18:15.079608Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:18:15.080074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:18:15.080143Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:18:15.080588Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:18:15.093920Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:18:15.094084Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:18:15.095011Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8336:6207], server id = [2:8341:6212], tablet id = 72075186224037899, status = OK 2025-04-07T14:18:15.095123Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8336:6207], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.095347Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8337:6208], server id = [2:8342:6213], tablet id = 72075186224037900, status = OK 2025-04-07T14:18:15.095406Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8337:6208], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.095503Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8338:6209], server id = [2:8343:6214], tablet id = 72075186224037901, status = OK 2025-04-07T14:18:15.095551Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8338:6209], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.096466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8339:6210], server id = [2:8344:6215], tablet id = 72075186224037902, status = OK 2025-04-07T14:18:15.096542Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8339:6210], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.096653Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:18:15.097555Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-07T14:18:15.097821Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8340:6211], server id = [2:8345:6216], tablet id = 72075186224037903, status = OK 2025-04-07T14:18:15.097887Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8340:6211], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.098419Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-07T14:18:15.098547Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-07T14:18:15.098834Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8336:6207], server id = [2:8341:6212], tablet id = 72075186224037899 2025-04-07T14:18:15.098862Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.099235Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8337:6208], server id = [2:8342:6213], tablet id = 72075186224037900 2025-04-07T14:18:15.099258Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.099317Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-07T14:18:15.099588Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8338:6209], server id = [2:8343:6214], tablet id = 72075186224037901 2025-04-07T14:18:15.099613Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.099667Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8339:6210], server id = [2:8344:6215], tablet id = 72075186224037902 2025-04-07T14:18:15.099687Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.099815Z node 2 
:STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8346:6217], server id = [2:8348:6219], tablet id = 72075186224037904, status = OK 2025-04-07T14:18:15.099875Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8346:6217], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.099918Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8347:6218], server id = [2:8351:6222], tablet id = 72075186224037905, status = OK 2025-04-07T14:18:15.099964Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8347:6218], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.100139Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8340:6211], server id = [2:8345:6216], tablet id = 72075186224037903 2025-04-07T14:18:15.100162Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.100859Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8349:6220], server id = [2:8353:6224], tablet id = 72075186224037906, status = OK 2025-04-07T14:18:15.100926Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8349:6220], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.101068Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8350:6221], server id = [2:8352:6223], tablet id = 72075186224037907, status = OK 2025-04-07T14:18:15.101121Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8350:6221], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.101576Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8354:6225], server id = [2:8355:6226], tablet id = 72075186224037908, status = OK 2025-04-07T14:18:15.101637Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8354:6225], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:15.101686Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-07T14:18:15.102536Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:18:15.102684Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:18:15.102850Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:18:15.103022Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8346:6217], server id = [2:8348:6219], tablet id = 72075186224037904 2025-04-07T14:18:15.103046Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.103103Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:18:15.103141Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:18:15.103295Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:18:15.103493Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:18:15.103784Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:15.105771Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8347:6218], server id = [2:8351:6222], tablet id = 72075186224037905 2025-04-07T14:18:15.105802Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.106177Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8349:6220], server id = [2:8353:6224], tablet id = 72075186224037906 2025-04-07T14:18:15.106201Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.106323Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8350:6221], server id = [2:8352:6223], tablet id = 72075186224037907 2025-04-07T14:18:15.106363Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.106504Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:18:15.106909Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8354:6225], server id = [2:8355:6226], tablet id = 72075186224037908 2025-04-07T14:18:15.106933Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:15.125525Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2RkZDQwNDMtNGYzNDQyODctZjI1Zjk0NGQtOTYxOTgwNTc=, TxId: 2025-04-07T14:18:15.125580Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2RkZDQwNDMtNGYzNDQyODctZjI1Zjk0NGQtOTYxOTgwNTc=, TxId: 2025-04-07T14:18:15.126231Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:15.139546Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:15.139623Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=Nyra, ActorId=[1:3383:3247] 2025-04-07T14:18:15.140615Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:8373:5532]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:15.140852Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:15.140901Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:18:15.142898Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:15.142946Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:18:15.142987Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-07T14:18:15.152438Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 26211, MsgBus: 13020 2025-04-07T14:18:12.332879Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575401575327688:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:12.332952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001789/r3tmp/tmp2WyQ7T/pdisk_1.dat 2025-04-07T14:18:12.598202Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26211, node 1 2025-04-07T14:18:12.665354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:12.665374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:12.665378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:12.665495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:12.678196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:12.678304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:12.679872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13020 TClient is connected to server localhost:13020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:13.043690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.066942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.190847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.310760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.371518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.953314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575410165263995:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:14.953416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.160731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.186346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.210212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.231313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.259207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.284533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.316162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575414460231782:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.316238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.316405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575414460231787:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.319605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:15.327591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575414460231789:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 11949, MsgBus: 14346 2025-04-07T14:18:12.455595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575402374790685:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:12.455698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001788/r3tmp/tmpQ57TuG/pdisk_1.dat 2025-04-07T14:18:12.697701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11949, node 1 2025-04-07T14:18:12.766741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:12.766789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:12.766799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:12.766949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:12.806443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:12.806634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:12.808240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14346 TClient is connected to server localhost:14346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:13.201630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.224024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.314878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:13.438943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.496617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.077989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575415259694287:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.078091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.293147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.317528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.339136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.358855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.383545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.406757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.436034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575415259694777:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.436181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.436335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575415259694782:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.439289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:15.445368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575415259694784:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:18:16.681779Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035496674, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 14323, MsgBus: 29606 2025-04-07T14:18:12.915917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575404298079561:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:12.916724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00177d/r3tmp/tmpwcPWLt/pdisk_1.dat 2025-04-07T14:18:13.179202Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14323, node 1 2025-04-07T14:18:13.233941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:13.233975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:13.233988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:13.234101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:13.260753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:13.260887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:13.262648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29606 TClient is connected to server localhost:29606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:13.646562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.660801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:13.761583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.875437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.947031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.514010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575417182983184:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.514115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.707293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.730290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.752701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.774480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.808442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.874190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.911701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575417182983680:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.911772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.911905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575417182983685:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:15.916114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:15.926653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575417182983687:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 28650, MsgBus: 24520 2025-04-07T14:17:51.333060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575313032816814:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:51.333553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001120/r3tmp/tmp1ErFgM/pdisk_1.dat 2025-04-07T14:17:51.598734Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28650, node 1 2025-04-07T14:17:51.674882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:51.674909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:51.674916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:51.675034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:51.713746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:51.713862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:51.715660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24520 TClient is connected to server localhost:24520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:52.087491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:17:53.741534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575321622752035:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.741653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.742151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575321622752057:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:53.745898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:53.753631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575321622752059:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:17:54.058254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:54.171533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:17:54.887122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:56.074850Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-07T14:17:56.075066Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:17:56.075207Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:17:56.075437Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575334507662475:2937], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7490575330212694867:2937]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[1:7490575334507662475:2937].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:17:56.075946Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575334507662464:2937], SessionActorId: [1:7490575330212694867:2937], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7490575330212694867:2937]. isRollback=0 2025-04-07T14:17:56.076073Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk1NGMxMzAtNDU2YzZjMy02YmZmYjMzNC00MTg1MTIyOA==, ActorId: [1:7490575330212694867:2937], ActorState: ExecuteState, TraceId: 01jr89twjt120zraaqtxwpynjc, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7490575334507662465:2937] from: [1:7490575334507662464:2937] 2025-04-07T14:17:56.076262Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490575334507662465:2937] TxId: 281474976710665. Ctx: { TraceId: 01jr89twjt120zraaqtxwpynjc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk1NGMxMzAtNDU2YzZjMy02YmZmYjMzNC00MTg1MTIyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:17:56.083059Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk1NGMxMzAtNDU2YzZjMy02YmZmYjMzNC00MTg1MTIyOA==, ActorId: [1:7490575330212694867:2937], ActorState: ExecuteState, TraceId: 01jr89twjt120zraaqtxwpynjc, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 2025-04-07T14:17:56.333059Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575313032816814:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:56.333166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 2003, MsgBus: 5067 2025-04-07T14:18:02.057388Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575359090848762:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:02.057530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001120/r3tmp/tmpfnGugj/pdisk_1.dat 2025-04-07T14:18:02.155175Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2003, node 2 2025-04-07T14:18:02.189313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:02.189404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:02.192912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:02.214823Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:02.214852Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:02.214860Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:02.214969Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5067 TClient is connected to server localhost:5067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:02.583476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:04.560515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575367680783991:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:04.560571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575367680783987:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { ... 5186224038022;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.906640Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7490575380565691009:2915];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.907058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7490575380565690845:2874];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.907245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7490575380565690845:2874];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.907666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7490575380565690757:2861];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.907880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7490575380565690757:2861];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.908835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7490575384860658719:2928];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.909070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7490575384860658719:2928];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.909527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7490575384860658697:2921];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.909745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7490575384860658697:2921];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.911024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7490575384860658769:2934];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.911201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7490575384860658769:2934];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.911436Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038085;self_id=[2:7490575384860658826:2943];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.911674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7490575384860658826:2943];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.913116Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490575380565690908:2898];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.913293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490575380565690908:2898];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.914825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7490575384860658789:2941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.915095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7490575384860658789:2941];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.916096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490575384860658764:2933];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.916327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490575384860658764:2933];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.916922Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7490575384860658854:2946];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.917174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7490575384860658854:2946];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.920762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490575380565691274:2918];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.921028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490575380565691274:2918];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.923775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7490575384860658771:2935];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.924056Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038077;self_id=[2:7490575384860658771:2935];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.930127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7490575384860658990:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.930461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7490575384860658990:2954];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.930635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7490575380565691000:2914];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:10.930854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7490575380565691000:2914];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-07T14:18:11.066701Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[2:7490575367680784201:2318];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.066804Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037897;self_id=[2:7490575367680784202:2319];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.066857Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[2:7490575367680784198:2316];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.066913Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575393450596571:3486], SessionActorId: [2:7490575393450596553:3486], Got BAD REQUEST for table. ShardID=72075186224037888, Sink=[2:7490575393450596571:3486].{
: Fatal: only single operation is supported } 2025-04-07T14:18:11.066933Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[2:7490575367680784208:2320];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067018Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7490575367680784197:2315];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067025Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575393450596571:3486], SessionActorId: [2:7490575393450596553:3486], Bad request. {
: Fatal: only single operation is supported }. statusCode=BAD_REQUEST. subIssues=
: Fatal: only single operation is supported . sessionActorId=[2:7490575393450596553:3486]. isRollback=0 2025-04-07T14:18:11.067184Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037892;self_id=[2:7490575367680784210:2321];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067265Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037894;self_id=[2:7490575367680784220:2322];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067270Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037893;self_id=[2:7490575367680784228:2323];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067339Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037895;self_id=[2:7490575367680784199:2317];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:18:11.067343Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037896;self_id=[2:7490575367680784279:2324];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 29495, MsgBus: 23694 2025-04-07T14:18:13.582988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575408930054608:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:13.583070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001772/r3tmp/tmpGHSkK2/pdisk_1.dat 2025-04-07T14:18:13.867799Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29495, node 1 2025-04-07T14:18:13.914478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:13.914492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:13.914496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:13.914573Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:13.941964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:13.942088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:13.943562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TClient is connected to server localhost:23694 TClient is connected to server localhost:23694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:14.296972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.308577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.404262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.522644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.598321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.086759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575421814958209:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.086857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.273953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.294914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.314819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.337160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.360984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.393109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.430190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575421814958701:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.430239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575421814958706:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.430279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.433711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:16.442781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575421814958708:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:17.168935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.253348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr89vh6z8q0jnzjfx8xf29gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1NGY0NzQtNGE5MmMzMS02MjA3MGM5LWM1MmMyOTJi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:18:17.257859Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035497252, txId: 281474976710672] shutting down >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> KqpSystemView::FailNavigate [GOOD] >> KqpSysColV1::SelectRowById [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSystemView::PartitionStatsParametricRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 26805, MsgBus: 25672 2025-04-07T14:18:14.631929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575412170663096:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:14.631994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00176a/r3tmp/tmpFhKOAC/pdisk_1.dat 2025-04-07T14:18:14.871034Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26805, node 1 2025-04-07T14:18:14.940506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:14.940531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:14.940540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:14.940676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:14.991616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:14.991708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:14.993544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25672 TClient is connected to server localhost:25672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:15.391178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.414878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.538445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:18:15.661146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.721440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.024813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575425055566686:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.024932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.195740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.220641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.242796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.266065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.288820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.312804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.351398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575425055567177:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.351462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575425055567182:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.351490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.354770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:17.362764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575425055567184:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 9027, MsgBus: 63503 2025-04-07T14:18:14.546025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575413641519770:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:14.546203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001770/r3tmp/tmpMHPm0h/pdisk_1.dat 2025-04-07T14:18:14.800693Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9027, node 1 2025-04-07T14:18:14.858756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:14.858771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:14.858777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:14.858867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:14.886099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:14.886182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:14.887939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63503 TClient is connected to server localhost:63503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:15.295268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.310760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.439470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:15.556825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.617251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.140595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426526423375:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.140715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.376056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.398485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.428898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.449968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.473788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.538443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.615275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426526423882:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.615338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.615342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426526423887:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.618030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:17.624468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575426526423889:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-04-07T14:17:21.405861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575186102013848:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:21.405942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00184b/r3tmp/tmph7WZ8F/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5250, node 1 2025-04-07T14:17:21.656845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:17:21.657047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:17:21.657070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:17:21.657525Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:21.658516Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:21.658968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:17:21.663238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:21.664589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:21.665085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:21.667525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:21.668109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:21.668134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:21.668174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:21.668494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.702655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:21.702682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:21.702689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:21.702797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:21.726566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:21.726652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:21.729236Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:21.991292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:21.997142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:21.998604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.000580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:22.000767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:22.000783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:22.001801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:22.002420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:22.002450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:22.003840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:22.007014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035442051, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:22.007048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:22.007311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:22.008901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:22.009103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:22.009166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:22.009241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:22.009277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:22.009324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:22.011475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:22.011542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:22.011576Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:22.011664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:17:22.096239Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38438) has now valid token of root@builtin 2025-04-07T14:17:22.138190Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-07T14:17:24.871662Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575198138795416:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:24.871732Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00184b/r3tmp/tmpIeyNAc/pdisk_1.dat 2025-04-07T14:17:24.939229Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9712, 
node 4 2025-04-07T14:17:24.972049Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:24.972333Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:24.975975Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:17:24.999884Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:24.999904Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:24.999910Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:25.000012Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:25.176097Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:25.176435Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:25.176462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:25.178250Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:25.178403Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDoma ... meout;self_id=[25:7490575324034468272:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:59.452418Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0407 14:18:05.049650704 638582 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.072275256 642532 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0407 14:18:05.109771952 638479 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.130571935 638582 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.169167737 642576 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.189840277 642577 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.223235252 638582 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.241864946 638583 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.280935536 642594 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.300053900 638480 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.328794309 638582 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:05.352024673 642594 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2025-04-07T14:18:06.915825Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7490575378015523695:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:06.915934Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00184b/r3tmp/tmpxDGeqr/pdisk_1.dat 2025-04-07T14:18:07.056718Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:07.100989Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:07.101102Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:07.106244Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61427, node 28 2025-04-07T14:18:07.189811Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:07.189840Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:07.189852Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:07.190000Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:07.568857Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.569357Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:18:07.569398Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.577553Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:18:07.577750Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:18:07.577766Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:18:07.581411Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:18:07.581444Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:18:07.582908Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:18:07.585483Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:18:07.588456Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035487628, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:18:07.588490Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:18:07.588795Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:18:07.590478Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:18:07.590657Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:18:07.590724Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:18:07.590852Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:18:07.590893Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:18:07.590934Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:18:07.591592Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:18:07.591631Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:18:07.591652Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:18:07.591759Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:18:11.915889Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7490575378015523695:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:11.915980Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0407 14:18:17.665920123 643692 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.683568263 643508 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0407 14:18:17.714291067 643689 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.730118143 643689 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.766910151 643508 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.784583228 643508 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.813556206 647398 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.835017602 647397 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.864206717 647415 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.881155121 647416 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.912897281 643692 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0407 14:18:17.932717173 643692 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 20119, MsgBus: 22005 2025-04-07T14:18:15.006516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575417704163715:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:15.006572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001765/r3tmp/tmpScXqAN/pdisk_1.dat 2025-04-07T14:18:15.256638Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20119, node 1 2025-04-07T14:18:15.325618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:15.325649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:15.325677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:15.325807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:15.338263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:15.338374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:15.340110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22005 TClient is connected to server localhost:22005 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:15.701967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.725031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:18:15.870174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:18:15.999560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.057463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.447085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426294100021:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.447182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.647653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.669370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.693656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.715807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.738664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.771727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.804276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426294100512:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.804352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.804618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426294100517:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.808098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:17.816542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575426294100519:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:18.571740Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490575430589068131:3619], for# user0@builtin, access# DescribeSchema 2025-04-07T14:18:18.571773Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7490575430589068131:3619], for# user0@builtin, access# DescribeSchema 2025-04-07T14:18:18.578900Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490575430589068128:2464], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:18:18.579098Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGM0YzE1MWItMmNiYmQ3ZGItZWMwZWMwNGQtNDNmYTEyZDc=, ActorId: [1:7490575430589068121:2460], ActorState: ExecuteState, TraceId: 01jr89vjj12z08syd14kbq00xn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpSystemView::QueryStatsScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 11879, MsgBus: 25330 2025-04-07T14:18:14.959387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575413953039261:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:14.959439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001766/r3tmp/tmpoxspqh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11879, node 1 2025-04-07T14:18:15.242082Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:15.243689Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:15.246747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:15.276335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:15.276361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:15.276371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:15.276459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:15.327885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:15.327945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:15.329597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25330 TClient is connected to server localhost:25330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:18:15.662150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.690330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.798593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.918991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:15.977951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.487228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426837942856:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.487298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.712789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.735654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.761154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.800603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.825003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.888353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.931116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426837943353:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.931185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.931314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575426837943358:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.934687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:17.942191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575426837943360:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] |97.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 26354, MsgBus: 25313 2025-04-07T14:18:15.267725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575415938471155:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:15.268362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00175e/r3tmp/tmptEk0aC/pdisk_1.dat 2025-04-07T14:18:15.551919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26354, node 1 2025-04-07T14:18:15.595397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:15.595427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:15.595434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:15.595545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:15.643778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:15.643869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:15.645653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25313 TClient is connected to server localhost:25313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:15.983552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.002927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.102187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.231703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.288592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.646260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575424528407459:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.646390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.824400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.844058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.862951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.882459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.902005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.925831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:17.994599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575424528407955:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.994637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.994691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575424528407960:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:17.996894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:18.003532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575424528407962:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:19.424015Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035499059, txId: 281474976710671] shutting down 2025-04-07T14:18:19.514920Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035499510, txId: 281474976710674] shutting down >> KqpSystemView::NodesSimple [GOOD] >> KqpSysColV0::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 4871, MsgBus: 13479 2025-04-07T14:17:54.894284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575328007582989:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:54.894354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111a/r3tmp/tmp98Mnro/pdisk_1.dat 2025-04-07T14:17:55.137930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4871, node 1 2025-04-07T14:17:55.197181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:55.197210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:55.197217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:55.197344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:55.236342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:55.236461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:55.238004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13479 TClient is connected to server localhost:13479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:55.618232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:17:57.154426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575340892485487:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.154441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575340892485512:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.154506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:17:57.157642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:17:57.166233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575340892485525:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:17:57.400332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:17:57.531430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:17:58.226477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:17:59.646960Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzllNDE2ODMtMmM1ZDQwZTQtZGU2NWMzNmItYjZiYzlmY2M=, ActorId: [1:7490575349482428223:2937], ActorState: ExecuteState, TraceId: 01jr89v01be1xya6a4re166an1, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 2025-04-07T14:17:59.894453Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575328007582989:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:59.894527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 27606, MsgBus: 3985 2025-04-07T14:18:05.386902Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575372923007132:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:05.386971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00111a/r3tmp/tmpKlYUeE/pdisk_1.dat 2025-04-07T14:18:05.495446Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:05.519057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:05.519161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:05.521003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27606, node 2 2025-04-07T14:18:05.565198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:05.565223Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:05.565231Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:05.565331Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3985 TClient is connected to server localhost:3985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:18:05.994102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:18:06.002691Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:18:08.077332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575385807909622:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.077400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.077611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575385807909648:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:08.080831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:18:08.090506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575385807909650:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:18:08.213671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:18:08.303832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490575385807909839:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:18:08.304061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490575385807909839:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:18:08.304320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490575385807909839:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:18:08.304436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490575385807909839:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:18:08.304562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490575385807909839:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:18:08.3048 ... Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7490575398692816633:2869];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7490575398692816783:2918];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7490575398692816633:2869];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490575398692816759:2914];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7490575398692816549:2853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490575398692816759:2914];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.788998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7490575398692816808:2933];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038030;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789121Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7490575398692816549:2853];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7490575398692816808:2933];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7490575398692816745:2908];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7490575398692816845:2937];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7490575398692816745:2908];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7490575398692816845:2937];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7490575398692816784:2919];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490575398692816841:2935];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.789978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7490575398692816784:2919];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490575398692816841:2935];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7490575398692816689:2894];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7490575398692816656:2880];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7490575398692816689:2894];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790304Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7490575398692816656:2880];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7490575398692816753:2910];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490575398692816756:2913];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7490575398692816753:2910];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490575398692816756:2913];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490575398692816707:2897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7490575398692816854:2945];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.790986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490575398692816707:2897];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490575398692816891:2947];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7490575398692816854:2945];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490575398692816891:2947];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7490575398692816902:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7490575398692816902:2950];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791719Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7490575398692816844:2936];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.791867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7490575398692816844:2936];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7490575398692816721:2903];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792196Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7490575398692816721:2903];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7490575398692816801:2929];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792540Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7490575398692816801:2929];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7490575398692816896:2949];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.792828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7490575398692816896:2949];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.793059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490575398692816851:2943];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.793258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490575398692816851:2943];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:18:13.942466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSystemView::PartitionStatsFollower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 24341, MsgBus: 21199 2025-04-07T14:18:16.511038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575419010758990:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:16.511549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001750/r3tmp/tmpsTzfKf/pdisk_1.dat 2025-04-07T14:18:16.727657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24341, node 1 2025-04-07T14:18:16.786932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:16.786957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:16.786966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:16.787125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:16.853809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:16.853927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:16.855395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21199 TClient is connected to server localhost:21199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:17.242527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.262977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.375957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.496770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.556548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:18.928505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575427600695283:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.928572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.105811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.125019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.147724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.164464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.183238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.205092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.239582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575431895663071:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.239651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.239689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575431895663076:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.242519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:19.249369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575431895663078:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 11648, MsgBus: 20188 2025-04-07T14:18:17.252022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575425481605955:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:17.252132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00174e/r3tmp/tmpVUC8te/pdisk_1.dat 2025-04-07T14:18:17.514449Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11648, node 1 2025-04-07T14:18:17.559471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:17.559502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:17.559510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:17.559615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:17.587895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:17.587962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:17.589113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20188 TClient is connected to server localhost:20188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:17.964998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.981635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:18.085689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:18:18.199221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:18.276801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:19.803536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575434071542276:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.803679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.843549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.864804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.890366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.910400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.931326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.957308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:20.045071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575438366510069:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:20.045163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:20.045192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575438366510074:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:20.048178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:20.056064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575438366510076:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 26943, MsgBus: 1055 2025-04-07T14:18:15.724560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575414117232146:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:15.724711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:15.750349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575415733631065:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:15.758082Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575414445682364:2231];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00175a/r3tmp/tmpKJBhmA/pdisk_1.dat 2025-04-07T14:18:15.941228Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:15.953376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:16.115044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:16.123187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:16.123264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:16.123385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:16.123453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:16.127077Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:18:16.127212Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:18:16.128977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:16.129199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26943, node 1 2025-04-07T14:18:16.162342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:16.162420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:16.170528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:16.203457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:16.203486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:16.203498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:16.203637Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1055 TClient is connected to server localhost:1055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:16.730758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.773111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.902894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.027275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.115882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:18.645086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575427002136013:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.645154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.804760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.840860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.874112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.910989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.940442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.980385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:19.026831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575431297103974:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.026901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.027086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575431297103979:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.030360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:19.042226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575431297103981:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2025-04-07T14:18:19.902842Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035499897, txId: 281474976720671] shutting down 2025-04-07T14:18:20.073548Z node 2 :BS_PROXY_PUT ERROR: [8035d11930c75e34] Result# TEvPutResult {Id# [72075186224037913:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:20.104182Z node 3 :BS_PROXY_PUT ERROR: [c82829f2ce9c9beb] Result# TEvPutResult {Id# [72075186224037914:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:20.749360Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575415733631065:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:20.749439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 6977, MsgBus: 26470 2025-04-07T14:18:02.301750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575361673119097:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:02.301821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0017cf/r3tmp/tmpYPmC6O/pdisk_1.dat 2025-04-07T14:18:02.572734Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6977, node 1 2025-04-07T14:18:02.623057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:02.623088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:02.623101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:02.623248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:02.643352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:02.643450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:02.644654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26470 TClient is connected to server localhost:26470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:03.113240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:03.588725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:03.588792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:03.588875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7490575361673119503:2190], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:03.588894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:04.588966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:04.588998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:04.589048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7490575361673119503:2190], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:04.589101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:04.690670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575370263054328:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:04.690782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:04.862687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490575370263054353:2302], Recipient [1:7490575361673119503:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:18:04.862731Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:18:04.862745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T14:18:04.862793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7490575370263054349:2299], Recipient [1:7490575361673119503:2190]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-07T14:18:04.862805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T14:18:04.915932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:18:04.916357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:04.916475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-04-07T14:18:04.916957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T14:18:04.917000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T14:18:04.917021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:18:04.917161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T14:18:04.917161Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2025-04-07T14:18:04.917200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 
2025-04-07T14:18:04.917934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-04-07T14:18:04.918092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:18:04.918116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:18:04.918203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:18:04.918243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:18:04.918269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T14:18:04.918683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-07T14:18:04.918804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2025-04-07T14:18:04.918836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T14:18:04.918849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0 2025-04-07T14:18:04.919059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7490575361673119503:2190], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:18:04.919093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:18:04.919127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:18:04.919139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:18:04.919237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:18:04.919289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:18 ... 
Sender [1:7490575361673119503:2190], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:19.596773Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:19.975565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:7490575370263054371:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:19.975614Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T14:18:19.975684Z node 1 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2025-04-07T14:18:19.975779Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 0, TableInfos size = 1 2025-04-07T14:18:19.975847Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-04-07T14:18:19.976038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7490575434687564222:2543], Recipient [1:7490575361673119503:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:18:19.976066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:18:19.976083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T14:18:19.976133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [1:7490575434687564221:2406], Recipient [1:7490575370263054371:2307]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:18:19.976185Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:18:19.976274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7490575370263054371:2307], Recipient [1:7490575361673119503:2190]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 768 RowCount: 4 IndexSize: 0 InMemSize: 768 LastAccessTime: 1744035485368 LastUpdateTime: 1744035485270 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 896 Memory: 123728 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1744035484924 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T14:18:19.976297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-07T14:18:19.976318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 768 rowCount 4 cpuUsage 0.0896 2025-04-07T14:18:19.976368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: DataSize: 768 RowCount: 4 IndexSize: 0 InMemSize: 768 LastAccessTime: 1744035485368 LastUpdateTime: 1744035485270 
ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-07T14:18:19.976384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099998s, queue# 1 2025-04-07T14:18:19.982209Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:7490575370263054371:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:18:19.982254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:18:19.982322Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 1744035500000 last cleanup 0 2025-04-07T14:18:19.982388Z node 1 :TX_DATASHARD DEBUG: Removed expired snapshots at 72075186224037888 2025-04-07T14:18:19.982414Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:18:19.982432Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:18:19.982446Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:18:19.982458Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:18:19.982954Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268828680, Sender [1:7490575370263054392:2329], Recipient [1:7490575370263054423:2309]: NKikimr::TEvTablet::TEvFUpdate 2025-04-07T14:18:19.983066Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268828680, Sender [1:7490575370263054393:2330], Recipient [1:7490575370263054425:2310]: NKikimr::TEvTablet::TEvFUpdate 2025-04-07T14:18:19.983092Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268828680, Sender [1:7490575370263054394:2331], Recipient [1:7490575370263054427:2311]: NKikimr::TEvTablet::TEvFUpdate 2025-04-07T14:18:19.998792Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7490575370263054423:2309]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:19.998826Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:19.999565Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7490575370263054425:2310]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:19.999587Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:20.000565Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7490575370263054427:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:20.000585Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:18:20.076403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:18:20.076440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvPrivate::TEvPersistTableStats 2025-04-07T14:18:20.076472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-07T14:18:20.076510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-07T14:18:20.076523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-07T14:18:20.076570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 768 row count 4 2025-04-07T14:18:20.076621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2025-04-07T14:18:20.076654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 768 2025-04-07T14:18:20.076681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-04-07T14:18:20.076756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-07T14:18:20.076844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T14:18:20.076944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-07T14:18:20.076964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-07T14:18:20.076976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-07T14:18:20.077048Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 768 RowCount: 4 IndexSize: 0 CPUCores: 0.000896 TabletId: 72075186224037888 NodeId: 1 StartTime: 1744035484924 AccessTime: 1744035485368 UpdateTime: 1744035485270 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 2025-04-07T14:18:20.597091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:20.597119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-07T14:18:20.597150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7490575361673119503:2190], Recipient [1:7490575361673119503:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-07T14:18:20.597159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats, attempt 3 2025-04-07T14:18:21.482489Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7490575443277498840:2417], owner: [1:7490575443277498837:2415], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:18:21.482905Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7490575443277498840:2417], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2025-04-07T14:18:21.483132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7490575443277498840:2417], Recipient [1:7490575361673119503:2190]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-04-07T14:18:21.483158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-04-07T14:18:21.483254Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7490575443277498840:2417], row count: 2, finished: 1 2025-04-07T14:18:21.483287Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7490575443277498840:2417], owner: [1:7490575443277498837:2415], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-07T14:18:21.485712Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7490575361673119233:2186], database# /Root, query hash# 14960494650040056739, cpu time# 245905 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 24261, MsgBus: 14690 2025-04-07T14:17:58.938686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575342043322046:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:58.939482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001114/r3tmp/tmpVYP8En/pdisk_1.dat 2025-04-07T14:17:59.193895Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24261, node 1 2025-04-07T14:17:59.232608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:59.232640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:59.232652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:59.232804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:59.294993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:59.295125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:59.296724Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14690 TClient is connected to server localhost:14690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:59.611313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:01.266291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575354928224556:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.266475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575354928224571:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.266549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:01.270364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:18:01.278751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575354928224585:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:18:01.542796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:18:01.657505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:02.452128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:03.743678Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-04-07T14:18:03.744431Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575363518167677:2938], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7490575363518167391:2938]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[1:7490575363518167677:2938].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:03.753702Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490575363518167670:2938], SessionActorId: [1:7490575363518167391:2938], Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7490575363518167391:2938]. isRollback=0 2025-04-07T14:18:03.753862Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDMxOGY4YzUtYTFkZmYwMmYtNTVlMmY1OC04MWMxNjdhZQ==, ActorId: [1:7490575363518167391:2938], ActorState: ExecuteState, TraceId: 01jr89v41rf9chyrfnvh3tp639, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7490575363518167671:2938] from: [1:7490575363518167670:2938] 2025-04-07T14:18:03.754059Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490575363518167671:2938] TxId: 281474976710666. Ctx: { TraceId: 01jr89v41rf9chyrfnvh3tp639, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMxOGY4YzUtYTFkZmYwMmYtNTVlMmY1OC04MWMxNjdhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:03.760325Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDMxOGY4YzUtYTFkZmYwMmYtNTVlMmY1OC04MWMxNjdhZQ==, ActorId: [1:7490575363518167391:2938], ActorState: ExecuteState, TraceId: 01jr89v41rf9chyrfnvh3tp639, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-04-07T14:18:03.938781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575342043322046:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:03.938875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 31092, MsgBus: 3640 2025-04-07T14:18:09.717394Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575391034411177:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:09.717470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001114/r3tmp/tmpqe1oiN/pdisk_1.dat 2025-04-07T14:18:09.801827Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31092, node 2 2025-04-07T14:18:09.846507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:09.846584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:09.848235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:09.862413Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:09.862438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:09.862446Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:09.862562Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3640 TClient is connected to server localhost:3640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:18:10.216374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:12.356977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575403919313691:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.357278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490575403919313678:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.357665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:12.360094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:18:12.368265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490575403919313701:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:18:12.494283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:18:12.525919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:18:13.335863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:14.717866Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575391034411177:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:14.717959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:16.845389Z node 2 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-04-07T14:18:16.846602Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575421099191693:2937], SessionActorId: [2:7490575412509256490:2937], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[2:7490575421099191693:2937].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:16.846730Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490575421099191693:2937], SessionActorId: [2:7490575412509256490:2937], Transaction locks invalidated.{
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7490575412509256490:2937]. isRollback=0 2025-04-07T14:18:16.846827Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTAxZTZlMDAtZGU0NGViODAtNzQxYjMyM2EtYjZmNGFlYjU=, ActorId: [2:7490575412509256490:2937], ActorState: ExecuteState, TraceId: 01jr89vgnk7yqyh4pxwe57ztfn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7490575421099191694:2937] from: [2:7490575421099191693:2937] 2025-04-07T14:18:16.847008Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490575421099191694:2937] TxId: 281474976715666. Ctx: { TraceId: 01jr89vgnk7yqyh4pxwe57ztfn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTAxZTZlMDAtZGU0NGViODAtNzQxYjMyM2EtYjZmNGFlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:18:16.847413Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTAxZTZlMDAtZGU0NGViODAtNzQxYjMyM2EtYjZmNGFlYjU=, ActorId: [2:7490575412509256490:2937], ActorState: ExecuteState, TraceId: 01jr89vgnk7yqyh4pxwe57ztfn, Create QueryResponse for error on request, msg: 2025-04-07T14:18:16.847954Z node 2 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715666; 2025-04-07T14:18:16.848078Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744035496889 : 281474976715666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 64572, MsgBus: 6128 2025-04-07T14:18:19.405484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575435448525387:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:19.405573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001742/r3tmp/tmpTI123r/pdisk_1.dat 2025-04-07T14:18:19.655091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64572, node 1 2025-04-07T14:18:19.717708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:19.717730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:19.717757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:19.717879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:19.767477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:19.767663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:19.769357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6128 TClient is connected to server localhost:6128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:20.115733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:20.131261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:20.220391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:20.355928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:20.426570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:21.722361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575444038461707:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:21.722466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:21.891480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:21.915124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:21.939065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:21.965129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:21.988334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:22.057651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:22.090874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575448333429500:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:22.090947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:22.090976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575448333429505:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:22.094508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:22.103493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575448333429507:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:23.292936Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035503282, txId: 281474976710671] shutting down >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> BasicStatistics::Simple [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2025-04-07T14:15:37.415047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:37.415342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:37.415466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ffd/r3tmp/tmp2psadi/pdisk_1.dat 2025-04-07T14:15:37.760288Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2893, node 1 2025-04-07T14:15:37.953432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:37.953474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:37.953496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:37.953894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:37.955342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.048872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:38.049010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:38.063312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2714 2025-04-07T14:15:38.666801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:41.213123Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:41.241927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:41.242019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:41.280475Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:41.284920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:41.479734Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:41.531564Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:41.531660Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:41.558816Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:41.558982Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:41.559171Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:41.559222Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:41.559278Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:41.559345Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:41.559389Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:41.559449Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:41.559781Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:41.760085Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:41.760206Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:41.766137Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:41.773008Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:41.773835Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:41.774756Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:41.793454Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:41.793515Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:41.793580Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:41.795308Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:41.795392Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:41.807460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:41.818523Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:41.818686Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:41.830553Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:41.846284Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:41.884606Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:42.173687Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:42.327391Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:43.084150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.084288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.102688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:43.308992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2277:3051], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.309109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.310261Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2282:3055]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:43.310400Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:43.310472Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2284:3057] 2025-04-07T14:15:43.311277Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2284:3057] 2025-04-07T14:15:43.311730Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2285:2838] 2025-04-07T14:15:43.312054Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2284:3057], server id = [2:2285:2838], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:43.312236Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2285:2838], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:43.312311Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:43.312532Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:43.312611Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2282:3055], StatRequests.size() = 1 2025-04-07T14:15:43.349728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2289:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.349830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.350202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2294:3066], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.356971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:43.494230Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:43.494295Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:15:43.611968Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2284:3057], schemeshard count = 1 2025-04-07T14:15:43.843021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2296:3068], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:15:43.976133Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2420:3153]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:43.976400Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:15:43.976449Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2420:3153], StatRequests.size() = 1 2025-04-07T14:15:44.185591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89ptxjdp8cb8r1vc32f7e6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQ3YzZkNjYtZDU0OWUwZjAtZjIzMzI1MTgtYzFmMmVjZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:44.222795Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetSt ... 5-04-07T14:18:12.552887Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 115 ] 2025-04-07T14:18:12.552913Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 115, ReplyToActorId = [2:6253:4528], StatRequests.size() = 1 2025-04-07T14:18:13.754611Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:6286:4542]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:13.754894Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2025-04-07T14:18:13.754935Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6286:4542], StatRequests.size() = 1 2025-04-07T14:18:15.059917Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6319:4556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:15.060237Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-07T14:18:15.060282Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6319:4556], StatRequests.size() = 1 2025-04-07T14:18:15.510469Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:16.297422Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6352:4570]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:16.297640Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-07T14:18:16.297674Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6352:4570], StatRequests.size() = 1 2025-04-07T14:18:17.125472Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:18:17.125568Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:17.125612Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:17.125657Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:17.873618Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6394:4588]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:17.873896Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:17.873933Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6394:4588], StatRequests.size() = 1 2025-04-07T14:18:18.497065Z node 2 
:STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:18.497557Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:18.497874Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:18.554419Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:18:18.554502Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 194.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:18.554867Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2025-04-07T14:18:18.568099Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:19.375179Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6427:4604]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:19.375418Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:19.375445Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6427:4604], StatRequests.size() = 1 2025-04-07T14:18:19.941675Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:19.941757Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:19.941810Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:18:19.941849Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:19.942196Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:18:19.954501Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:19.958527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6448:4621], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.958644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6457:4626], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.958734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:19.970474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:20.050488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6462:4629], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:20.349457Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6581:4693]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:20.349695Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:20.349746Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6581:4693], StatRequests.size() = 1 2025-04-07T14:18:20.442954Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDVkOTM0NWEtYTU0NzYxYzAtZTQ0NzZiNjktNzllMjgyZjU=, TxId: 2025-04-07T14:18:20.443016Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDVkOTM0NWEtYTU0NzYxYzAtZTQ0NzZiNjktNzllMjgyZjU=, TxId: 2025-04-07T14:18:20.443389Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:20.456394Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:20.456454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:21.065984Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6623:4713]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:21.066252Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:21.066294Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6623:4713], StatRequests.size() = 1 2025-04-07T14:18:22.276366Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6664:4735]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:22.276718Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:22.276763Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6664:4735], StatRequests.size() = 1 2025-04-07T14:18:22.823462Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:22.823749Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:22.823784Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:22.823830Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:22.823864Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:22.824166Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:22.826335Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:22.838544Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yzg0ODkyYWMtM2ZlNGM0MTktOTJmYTMzMjctYmQ5ODY2YTU=, TxId: 2025-04-07T14:18:22.838605Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yzg0ODkyYWMtM2ZlNGM0MTktOTJmYTMzMjctYmQ5ODY2YTU=, TxId: 2025-04-07T14:18:22.839080Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:22.853086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:22.853144Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:23.721026Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6734:4777]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:23.721353Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:23.721397Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6734:4777], StatRequests.size() = 1 2025-04-07T14:18:25.237571Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6773:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:25.237835Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-07T14:18:25.237880Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6773:4797], StatRequests.size() = 1 2025-04-07T14:18:25.820250Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:25.820427Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:25.820458Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:25.820880Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:25.821274Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:26.715582Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6806:4813]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:26.715832Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-07T14:18:26.715875Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6806:4813], StatRequests.size() = 1 >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> BasicStatistics::TwoTables [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> KqpSystemView::Join [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-04-07T14:15:41.213546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:41.213831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:41.213959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001f03/r3tmp/tmpYdPQv5/pdisk_1.dat 2025-04-07T14:15:41.528224Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14310, node 1 2025-04-07T14:15:41.720067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:41.720118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:41.720143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:41.720745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:41.722624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:41.813430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:41.813578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:41.827437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11036 2025-04-07T14:15:42.430590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.771190Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:44.818085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:44.818169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:44.856324Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:44.858942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:45.028770Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:45.072441Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:45.072548Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:45.094513Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:45.094632Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:45.094758Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:45.094796Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:45.094829Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:45.094869Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:45.094905Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:45.094942Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:45.095233Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:45.293383Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:45.293463Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:45.298030Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:45.303781Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:45.304367Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:45.305109Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:45.320642Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:45.320720Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:45.320809Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:45.322391Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:45.322457Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:45.332811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:45.342654Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:45.342769Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:45.352468Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:45.368252Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:45.407023Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:45.610927Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:45.787117Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:46.556637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.556760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.570489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:46.747551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2277:3051], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.747700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.748848Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2282:3055]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:46.748993Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:46.749070Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2284:3057] 2025-04-07T14:15:46.749128Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2284:3057] 2025-04-07T14:15:46.749626Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2285:2838] 2025-04-07T14:15:46.749892Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2284:3057], server id = [2:2285:2838], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:46.750026Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2285:2838], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:46.750081Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:46.750223Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:46.750289Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2282:3055], StatRequests.size() = 1 2025-04-07T14:15:46.770012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2289:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.770127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.770498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2294:3066], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:46.774909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:46.936550Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:46.936615Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:15:47.032355Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2284:3057], schemeshard count = 1 2025-04-07T14:15:47.279646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2296:3068], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:15:47.383943Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2419:3154]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:47.384100Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:15:47.384138Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2419:3154], StatRequests.size() = 1 2025-04-07T14:15:47.469405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89py9gb5dsdpygtmrf0m7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI0YTQ1LThkYjI0ZDg1LTllMmY5NTcxLTc0ZWZjNmNl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:15:47.517555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part pr ... , no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:18.259775Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:18.977709Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6571:4684]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:18.977965Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:18.978004Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6571:4684], StatRequests.size() = 1 2025-04-07T14:18:19.581365Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:19.581518Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:19.581927Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:19.660542Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037889 2025-04-07T14:18:19.660628Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:19.660987Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 73 2025-04-07T14:18:19.674135Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:20.483414Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6606:4701]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:20.483670Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:20.483714Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6606:4701], StatRequests.size() = 1 2025-04-07T14:18:21.055154Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:21.055238Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:21.055338Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:18:21.055393Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:21.055714Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:21.069411Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:21.073401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6627:4718], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:21.073512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6637:4723], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:21.073676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:21.086213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:21.139500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6641:4726], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:21.450488Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6758:4788]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:21.450746Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:21.450811Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6758:4788], StatRequests.size() = 1 2025-04-07T14:18:21.578545Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzgxMzUyMzEtMTdmZjliMDctNTE2ZGQzZTYtYzQ5ZDRlMWI=, TxId: 2025-04-07T14:18:21.578619Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzgxMzUyMzEtMTdmZjliMDctNTE2ZGQzZTYtYzQ5ZDRlMWI=, TxId: 2025-04-07T14:18:21.579192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:21.592986Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:21.593050Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:22.241430Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6803:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:22.241721Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:22.241765Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6803:4808], StatRequests.size() = 1 2025-04-07T14:18:23.600788Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6842:4830]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:23.601025Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:23.601068Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6842:4830], StatRequests.size() = 1 2025-04-07T14:18:24.086711Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:24.086825Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:24.086849Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:24.086903Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2025-04-07T14:18:24.086940Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:18:24.087192Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:24.089580Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:24.103596Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2IzZDhkYWYtNGViMjViMmMtYWY3YjgzZmYtYTlkZGQ5OGE=, TxId: 2025-04-07T14:18:24.103648Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2IzZDhkYWYtNGViMjViMmMtYWY3YjgzZmYtYTlkZGQ5OGE=, TxId: 2025-04-07T14:18:24.103946Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:24.117600Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2025-04-07T14:18:24.117656Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:25.022701Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6916:4874]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:25.023031Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:25.023079Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6916:4874], StatRequests.size() = 1 2025-04-07T14:18:26.556920Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6955:4894]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:26.557214Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-07T14:18:26.557249Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6955:4894], StatRequests.size() = 1 2025-04-07T14:18:27.118322Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:27.118489Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:27.118736Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:27.118764Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:27.118810Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:27.118852Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:27.119071Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:27.121100Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:27.121476Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:27.134565Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODNkZjAyZi01NzUyMThjNC1mY2JiNWI0YS0zYjcxMmNi, TxId: 2025-04-07T14:18:27.134613Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODNkZjAyZi01NzUyMThjNC1mY2JiNWI0YS0zYjcxMmNi, TxId: 2025-04-07T14:18:27.134902Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:27.149965Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:27.150026Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:28.067112Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7018:4931]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:28.067370Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-07T14:18:28.067413Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7018:4931], StatRequests.size() = 1 2025-04-07T14:18:28.068127Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7020:4933]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:28.070550Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-04-07T14:18:28.070595Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7020:4933], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 18451, MsgBus: 5211 2025-04-07T14:18:16.042813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575419596038451:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:16.043115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001759/r3tmp/tmpAQ6cxG/pdisk_1.dat 2025-04-07T14:18:16.314527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18451, node 1 2025-04-07T14:18:16.372657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:16.372709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:16.372717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:16.372828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:18:16.400986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:16.401113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-07T14:18:16.402788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5211 TClient is connected to server localhost:5211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:16.772151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.792068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.933255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.064068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:17.124249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:18.524823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575428185974626:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.524919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.695451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.717821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.740607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.761441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.781512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.845863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:18.876534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575428185975122:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.876593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.876607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575428185975127:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:18.879472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:18.886568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575428185975129:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:19.730912Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035499726, txId: 281474976710671] shutting down waiting... 2025-04-07T14:18:20.847825Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035500844, txId: 281474976710673] shutting down 2025-04-07T14:18:21.042335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490575419596038451:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:21.042444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-07T14:18:21.979085Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035501973, txId: 281474976710675] shutting down waiting... 2025-04-07T14:18:23.117243Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035503113, txId: 281474976710677] shutting down waiting... 2025-04-07T14:18:24.236310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035504231, txId: 281474976710679] shutting down waiting... 2025-04-07T14:18:25.412336Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035505406, txId: 281474976710681] shutting down waiting... 2025-04-07T14:18:26.621884Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035506597, txId: 281474976710683] shutting down waiting... 2025-04-07T14:18:27.822895Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035507813, txId: 281474976710685] shutting down waiting... 2025-04-07T14:18:29.026385Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035509008, txId: 281474976710687] shutting down 2025-04-07T14:18:29.419543Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035509408, txId: 281474976710689] shutting down >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> BasicStatistics::SimpleGlobalIndex [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2025-04-07T14:15:42.882822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:42.883118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:42.883242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ef5/r3tmp/tmpDpiMHG/pdisk_1.dat 2025-04-07T14:15:43.199999Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12659, node 1 2025-04-07T14:15:43.374468Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:43.374516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:43.374545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:43.375074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:43.379685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.466691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:43.466822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.480585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6158 2025-04-07T14:15:44.070128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.431501Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:46.489658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:46.489764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:46.528407Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:46.532128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:46.696395Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:46.735702Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:46.735772Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:46.758435Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:46.758611Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:46.758791Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:46.758850Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:46.758893Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:46.758941Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:46.758986Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:46.759051Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:46.759419Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:46.958781Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:46.958877Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:46.964457Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:46.971695Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:46.972596Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:46.973534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:46.993143Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:46.993207Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:46.993293Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:46.994837Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:46.994927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.007798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:47.026077Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:47.026233Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:47.038698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:47.054935Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:47.094492Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:47.296748Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:47.471165Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:48.198640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:48.198771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:48.215013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:48.312463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:48.312693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:48.312960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:48.313084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:48.313192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:48.313330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:48.313440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:48.313552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:48.313671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:48.313780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:48.313895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:48.313998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2811];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:48.334794Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:15:48.334883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:15:48.334999Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:15:48.335029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:15:48.335164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:15:48.335199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:15:48.335283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema: ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.859264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.862475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000018s 2025-04-07T14:16:01.235460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:16:01.235520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:02.506097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:16:02.506147Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:25.429354Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:18:25.429440Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:25.429498Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:25.429540Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:26.793183Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:18:26.793260Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 235.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:26.793570Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2025-04-07T14:18:26.825764Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:28.336803Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:28.336904Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:28.336966Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:28.337012Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:28.337376Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:18:28.340607Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:28.343883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7030:5272], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:28.343981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7041:5277], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:28.344073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:28.355527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:28.426933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7044:5280], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:28.754116Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7161:5342]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:28.754409Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:28.754516Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7163:5344] 2025-04-07T14:18:28.754581Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7163:5344] 2025-04-07T14:18:28.754920Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7164:5345] 2025-04-07T14:18:28.755046Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7164:5345], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:18:28.755123Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:18:28.755236Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7163:5344], server id = [2:7164:5345], tablet id = 72075186224037897, status = OK 2025-04-07T14:18:28.755322Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:28.755394Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7161:5342], StatRequests.size() = 1 2025-04-07T14:18:28.920763Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWZjZTQ5NjEtOTliNWU4NjEtOTMyYWVkZDQtOGRkOTEyNmE=, TxId: 2025-04-07T14:18:28.920856Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWZjZTQ5NjEtOTliNWU4NjEtOTMyYWVkZDQtOGRkOTEyNmE=, TxId: 2025-04-07T14:18:28.921411Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:28.962077Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:28.962170Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:29.057269Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:18:29.057361Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:18:29.128843Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7163:5344], schemeshard count = 1 2025-04-07T14:18:31.838318Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:31.838386Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:31.838438Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2025-04-07T14:18:31.838489Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:31.843208Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:18:31.861774Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:18:31.862223Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:18:31.862309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:18:31.862999Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:18:31.879257Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:18:31.879470Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-07T14:18:31.879972Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7290:5414], server id = [2:7291:5415], tablet id = 72075186224037899, status = OK 2025-04-07T14:18:31.880386Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7290:5414], path = { OwnerId: 72075186224037889 LocalId: 4 } 2025-04-07T14:18:31.881527Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-07T14:18:31.881629Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:18:31.881875Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:18:31.882076Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:18:31.882382Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:18:31.884730Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7290:5414], server id = [2:7291:5415], tablet id = 72075186224037899 2025-04-07T14:18:31.884781Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:31.885249Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:18:31.907068Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7308:5432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:31.907214Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:18:31.907245Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7308:5432], StatRequests.size() = 1 2025-04-07T14:18:32.055337Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-04-07T14:18:32.055467Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWJiZjE2YTgtODllMmE5ODEtYjgzMTlkNTktMWFiN2IyMTE=, TxId: 2025-04-07T14:18:32.055497Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWJiZjE2YTgtODllMmE5ODEtYjgzMTlkNTktMWFiN2IyMTE=, TxId: 2025-04-07T14:18:32.056172Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 
2025-04-07T14:18:32.057390Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7316:5395]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:32.057657Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:32.057692Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:18:32.059404Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:32.059466Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-07T14:18:32.059516Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:18:32.075091Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: 2025-04-07T14:15:44.116559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:44.116782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:44.116886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ee9/r3tmp/tmpPZjdRp/pdisk_1.dat 2025-04-07T14:15:44.385635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21761, node 1 2025-04-07T14:15:44.567027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:44.567084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:44.567121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:44.567730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:44.573761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.665929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:44.666086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:44.680103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29390 2025-04-07T14:15:45.289346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:47.803389Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:47.851453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.851572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.889684Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:47.893333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:48.074012Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:48.123778Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:48.123882Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:48.151740Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:48.151935Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:48.152140Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:48.152199Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:48.152248Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:48.152296Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:48.152343Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:48.152404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:48.152815Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:48.350773Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:48.350858Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:48.356078Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:48.362608Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:48.363390Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:48.364308Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:48.383673Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:48.383745Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:48.383814Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:48.385557Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:48.385644Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:48.398455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:48.415996Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:48.416129Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:48.425296Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:48.440337Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:48.478476Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:48.677646Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:48.830835Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:49.567387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.567480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.581566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:49.801384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2346:3059], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.801544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.802975Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2351:3063]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:49.803169Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:49.803249Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2353:3065] 2025-04-07T14:15:49.803305Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2353:3065] 2025-04-07T14:15:49.803881Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2354:2890] 2025-04-07T14:15:49.804180Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2353:3065], server id = [2:2354:2890], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:49.804253Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2354:2890], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:49.804306Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:49.804461Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:49.804508Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2351:3063], StatRequests.size() = 1 2025-04-07T14:15:49.810754Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2387:3074]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:49.810942Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:15:49.810975Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2387:3074], StatRequests.size() = 1 2025-04-07T14:15:49.811109Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2389:3076]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:49.811178Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:15:49.811192Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2389:3076], StatRequests.size() = 1 2025-04-07T14:15:49.827122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2394:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.827254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.827574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2399:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.831603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:50.014539Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:50.014640Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:15:50.089096Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2353:3065], schemeshard count = 1 2025-04-07T14:15:50.322519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2401:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, do ... equest id = 118, ReplyToActorId = [2:6576:4669], StatRequests.size() = 1 2025-04-07T14:18:21.309527Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:18:21.309621Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:21.309684Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:21.309735Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:21.971346Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6616:4686]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:21.971578Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:21.971620Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6616:4686], StatRequests.size() = 1 2025-04-07T14:18:22.503878Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:22.504360Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:22.504780Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:22.575171Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037889 2025-04-07T14:18:22.575241Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 180.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:22.575497Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 73 2025-04-07T14:18:22.589266Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:23.347822Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6649:4702]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:23.348084Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:23.348125Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6649:4702], StatRequests.size() = 1 2025-04-07T14:18:23.956028Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:23.956110Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:23.956166Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 6] is data table. 2025-04-07T14:18:23.956225Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 6] 2025-04-07T14:18:23.956545Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:18:24.000038Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:24.003384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6670:4719], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:24.003526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6680:4724], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:24.003738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:24.014024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:24.070424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6684:4727], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:24.391054Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6803:4791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:24.391348Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:24.391405Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6803:4791], StatRequests.size() = 1 2025-04-07T14:18:24.498822Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjllNDc3ZmUtOTg0MDJlZWEtNDFjMjcxMy1lYmY0NjdhZQ==, TxId: 2025-04-07T14:18:24.498901Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjllNDc3ZmUtOTg0MDJlZWEtNDFjMjcxMy1lYmY0NjdhZQ==, TxId: 2025-04-07T14:18:24.499391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:24.512460Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 6] 2025-04-07T14:18:24.512515Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:25.243975Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6845:4811]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:25.244299Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:25.244343Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6845:4811], StatRequests.size() = 1 2025-04-07T14:18:26.949996Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6886:4833]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:26.950210Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:26.950240Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6886:4833], StatRequests.size() = 1 2025-04-07T14:18:27.565510Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:27.565656Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:27.565706Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:27.565747Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:18:27.565782Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:27.566066Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:27.568592Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:27.594530Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODhhYTQzYmItMTZhNGVmMjMtNjMyZjJjZGItYmQ4MDZlYTU=, TxId: 2025-04-07T14:18:27.594598Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODhhYTQzYmItMTZhNGVmMjMtNjMyZjJjZGItYmQ4MDZlYTU=, TxId: 2025-04-07T14:18:27.595294Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:27.621916Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:27.621978Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:28.567348Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6956:4875]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:28.567605Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:28.567646Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6956:4875], StatRequests.size() = 1 2025-04-07T14:18:30.299409Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6995:4895]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:30.299686Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-07T14:18:30.299759Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6995:4895], StatRequests.size() = 1 2025-04-07T14:18:31.025162Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:31.025363Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:31.025537Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:31.025566Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:31.025605Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:31.025638Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:31.025919Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:31.028612Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:31.035073Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:31.050541Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmNjYTNmNjktMmM3ZjAxZjUtY2ZmNjYwNTQtMTBmYWNlOTc=, TxId: 2025-04-07T14:18:31.050602Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmNjYTNmNjktMmM3ZjAxZjUtY2ZmNjYwNTQtMTBmYWNlOTc=, TxId: 2025-04-07T14:18:31.051082Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:31.073922Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:31.074001Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:32.146901Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7060:4934]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:32.147145Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-07T14:18:32.147180Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7060:4934], StatRequests.size() = 1 >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> KqpExtractPredicateLookup::SqlInJoin+EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup >> BasicStatistics::TwoNodes [GOOD] >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> HttpRequest::ProbeServerless [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-04-07T14:15:48.603149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:48.603378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:48.603472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ec0/r3tmp/tmp3zEV4u/pdisk_1.dat 2025-04-07T14:15:48.831855Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61466, node 1 2025-04-07T14:15:49.001242Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:49.001282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:49.001299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:49.001640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:49.002905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:49.091845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:49.091948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:49.104987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12498 2025-04-07T14:15:49.674359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:52.309467Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:52.364302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:52.364401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:52.402612Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:52.405131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:52.584531Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:52.630125Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:52.630218Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:52.651219Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:52.651346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:52.651480Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:52.651521Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:52.651554Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:52.651592Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:52.651625Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:52.651666Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:52.651955Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:52.845186Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:52.845267Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:52.849657Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:52.854430Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:52.854932Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:52.855531Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:52.869586Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:52.869626Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:52.869675Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:52.870975Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:52.871047Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:52.880267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:52.888241Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:52.888343Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:52.896197Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:52.911305Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:52.949797Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:53.152651Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:53.304035Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:53.929891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:54.435793Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:54.561192Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:54.561250Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:54.561338Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2487:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:54.563345Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2495:2909] 2025-04-07T14:15:54.564048Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2495:2909], schemeshard id = 72075186224037899 2025-04-07T14:15:55.465827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2615:3186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:55.465981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:55.481549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:15:55.659673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2842:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:55.659761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:55.667175Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2847:3236]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:55.667307Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:55.667445Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T14:15:55.667487Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2850:3239] 2025-04-07T14:15:55.667528Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2850:3239] 2025-04-07T14:15:55.667881Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2851:3090] 2025-04-07T14:15:55.668106Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2850:3239], server id = [2:2851:3090], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:55.668172Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2851:3090], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:55.668211Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:55.668392Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:55.668436Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2847:3236], StatRequests.size() = 1 2025-04-07T14:15:55.672955Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2884:3248]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:55.673076Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:15:55.673106Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2884:3248], StatRequests.size() = 1 2025-04-07T14:15:55.673223Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2886:3250]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:55.673288Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:15:55.673300Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2886:3250], StatRequests.size() = 1 2025-04-07T14:15:55.685620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2891:3255], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:55.685718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, Data ... ATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:31.225682Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:31.300071Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2025-04-07T14:18:31.300146Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 213.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:31.300520Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2025-04-07T14:18:31.331822Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:31.642714Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7760:5510]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:31.642987Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-07T14:18:31.643027Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7760:5510], StatRequests.size() = 1 2025-04-07T14:18:32.792518Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:32.792592Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:32.792684Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:32.792730Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:32.793017Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:32.806779Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:32.811800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7792:5532], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:32.811915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7802:5537], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:32.812133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:32.845543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:32.919806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7806:5540], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:33.267962Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7925:5605]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:33.268271Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:33.268503Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-07T14:18:33.268551Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:18:33.268790Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:33.268866Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7925:5605], StatRequests.size() = 1 2025-04-07T14:18:33.413186Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDU0MTU1NzUtNzVhODhmZmMtY2QxNDBkMjYtMWIwMDQ4Mzg=, TxId: 2025-04-07T14:18:33.413259Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDU0MTU1NzUtNzVhODhmZmMtY2QxNDBkMjYtMWIwMDQ4Mzg=, TxId: 2025-04-07T14:18:33.413992Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:33.438101Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:33.438170Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:33.526840Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:18:33.526919Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:18:33.544088Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7955:5619]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:33.544404Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:33.544441Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7955:5619], StatRequests.size() = 1 2025-04-07T14:18:33.609386Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3114:3135], schemeshard count = 1 2025-04-07T14:18:34.041994Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-04-07T14:18:34.042055Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 224.000000s, at schemeshard: 72075186224037899 2025-04-07T14:18:34.042254Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-04-07T14:18:34.059133Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:35.113193Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8009:5655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:35.113457Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:35.113495Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8009:5655], StatRequests.size() = 1 2025-04-07T14:18:36.216444Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:36.216729Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2025-04-07T14:18:36.216766Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:36.216814Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-04-07T14:18:36.216844Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-07T14:18:36.217105Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:36.219348Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:36.242407Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjVlZTc4MDctYmRlMzRiMDQtYTIzMGQ0ODAtYWJkMWU0YzE=, TxId: 2025-04-07T14:18:36.242470Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjVlZTc4MDctYmRlMzRiMDQtYTIzMGQ0ODAtYWJkMWU0YzE=, TxId: 2025-04-07T14:18:36.242934Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:36.257835Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-07T14:18:36.257903Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:36.660494Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8082:5697]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:36.660956Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:36.660998Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8082:5697], StatRequests.size() = 1 2025-04-07T14:18:38.267669Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8134:5729]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:38.267930Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:38.267974Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8134:5729], StatRequests.size() = 1 2025-04-07T14:18:39.453329Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-07T14:18:39.453596Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:39.453627Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:39.453677Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-07T14:18:39.453706Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:39.453967Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-07T14:18:39.456459Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:39.459813Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:39.461093Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:39.481875Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmNjNmFmYzUtODc2ZjU2ZjMtNTNjYzczNjUtZjAxZjYxMDA=, TxId: 2025-04-07T14:18:39.481935Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmNjNmFmYzUtODc2ZjU2ZjMtNTNjYzczNjUtZjAxZjYxMDA=, TxId: 2025-04-07T14:18:39.482414Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:39.499543Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:39.499597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:39.862711Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8198:5766]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:39.863024Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:39.863065Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8198:5766], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-04-07T14:15:49.108876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:454:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:49.109158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:49.109256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001eb8/r3tmp/tmpoPhx3F/pdisk_1.dat 2025-04-07T14:15:49.361332Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11802, node 1 2025-04-07T14:15:49.531601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:49.531642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:49.531661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:49.531963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:49.533597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:49.624692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:49.624826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:49.637813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10854 2025-04-07T14:15:50.150708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:54.345077Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-07T14:15:54.345650Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:54.388617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:54.388711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:54.389076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:54.389123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:54.438428Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:54.438812Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:54.441847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:54.442195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:54.596805Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:54.597830Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:54.597898Z node 3 :STATISTICS DEBUG: 
[72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:54.621732Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:54.621978Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:54.622149Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:54.622201Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:54.622253Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:54.622298Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:54.622342Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:54.622385Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:54.623458Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:54.887186Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:54.887311Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:2148:2421], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:54.895262Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2163:2427] 2025-04-07T14:15:54.901621Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2196:2428] 2025-04-07T14:15:54.902416Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:2196:2428], schemeshard id = 72075186224037889 2025-04-07T14:15:54.904029Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:54.924724Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:54.924776Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:54.924829Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:54.928082Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:54.928167Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:54.933125Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:54.933218Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:54.943670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:54.955886Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:54.956035Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Subscribe on create table tx: 281474976720657 2025-04-07T14:15:54.970081Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:54.985429Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:54.986364Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:54.987817Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:55.020935Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:55.256395Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:55.364258Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:56.099388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3036], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.099476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.108993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:56.270743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.270854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.271753Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2732:3079]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:56.271873Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:56.271948Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2734:3081] 2025-04-07T14:15:56.271987Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2734:3081] 2025-04-07T14:15:56.272359Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2735:2557] 2025-04-07T14:15:56.272533Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2734:3081], server id = [3:2735:2557], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:56.272703Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [3:2735:2557], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:56.272749Z node 3 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:56.272912Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:56.272958Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2732:3079], StatRequests.size() = 1 2025-04-07T14:15:56.286418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2739:3085], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.286495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.286763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2744:3090], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:56.290687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:56.480607Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:56.480685Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:15:56.586021Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2 ... 5-04-07T14:18:27.330545Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:27.330594Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:18:27.330634Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:18:28.277651Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7336:3904]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:28.277890Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:28.277927Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7336:3904], StatRequests.size() = 1 2025-04-07T14:18:29.019194Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:18:29.019599Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:29.019881Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:29.170696Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:18:29.170771Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 235.000000s, at schemeshard: 72075186224037889 2025-04-07T14:18:29.171081Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2025-04-07T14:18:29.184523Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:30.245703Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7375:3914]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:30.245964Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:30.246004Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7375:3914], StatRequests.size() = 1 2025-04-07T14:18:30.956066Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:30.956164Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:30.956250Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2025-04-07T14:18:30.956301Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:30.956742Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-07T14:18:30.969617Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:30.974584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7400:3569], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:30.974702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7411:3574], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:30.974832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:30.988394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:31.083944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7414:3577], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:31.419139Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7533:3636]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:31.419444Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:31.419555Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7535:3638] 2025-04-07T14:18:31.419627Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7535:3638] 2025-04-07T14:18:31.420143Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:7536:3639] 2025-04-07T14:18:31.420297Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7535:3638], server id = [3:7536:3639], tablet id = 72075186224037897, status = OK 2025-04-07T14:18:31.420416Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [3:7536:3639], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:18:31.420483Z node 3 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-04-07T14:18:31.420624Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-07T14:18:31.420785Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7533:3636], StatRequests.size() = 1 2025-04-07T14:18:31.548053Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZWQ0ODUzNWQtNzg4Y2RlZDgtM2EyMDYxNmEtZWU3OTU0NDM=, TxId: 2025-04-07T14:18:31.548129Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZWQ0ODUzNWQtNzg4Y2RlZDgtM2EyMDYxNmEtZWU3OTU0NDM=, TxId: 2025-04-07T14:18:31.548849Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:31.564830Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2025-04-07T14:18:31.564889Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
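For readability, the cleanup statement that the statistics service issues in the [TQueryBase] RunDataQuery entries above is reproduced here re-indented; the text is quoted verbatim from this log, nothing is added:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;

    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id
      AND local_path_id = $local_path_id;

The $owner_id / $local_path_id pair matches the [OwnerId: ..., LocalPathId: ...] identifiers that ScheduleNextTraversal prints just before each run, i.e. each traversal clears the stored statistics rows for one table path before finishing.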
2025-04-07T14:18:31.684249Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:18:31.684310Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:18:31.757053Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7535:3638], schemeshard count = 1 2025-04-07T14:18:32.307807Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7580:3939]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:32.308035Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:32.308068Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7580:3939], StatRequests.size() = 1 2025-04-07T14:18:33.944510Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7625:3957]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:33.944772Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:33.944813Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7625:3957], StatRequests.size() = 1 2025-04-07T14:18:34.493707Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:34.538963Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:34.539028Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:34.539062Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:34.539092Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:34.539419Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-07T14:18:34.541905Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:34.562376Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=OGIxNDM3Ni1iYmRjNzAwMC1mMjQ2OTVmMy1iNTlkYTM4OQ==, TxId: 2025-04-07T14:18:34.562448Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=OGIxNDM3Ni1iYmRjNzAwMC1mMjQ2OTVmMy1iNTlkYTM4OQ==, TxId: 2025-04-07T14:18:34.562995Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:34.595344Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:34.595407Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
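The recurring sequence "Resource pool default not found" -> ESchemeOpCreateResourcePool proposal -> "Transaction ... completed, doublechecking" reflects the workload service creating the default resource pool lazily on first use and then retrying. As a hedged illustration only, a hand-written pool definition of this general shape could look like the sketch below; the pool name and WITH settings are assumptions for illustration, not values taken from this log, and the exact supported options may differ by YDB version:

    -- illustrative sketch, not the pool actually created in this test run
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed example limit
        QUEUE_SIZE = 100              -- assumed example queue depth
    );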
2025-04-07T14:18:35.662683Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7699:3976]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:35.662925Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:35.662965Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7699:3976], StatRequests.size() = 1 2025-04-07T14:18:37.387114Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7744:3992]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:37.387369Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:37.387396Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7744:3992], StatRequests.size() = 1 2025-04-07T14:18:37.946993Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-04-07T14:18:37.947228Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-07T14:18:37.947611Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:37.947686Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:38.009581Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:38.009640Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:39.036847Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7783:4005]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:39.037131Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-07T14:18:39.037176Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7783:4005], StatRequests.size() = 1 2025-04-07T14:18:39.037868Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [3:7785:3699]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:39.048344Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:18:39.048415Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [3:7785:3699], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-04-07T14:15:46.060926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:46.061130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:46.061217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001edd/r3tmp/tmpZfQjDN/pdisk_1.dat 2025-04-07T14:15:46.316925Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28959, node 1 2025-04-07T14:15:46.486796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:46.486850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:46.486876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:46.487414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:46.488957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.580083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:46.580207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:46.593606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63956 2025-04-07T14:15:47.176978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:49.651624Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:49.695073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:49.695161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:49.733159Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:49.736593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:49.893919Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:49.932593Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:49.932700Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:49.956452Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:49.956624Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:49.956832Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:49.956885Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:49.956947Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:49.957003Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:49.957052Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:49.957108Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:49.957427Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:50.162448Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:50.162689Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:50.169062Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:50.176351Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:50.177181Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:50.178112Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:50.199601Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:50.199678Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:50.199750Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:50.201929Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:50.202023Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:50.216426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:50.229768Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:50.229963Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:50.242440Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:50.257504Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:50.296204Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:50.487414Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:50.641880Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:51.281260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:51.833654Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:51.998580Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:51.998655Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:51.998755Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2485:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:52.000479Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2487:2906] 2025-04-07T14:15:52.000926Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2487:2906], schemeshard id = 72075186224037899 2025-04-07T14:15:52.947171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2609:3182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:52.947277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:52.962144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-07T14:15:53.153554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:53.153732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:53.153969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:53.154058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:53.154124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:53.154203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:53.154275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:53.154349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:53.154421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:53.154488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:53.154558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:53.154619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2742:3030];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:53.193132Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2760:3043];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04 ... de 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:38.373499Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:18:38.373584Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:18:39.019368Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2025-04-07T14:18:39.019444Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=u C]bS 2025-04-07T14:18:39.019499Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-07T14:18:40.342161Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:18:40.342443Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:40.393781Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:40.393922Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-07T14:18:40.393965Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:40.394723Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:18:40.419092Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:18:40.419534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:18:40.419598Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:18:40.420165Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:18:40.439930Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:18:40.440192Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-07T14:18:40.441387Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9451:7014], server id = [2:9456:7019], tablet id = 72075186224037905, status = OK 2025-04-07T14:18:40.441509Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9451:7014], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.442271Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9452:7015], server id = [2:9457:7020], tablet id = 72075186224037906, status = OK 2025-04-07T14:18:40.442363Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9452:7015], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.442456Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9453:7016], server id = [2:9458:7021], tablet id = 72075186224037907, status = OK 2025-04-07T14:18:40.442512Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9453:7016], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.443288Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-07T14:18:40.443554Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9454:7017], server id = [2:9459:7022], tablet id = 72075186224037908, status = OK 2025-04-07T14:18:40.443631Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9454:7017], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.443854Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9455:7018], server id = [2:9460:7023], tablet id = 72075186224037909, status = OK 2025-04-07T14:18:40.443918Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9455:7018], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.451384Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-07T14:18:40.451858Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-07T14:18:40.452235Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9451:7014], server id = [2:9456:7019], tablet id = 72075186224037905 2025-04-07T14:18:40.452269Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.452488Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-07T14:18:40.452576Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-07T14:18:40.453099Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9452:7015], server id = [2:9457:7020], tablet id = 72075186224037906 2025-04-07T14:18:40.453131Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.453181Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9453:7016], server id = [2:9458:7021], tablet id = 72075186224037907 2025-04-07T14:18:40.453201Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.453343Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9461:7024], server id = [2:9462:7025], tablet id = 72075186224037910, status = OK 2025-04-07T14:18:40.453453Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9461:7024], 
path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.454240Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9454:7017], server id = [2:9459:7022], tablet id = 72075186224037908 2025-04-07T14:18:40.454283Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.454372Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9455:7018], server id = [2:9460:7023], tablet id = 72075186224037909 2025-04-07T14:18:40.454397Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.454462Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9463:7026], server id = [2:9465:7028], tablet id = 72075186224037911, status = OK 2025-04-07T14:18:40.454545Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9463:7026], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.455027Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9464:7027], server id = [2:9466:7029], tablet id = 72075186224037912, status = OK 2025-04-07T14:18:40.455097Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9464:7027], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.455138Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-07T14:18:40.455781Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9467:7030], server id = [2:9469:7032], tablet id = 72075186224037913, status = OK 2025-04-07T14:18:40.455851Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9467:7030], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.455942Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9468:7031], server id = [2:9470:7033], tablet id = 72075186224037914, status = OK 2025-04-07T14:18:40.455998Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9468:7031], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:18:40.468260Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-07T14:18:40.468508Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9461:7024], server id = [2:9462:7025], tablet id = 72075186224037910 2025-04-07T14:18:40.468544Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.468822Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-07T14:18:40.469066Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-07T14:18:40.469312Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-07T14:18:40.469359Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-07T14:18:40.469562Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9463:7026], server id = [2:9465:7028], tablet id = 72075186224037911 2025-04-07T14:18:40.469591Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.469695Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:18:40.469874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:18:40.470091Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-07T14:18:40.472123Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9464:7027], server id = [2:9466:7029], tablet id = 72075186224037912 2025-04-07T14:18:40.472157Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.485088Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9467:7030], server id = [2:9469:7032], tablet id = 72075186224037913 2025-04-07T14:18:40.485207Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.485495Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:18:40.486434Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9468:7031], server id = [2:9470:7033], tablet id = 72075186224037914 2025-04-07T14:18:40.486475Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:18:40.517133Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTE3OWYwZWQtZTY0ZWRlZGMtYzgyMzhlZjctNDRhZjRmMjI=, TxId: 2025-04-07T14:18:40.517213Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTE3OWYwZWQtZTY0ZWRlZGMtYzgyMzhlZjctNDRhZjRmMjI=, TxId: 2025-04-07T14:18:40.517914Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:40.532937Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:40.533016Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=u C]bS, ActorId=[1:3898:3423] 2025-04-07T14:18:40.534346Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9488:5727]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:40.534624Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:40.534681Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:18:40.534980Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:18:40.535036Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:18:40.535098Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-07T14:18:40.553117Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> ColumnShardTiers::TieringUsage [GOOD] >> YdbOlapStore::LogExistingUserId [GOOD] >> YdbOlapStore::LogCountByResource >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-04-07T14:15:59.067185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:59.067360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:59.067494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0015d9/r3tmp/tmpqO3IiK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7811, node 1 TClient is connected to server localhost:29118 2025-04-07T14:15:59.437250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:59.465667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:59.471288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:59.471334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:59.471357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:59.471495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:59.515481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:59.515572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:59.526559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 2025-04-07T14:16:09.815878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:644:2535], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.816046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:09.958901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-07T14:16:10.304024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:785:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:10.304156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:10.304483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:790:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:10.309296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-07T14:16:10.478673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:16:11.022820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:16:11.333805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-07T14:16:11.972036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-07T14:16:12.621446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:16:12.986738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.820866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-07T14:16:15.146038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 2025-04-07T14:16:39.872462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:1, at schemeshard: 72057594046644480 2025-04-07T14:16:40.751645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-07T14:16:42.069465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-04-07T14:16:42.435622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: 
"root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"SecretId":{"OwnerId":"root@builtin","Id":"secretAccessKey"}},"Bucket":"fake","SecretKey":"USId:root@builtin:secretSecretKey","Endpoint":"fake"},"Name":"fakeTier"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"SecretId":{"OwnerId":"root@builtin","Id":"secretAccessKey"}},"Bucket":"fake","SecretKey":"USId:root@builtin:secretSecretKey","Endpoint":"fake"},"Name":"fakeTier"}}}} SNAPSHOT: {"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"SecretId":{"OwnerId":"root@builtin","Id":"secretAccessKey"}},"Bucket":"fake","SecretKey":"USId:root@builtin:secretSecretKey","Endpoint":"fake"},"Name":"fakeTier"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"SecretId":{"OwnerId":"root@builtin","Id":"secretAccessKey"}},"Bucket":"fake","SecretKey":"USId:root@builtin:secretSecretKey","Endpoint":"fake"},"Name":"fakeTier"}}}} 2025-04-07T14:16:58.204738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715772:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715772 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 16 2025-04-07T14:16:58.578833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037894;self_id=[1:3768:4895];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:16:58.598427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037894;self_id=[1:3768:4895];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:16:58.598704Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037894 2025-04-07T14:16:58.635398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:3768:4895];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:16:58.635655Z no ... 
appended.cpp:80;portions=15,;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec; 2025-04-07T14:18:47.531829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.531910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=DEFAULT;path_id=17;portion=13;before_size=166392;after_size=164872;before_rows=3008;after_rows=3007; 2025-04-07T14:18:47.531954Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.532133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.532197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=DEFAULT;path_id=17;portion=14;before_size=164872;after_size=162992;before_rows=3007;after_rows=3006; 2025-04-07T14:18:47.532246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.532315Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.532396Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=17;portion=15;before_size=162992;after_size=164512;before_rows=3006;after_rows=3007; 2025-04-07T14:18:47.532438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.532763Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::37dbb0a8-13bb11f0-a984c015-ad619cec; 2025-04-07T14:18:47.532876Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:17;path_id:17;size:164512;portions_count:7;); 2025-04-07T14:18:47.532926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:18:47.532993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:18:47.533049Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=columnshard_impl.cpp:1023;background=cleanup;skip_reason=in_progress; 2025-04-07T14:18:47.533084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:18:47.533124Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:18:47.533197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;tablet_id=72075186224037894;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:18:47.533551Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037894 Save Batch GenStep: 1:13 Blob count: 1 2025-04-07T14:18:47.533647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:18:47.533743Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=17;external_task_id=37dbb0a8-13bb11f0-a984c015-ad619cec;mem=6126;cpu=0; 2025-04-07T14:18:47.533879Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037894 Save Batch GenStep: 1:14 Blob count: 1 2025-04-07T14:18:47.534000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;local_tx_no=39;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.534067Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;local_tx_no=39;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=17;portion=16;before_size=164512;after_size=166392;before_rows=3007;after_rows=3008; 2025-04-07T14:18:47.534110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;local_tx_no=39;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.534383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-04-07T14:18:47.534453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.534524Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=17;portion=12;before_size=166392;after_size=166392;before_rows=3008;after_rows=3008; 2025-04-07T14:18:47.534565Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2025-04-07T14:18:47.534672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.534734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ERASE;path_id=17;portion=11;before_size=166392;after_size=166392;before_rows=3008;after_rows=3008; 2025-04-07T14:18:47.534771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2025-04-07T14:18:47.534823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;fline=manager.cpp:14;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS::385629e6-13bb11f0-9461c817-4d51daaf; 2025-04-07T14:18:47.534872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:18:47.534921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:18:47.534988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=1; 2025-04-07T14:18:47.535080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=0;portions_prepared=2;drop=0;skip=0;portions_counter=2;chunks=18;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:18:47.535129Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=manager.cpp:9;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS::3872e0e0-13bb11f0-b40fa4a5-f934bd53; 2025-04-07T14:18:47.535206Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:18:47.535244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:18:47.535309Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;task_id=385629e6-13bb11f0-9461c817-4d51daaf;tablet_id=72075186224037894;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:18:47.535412Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037894 Delete Blob DS:2181038080:[72075186224037894:1:9:11:0:1520:0] 2025-04-07T14:18:47.535456Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037894 Delete Blob DS:2181038080:[72075186224037894:1:10:12:0:1880:0] 2025-04-07T14:18:47.535522Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:18:47.535587Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=18;external_task_id=385629e6-13bb11f0-9461c817-4d51daaf;mem=504;cpu=0; 2025-04-07T14:18:47.535836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;parent=[1:3768:4895];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=3872e0e0-13bb11f0-b40fa4a5-f934bd53;type=CS::TTL;priority=0;; Cleaning waiting... Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037895 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037895 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037896 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037896 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> BasicStatistics::TwoServerlessDbs [GOOD] |97.3%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-04-07T14:15:42.694665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:42.695031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:42.695177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001ef8/r3tmp/tmpj0V5s3/pdisk_1.dat 2025-04-07T14:15:43.029951Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6117, node 1 2025-04-07T14:15:43.223956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:43.224002Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:43.224022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:43.224424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:43.226122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.319684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:43.319785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.332875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4655 2025-04-07T14:15:43.904059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:46.525807Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:46.556209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:46.556313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:46.594517Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:46.597847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:46.775473Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:46.825758Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:46.825859Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:46.854242Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:46.854418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:46.854602Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:46.854659Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:46.854711Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:46.854757Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:46.854799Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:46.854849Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:46.855177Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:47.061254Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:47.061357Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:47.066171Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:47.070856Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:47.071429Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:47.072112Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:47.089427Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:47.089482Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:47.089541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:47.091333Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.091427Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.104154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:47.120536Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:47.120692Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:47.132537Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:47.148599Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:47.188044Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:47.419517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:47.573152Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:48.158961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:48.680706Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:48.807140Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:48.807202Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:48.807286Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2487:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:48.809618Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2495:2909] 2025-04-07T14:15:48.810366Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2495:2909], schemeshard id = 72075186224037899 2025-04-07T14:15:49.564399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:50.047307Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:50.315476Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-07T14:15:50.315544Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:15:50.315645Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2963:3112], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:15:50.316445Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2964:3113] 2025-04-07T14:15:50.316595Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2964:3113], schemeshard id = 72075186224037905 2025-04-07T14:15:51.185298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3091:3346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:51.185450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:51.199013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-07T14:15:51.387475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3241:3382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:51.387614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:51.398499Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3246:3386]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:51.398664Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:51.398834Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-07T14:15:51.398890Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3249:3389] 2025-04-07T14:15:51.398971Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3249:3389] 2025-04-07T14:15:51.399544Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3250:3239] 2025-04-07T14:15:51.399782Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3249:3389], server id = [2:3250:3239], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:51.399990Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3250:3239], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:51.400059Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:51.400233Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:51.400296Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3246:3386], StatRequests.size() = 1 2025-04-07T14:15:51.417896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3254:3393], Datab ... -04-07T14:18:38.335819Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:8838:6345], StatRequests.size() = 1 2025-04-07T14:18:38.913707Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:38.913806Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:38.913875Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:18:38.913924Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:38.914257Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:38.941599Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:38.956021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8863:6364], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:38.956201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8873:6369], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:38.956341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:38.972207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2025-04-07T14:18:39.088215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8877:6372], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-07T14:18:39.539870Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9000:6440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:39.540221Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-07T14:18:39.540532Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-07T14:18:39.540597Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:18:39.540954Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:39.541049Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9000:6440], StatRequests.size() = 1 2025-04-07T14:18:39.691042Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjQ0NmM3ZjYtNWQ2MDVjOWYtMjI5NjFiMWUtM2MxMTJmNTM=, TxId: 2025-04-07T14:18:39.691132Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjQ0NmM3ZjYtNWQ2MDVjOWYtMjI5NjFiMWUtM2MxMTJmNTM=, TxId: 2025-04-07T14:18:39.691887Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:39.707020Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:18:39.707102Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:39.823280Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:18:39.823355Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:18:39.895724Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3597:3354], schemeshard count = 1 2025-04-07T14:18:40.440983Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T14:18:40.441053Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 180.000000s, at schemeshard: 72075186224037899 2025-04-07T14:18:40.441307Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-07T14:18:40.472034Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:40.707473Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9050:6468]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:40.707744Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:18:40.707783Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9050:6468], StatRequests.size() = 1 2025-04-07T14:18:42.538671Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9099:6499]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:42.538970Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:18:42.539012Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9099:6499], StatRequests.size() = 1 2025-04-07T14:18:43.213352Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:18:43.213607Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2025-04-07T14:18:43.213633Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:43.213664Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-07T14:18:43.213688Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:43.213943Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:43.215826Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:43.245786Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTBlYmZkMTEtYmJhZGYzZmYtYjNhODhlZGItMTQ1MDBmYWE=, TxId: 2025-04-07T14:18:43.245857Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTBlYmZkMTEtYmJhZGYzZmYtYjNhODhlZGItMTQ1MDBmYWE=, TxId: 2025-04-07T14:18:43.246370Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:43.274016Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:18:43.274080Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:43.333788Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-07T14:18:43.333864Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 186.000000s, at schemeshard: 72075186224037905 2025-04-07T14:18:43.334086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-04-07T14:18:43.367617Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:18:44.624466Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9185:6556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:44.624828Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:18:44.624873Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9185:6556], StatRequests.size() = 1 2025-04-07T14:18:46.694574Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9239:6587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:46.694855Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:18:46.694890Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9239:6587], StatRequests.size() = 1 2025-04-07T14:18:47.317211Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-07T14:18:47.317430Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:18:47.317463Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:18:47.317498Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 
2025-04-07T14:18:47.317528Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-07T14:18:47.317831Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:18:47.320591Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:18:47.334169Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:18:47.335829Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:18:47.368107Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjFlZjI5NC00Y2QzN2Q1Yy01ZTdjYzdmYy1lNjNiNmUxMQ==, TxId: 2025-04-07T14:18:47.368175Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjFlZjI5NC00Y2QzN2Q1Yy01ZTdjYzdmYy1lNjNiNmUxMQ==, TxId: 2025-04-07T14:18:47.368651Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:18:47.400757Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-07T14:18:47.400856Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:18:48.647066Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9312:6634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:48.647390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:18:48.647434Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9312:6634], StatRequests.size() = 1 2025-04-07T14:18:48.648268Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9314:6636]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:18:48.653481Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:18:48.653551Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9314:6636], StatRequests.size() = 1 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> 
test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> KqpSystemView::QueryStatsSimple [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> 
TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-04-07T14:15:33.916238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:255:2187], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:33.916526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:15:33.916618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002031/r3tmp/tmpNcmCT9/pdisk_1.dat 2025-04-07T14:15:34.233085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13973, node 1 2025-04-07T14:15:34.440760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:34.440813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:34.440844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:34.441301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:34.443386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:34.538355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:34.538487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:34.553223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8076 2025-04-07T14:15:35.134032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.895942Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-07T14:15:37.929559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:37.929654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:37.979567Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:37.982653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:38.169597Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:38.234231Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:38.234322Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:38.266597Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:38.266753Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:38.266959Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:38.267027Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:38.267076Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:38.267133Z node 3 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:38.267191Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:38.267233Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:38.267654Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:38.453907Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:38.454017Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1815:2551], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:38.461066Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1827:2560] 2025-04-07T14:15:38.466829Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1858:2575] 2025-04-07T14:15:38.467273Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1858:2575], schemeshard id = 72075186224037889 2025-04-07T14:15:38.475023Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared1 2025-04-07T14:15:38.492247Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:38.492318Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:38.492398Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-04-07T14:15:38.495508Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:38.495612Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:38.508063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:38.521545Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:38.521704Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:38.539452Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:38.554309Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:38.593573Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:38.857546Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:39.033309Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:39.728335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:42.522374Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:42.550300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.550407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:42.581016Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:42.582747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:42.761544Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:42.797124Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2025-04-07T14:15:42.797226Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2025-04-07T14:15:42.830627Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2025-04-07T14:15:42.831546Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2025-04-07T14:15:42.831716Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:42.831773Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:42.831820Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:42.831866Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:42.831930Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:42.831974Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2025-04-07T14:15:42.832334Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2025-04-07T14:15:42.967563Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2025-04-07T14:15:42.967647Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3047:2552], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2025-04-07T14:15:42.972126Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3055:2559] 2025-04-07T14:15:42.975932Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3078:2571] 2025-04-07T14:15:42.976289Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3078:2571], schemeshard id = 72075186224038899 2025-04-07T14:15:42.980103Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Shared2 2025-04-07T14:15:42.997273Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:42.997324Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:42.997383Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Shared2/.metadata/_statistics 2025-04-07T14:15:42.998476Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.998553Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.006916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2025-04-07T14:15:43.009475Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2025-04-07T14:15:43.009581Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2025-04-07T14:15:43.022121Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:43.036137Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:43.050006Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2025-04-07T14:15:43.207612Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2025-04-07T14:15:43.408489Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:44.338328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.979305Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:45.282167Z node 3 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: ... ISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [3:12386:5551], StatRequests.size() = 1 2025-04-07T14:19:12.269817Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:19:12.269891Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:19:12.373468Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:5003:3120], schemeshard count = 1 2025-04-07T14:19:12.941973Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038899 2025-04-07T14:19:12.942049Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 200.000000s, at schemeshard: 72075186224038899 2025-04-07T14:19:12.942449Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 25 2025-04-07T14:19:12.967348Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2025-04-07T14:19:14.231614Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [3:12459:5583]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:14.232051Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-07T14:19:14.232096Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [3:12459:5583], StatRequests.size() = 1 2025-04-07T14:19:14.629234Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2025-04-07T14:19:14.629319Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. 
No force traversals. 2025-04-07T14:19:14.629360Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2025-04-07T14:19:14.629393Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2025-04-07T14:19:14.629770Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-07T14:19:14.632630Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:19:14.661713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12477:5210], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:14.661824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12487:5215], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:14.661912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:14.690633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2025-04-07T14:19:14.833778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12491:5218], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-07T14:19:15.230235Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12618:5286]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:15.230747Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:19:15.230859Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12620:5288] 2025-04-07T14:19:15.230941Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12620:5288] 2025-04-07T14:19:15.231922Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:12621:5289] 2025-04-07T14:19:15.232121Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12620:5288], server id = [2:12621:5289], tablet id = 72075186224038907, status = OK 2025-04-07T14:19:15.232216Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:12621:5289], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:19:15.232272Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:19:15.232489Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:19:15.232559Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12618:5286], StatRequests.size() = 1 2025-04-07T14:19:15.400073Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjNjZmIyNzItMWRhNTg0ODQtNzU5NTQxOGItOTU0NTBjMWI=, TxId: 2025-04-07T14:19:15.400144Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjNjZmIyNzItMWRhNTg0ODQtNzU5NTQxOGItOTU0NTBjMWI=, TxId: 2025-04-07T14:19:15.409520Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2025-04-07T14:19:15.437663Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2025-04-07T14:19:15.437718Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:15.490952Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2025-04-07T14:19:15.491029Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:19:15.561811Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12620:5288], schemeshard count = 1 2025-04-07T14:19:16.049816Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:19:16.104585Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:16.104658Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:19:16.128374Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T14:19:16.128451Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 227.000000s, at schemeshard: 72075186224037899 2025-04-07T14:19:16.128746Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-07T14:19:16.147839Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:19:16.577436Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12695:5606]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:16.577952Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-07T14:19:16.578004Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12695:5606], StatRequests.size() = 1 2025-04-07T14:19:18.202817Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12764:5628]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:18.203197Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:19:18.203246Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12764:5628], StatRequests.size() = 1 2025-04-07T14:19:18.630475Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2025-04-07T14:19:18.630544Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:18.714161Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038909 2025-04-07T14:19:18.714254Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 200.000000s, at schemeshard: 72075186224038909 2025-04-07T14:19:18.714838Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038909, stats size# 26 2025-04-07T14:19:18.734580Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2025-04-07T14:19:19.373474Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-07T14:19:19.374213Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:19:19.374875Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-07T14:19:19.436893Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:19.437021Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:19.437080Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-07T14:19:19.437119Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:19:19.437575Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared1 2025-04-07T14:19:19.449076Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:19:19.502626Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YmI2YWQyMmItMjYwYjY4N2MtNWFhNTEwM2YtYzM4MWUwMTg=, TxId: 2025-04-07T14:19:19.502691Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YmI2YWQyMmItMjYwYjY4N2MtNWFhNTEwM2YtYzM4MWUwMTg=, TxId: 2025-04-07T14:19:19.503444Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:19:19.525869Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:19:19.525925Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:19.915248Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:12856:5670]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:19.915735Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:19:19.915771Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:12856:5670], StatRequests.size() = 1 2025-04-07T14:19:19.916553Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:12858:5359]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:19.922999Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:19:19.923376Z node 2 :STATISTICS DEBUG: [72075186224038907] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-07T14:19:19.923434Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:19:19.923779Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:19:19.923875Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:12858:5359], StatRequests.size() = 1 >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 4341, MsgBus: 20813 2025-04-07T14:18:13.078716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575406467464497:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:13.078874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:13.098713Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575406568581235:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:13.099147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:13.102853Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575408018065183:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:13.103076Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00177c/r3tmp/tmpJ7RWbp/pdisk_1.dat 2025-04-07T14:18:13.422029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:13.422141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:13.422300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:13.422428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:18:13.425014Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:18:13.425053Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:18:13.426083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:13.426329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:13.447026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:18:13.456803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:18:13.456878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4341, node 1 2025-04-07T14:18:13.466619Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:13.469169Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:18:13.471574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:18:13.504886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:18:13.504921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:18:13.504938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:18:13.505051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20813 TClient is connected to server localhost:20813 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:18:13.963141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:13.987414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.126443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.248717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:14.300503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:18:16.136343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575415057401087:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.136678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.179884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.220370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.278715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.318662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.351224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.391757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:18:16.433203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575419352369044:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.433256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.433357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490575419352369049:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:18:16.436715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:18:16.450108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490575419352369051:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:18:17.875425Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035497867, txId: 281474976710673] shutting down 2025-04-07T14:18:18.014471Z node 3 :BS_PROXY_PUT ERROR: [85dc167a86983b82] Result# TEvPutResult {Id# [72075186224037914:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:18.053605Z node 2 :BS_PROXY_PUT ERROR: [7c41ea4bbe6a538d] Result# TEvPutResult {Id# [72075186224037916:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:18:18.098804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490575406568581235:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:18.098875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:18:18.103037Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490575408018065183:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:18.103117Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3209, MsgBus: 3326 2025-04-07T14:18:19.440627Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490575432707207773:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:19.440721Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:19.444553Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7490575434280002203:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:19.444597Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:18:19.446934Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7490575432084942769:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:18:19.446987 ... 
saction 281474976715668 completed, doublechecking } 2025-04-07T14:19:04.979486Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035544939, txId: 281474976715673] shutting down 2025-04-07T14:19:05.313968Z node 14 :BS_PROXY_PUT ERROR: [ab8ff31f1b7a448f] Result# TEvPutResult {Id# [72075186224037896:1:23:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:19:05.317566Z node 15 :BS_PROXY_PUT ERROR: [b3039bf1e6b6b6b2] Result# TEvPutResult {Id# [72075186224037895:1:23:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 2637, MsgBus: 10382 2025-04-07T14:19:08.947904Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7490575643945750704:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:08.948528Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:19:08.957286Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7490575644715669596:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:08.957357Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:19:08.968771Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7490575644497251370:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:08.969819Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00177c/r3tmp/tmph60H1Y/pdisk_1.dat 2025-04-07T14:19:09.368189Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:19:09.439206Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:19:09.439319Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:19:09.440316Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:19:09.440396Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:19:09.440966Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:19:09.441032Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:19:09.446800Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-04-07T14:19:09.446854Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-04-07T14:19:09.446992Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T14:19:09.447574Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:19:09.448641Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2637, node 16 2025-04-07T14:19:09.620732Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:19:09.620770Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:19:09.620781Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:19:09.620962Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10382 TClient is connected to server localhost:10382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:19:10.723865Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:19:10.814165Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:19:11.015333Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:19:11.402130Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:19:11.550933Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:19:13.947402Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7490575643945750704:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:13.947475Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:19:13.957860Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7490575644715669596:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:13.957950Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:19:13.965484Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7490575644497251370:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:19:13.965586Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:19:15.562425Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490575674010523779:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:15.562529Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:15.648922Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:19:15.787879Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:19:15.914361Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:19:16.071490Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:19:16.170918Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:19:16.322665Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:19:16.463179Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490575678305491770:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:16.463306Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:16.464814Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7490575678305491775:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:16.471669Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:19:16.564641Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7490575678305491778:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:19:19.283193Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035559273, txId: 281474976715673] shutting down 2025-04-07T14:19:19.556339Z node 17 :BS_PROXY_PUT ERROR: [008c3edd5f2dc35a] Result# TEvPutResult {Id# [72075186224037893:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-07T14:19:19.561981Z node 18 :BS_PROXY_PUT ERROR: [7e035c54d76620d7] Result# TEvPutResult {Id# [72075186224037892:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |97.3%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TPersQueueTest::DefaultMeteringMode [GOOD] >> TPersQueueTest::DisableWrongSettings |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> KqpRm::NodesMembershipByExchanger |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-04-07T14:19:26.848887Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:19:26.849382Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization 
error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e73/r3tmp/tmpvFLDbf/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:19:26.849893Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/001e73/r3tmp/tmpvFLDbf/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/001e73/r3tmp/tmpvFLDbf/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2475495052487824527 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:19:26.891782Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:19:26.892129Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:19:26.907591Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:451:2098] with ResourceBroker at [2:422:2097] 2025-04-07T14:19:26.907714Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:452:2099] 2025-04-07T14:19:26.907885Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:450:2332] with ResourceBroker at [1:421:2313] 2025-04-07T14:19:26.907959Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:453:2333] 2025-04-07T14:19:26.908093Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:19:26.908135Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-07T14:19:26.908165Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-07T14:19:26.908183Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-07T14:19:26.908314Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:19:26.917411Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035566 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:19:26.917593Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:19:26.917659Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035566 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:19:26.918038Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:19:26.918150Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-07T14:19:26.918287Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:19:26.918332Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:19:26.918447Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744035566 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:19:26.918672Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-07T14:19:26.918692Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-07T14:19:26.918759Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744035566 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-07T14:19:26.919181Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-07T14:19:26.919388Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:26.920140Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:26.920241Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:26.920396Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:26.920580Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:26.920657Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:19:26.920922Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-07T14:19:26.921075Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-07T14:19:26.921116Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-07T14:19:27.892482Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:19:27.892585Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-07T14:19:27.893303Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-07T14:19:28.152301Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> YdbOlapStore::LogCountByResource [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |97.3%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-04-07T14:17:23.361295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575191009367020:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:17:23.361800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001845/r3tmp/tmpfDX6V9/pdisk_1.dat 2025-04-07T14:17:23.668581Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11312, node 1 2025-04-07T14:17:23.686710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:17:23.686960Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:23.687496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:23.687799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:17:23.688930Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:17:23.693815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:23.694971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:23.695010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:23.695110Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:17:23.695277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.724818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:17:23.724922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:17:23.725964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:17:23.725998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:17:23.726011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:17:23.726160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:17:23.728889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:17:23.953651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.958766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:23.960071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.961814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:17:23.961988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:17:23.962013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:17:23.963025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:17:23.963619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:17:23.963643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:17:23.965011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:17:23.968251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035444011, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:17:23.968282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:17:23.968495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:17:23.970401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:23.970600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:23.970652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:17:23.970755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:17:23.970827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:17:23.970873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:17:23.973106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:17:23.973159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:17:23.973175Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:17:23.973252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:12221 2025-04-07T14:17:24.150860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.153518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:17:24.153744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:17:24.153779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:17:24.155529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: 
CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-07T14:17:24.155710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:17:24.155893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:17:24.155950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-07T14:17:24.156708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.156750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.156764Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:17:24.156988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:17:24.157011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:17:24.157019Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 waiting... 2025-04-07T14:17:24.161438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:17:24.161915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:24.162216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:24.168475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:24.168766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:17:24.168832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:17:24.171501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2025-04-07T14:17:24.247664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490575195304335348:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:17:24.247911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490575195304335348:2292];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:17:24.248224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490575195304335348:2292];tablet_id=720 ... nId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [28:7490575724178103077:3878], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-07T14:19:27.214735Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490575724178103077:3878], 2025-04-07T14:19:27.215053Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-04-07T14:19:27.215283Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7490575724178103077 RawX2: 4503719886458662 } } DstEndpoint { ActorId { RawX1: 7490575724178103074 RawX2: 4503719886458376 } } InMemory: true } 2025-04-07T14:19:27.215522Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-07T14:19:27.215757Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [28:7490575724178103074:3592] TaskId: 1 Stats: CpuTimeUs: 760 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 270 FinishTimeMs: 1744035567215 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 197 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744035567214 } MaxMemoryUsage: 1048576 2025-04-07T14:19:27.215806Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [28:7490575715588167645:3592], seqNo: 1, nRows: 1 2025-04-07T14:19:27.215816Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . 
DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:19:27.215864Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715774, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-07T14:19:27.216082Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490575724178103077:3878], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 760 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 270 FinishTimeMs: 1744035567215 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 197 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744035567214 } MaxMemoryUsage: 1048576 } 2025-04-07T14:19:27.216118Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7490575724178103077:3878], 2025-04-07T14:19:27.216198Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Forwarded TEvStreamData to [28:7490575715588167643:3591] 2025-04-07T14:19:27.216787Z node 28 :KQP_EXECUTER DEBUG: TxId: 281474976715774, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [28:7490575724178103078:3878] 2025-04-07T14:19:27.216873Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-07T14:19:27.216933Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715774, task: 1. Tasks execution finished 2025-04-07T14:19:27.216968Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7490575724178103077:3878], TxId: 281474976715774, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr89xkpk4p98ve4nt5ccpg94. SessionId : ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-07T14:19:27.217124Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715774, task: 1. pass away 2025-04-07T14:19:27.217283Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715774;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-07T14:19:27.217796Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7490575724178103077:3878], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1777 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 275 FinishTimeMs: 1744035567216 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 78 BuildCpuTimeUs: 197 HostName: "ghrun-lqtfcktism" NodeId: 28 StartTimeMs: 1744035567214 } MaxMemoryUsage: 1048576 } 2025-04-07T14:19:27.217880Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7490575724178103077:3878] 2025-04-07T14:19:27.218032Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-07T14:19:27.218116Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7490575724178103074:3592] TxId: 281474976715774. Ctx: { TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001777s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-07T14:19:27.218250Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-07T14:19:27.218689Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1185.647 QueriesCount: 1 2025-04-07T14:19:27.218774Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-07T14:19:27.218949Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-07T14:19:27.219016Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, EndCleanup, isFinal: 1 2025-04-07T14:19:27.219108Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: ExecuteState, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7490575655458620674:2127] 2025-04-07T14:19:27.219156Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: unknown state, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Cleanup temp tables: 0 RESULT: [[3u]] --------------------- STATS: total CPU: 1588 duration: 1154737 usec cpu: 754674 usec { name: "/Root/OlapStore/log1" reads { rows: 2 bytes: 16 } } duration: 17033 usec cpu: 3858 usec 2025-04-07T14:19:27.220365Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035566000, txId: 18446744073709551615] shutting down 2025-04-07T14:19:27.220542Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NjQ5ZmE2ZC0yMzRhODRiMi1hMGRlNjc3MC0xZDgwM2M3OQ==, ActorId: [28:7490575715588167645:3592], ActorState: unknown state, TraceId: 01jr89xkpk4p98ve4nt5ccpg94, Session actor destroyed 2025-04-07T14:19:27.279008Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7490575664048556224:2291];fline=actor.cpp:33;event=skip_flush_writing; >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] >> BasicStatistics::TwoDatabases [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-04-07T14:15:36.751703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:452:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:36.752191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:36.752326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002017/r3tmp/tmp8PdH3Y/pdisk_1.dat 2025-04-07T14:15:37.070747Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20283, node 1 2025-04-07T14:15:37.265450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:37.265492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:37.265511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:37.265776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:37.267568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:37.363684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:37.363809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:37.378155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21433 2025-04-07T14:15:37.971490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:40.619027Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-07T14:15:40.646239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:40.646324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:40.695503Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:40.699006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:40.873941Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:40.918467Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:40.918550Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:40.949148Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:40.950096Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:40.950305Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:40.950369Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:40.950431Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:40.950480Z node 
3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:40.950524Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:40.950569Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:40.950948Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:41.154325Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:41.154455Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1815:2552], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:41.161097Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1825:2559] 2025-04-07T14:15:41.169654Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1867:2576] 2025-04-07T14:15:41.170089Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1867:2576], schemeshard id = 72075186224037889 2025-04-07T14:15:41.172685Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database1 2025-04-07T14:15:41.187408Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:41.187451Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:41.187503Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-04-07T14:15:41.187876Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:41.187948Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:41.199927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:41.210102Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:41.210233Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:41.222535Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:41.237431Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:41.262608Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:41.546357Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:41.688465Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:42.399640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:44.961944Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:44.997593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:44.997699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:45.036959Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:45.039651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:45.257774Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:45.284633Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2025-04-07T14:15:45.284729Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2025-04-07T14:15:45.312733Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2025-04-07T14:15:45.312933Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2025-04-07T14:15:45.313118Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:45.313171Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:45.313221Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:45.313267Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:45.313311Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:45.313359Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2025-04-07T14:15:45.313749Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2025-04-07T14:15:45.422014Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3044:2552] 2025-04-07T14:15:45.423392Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2025-04-07T14:15:45.423489Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3050:2557], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2025-04-07T14:15:45.429752Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Database2 2025-04-07T14:15:45.436199Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3099:2580] 2025-04-07T14:15:45.436541Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3099:2580], schemeshard id = 72075186224038899 2025-04-07T14:15:45.470757Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:45.470811Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:45.470873Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database2/.metadata/_statistics 2025-04-07T14:15:45.485623Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:45.485719Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:45.493145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2025-04-07T14:15:45.495841Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2025-04-07T14:15:45.495959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2025-04-07T14:15:45.507269Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:45.523060Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:45.583937Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2025-04-07T14:15:45.792448Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2025-04-07T14:15:45.915105Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:47.042466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3450:3320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:47.042667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: ... 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:25.129878Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-07T14:19:25.129910Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:10079:4701], StatRequests.size() = 1 2025-04-07T14:19:25.745823Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224038899 2025-04-07T14:19:25.745900Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 216.000000s, at schemeshard: 72075186224038899 2025-04-07T14:19:25.746229Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 49 2025-04-07T14:19:25.766334Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2025-04-07T14:19:27.122117Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10140:4727]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:27.122436Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-07T14:19:27.122477Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10140:4727], StatRequests.size() = 1 2025-04-07T14:19:27.583872Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2025-04-07T14:19:27.583944Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:27.583982Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2025-04-07T14:19:27.584028Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2025-04-07T14:19:27.584292Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-04-07T14:19:27.586805Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:19:27.593162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10153:4375], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:27.593292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10164:4380], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:27.593505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:19:27.607321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2025-04-07T14:19:27.715903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10167:4383], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-07T14:19:28.076164Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10293:4449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:28.076435Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:19:28.076548Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10295:4451] 2025-04-07T14:19:28.076622Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10295:4451] 2025-04-07T14:19:28.077507Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10295:4451], server id = [2:10296:4452], tablet id = 72075186224038907, status = OK 2025-04-07T14:19:28.077593Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:10296:4452] 2025-04-07T14:19:28.077733Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:10296:4452], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:19:28.077811Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-07T14:19:28.078067Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:19:28.078161Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10293:4449], StatRequests.size() = 1 2025-04-07T14:19:28.209455Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGU2OWQyOWUtMzM5NWYwYWMtZTYzOTM1ZmMtMTNmZDljYWI=, TxId: 2025-04-07T14:19:28.209523Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGU2OWQyOWUtMzM5NWYwYWMtZTYzOTM1ZmMtMTNmZDljYWI=, TxId: 2025-04-07T14:19:28.210209Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2025-04-07T14:19:28.224902Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2025-04-07T14:19:28.224970Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:28.281575Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2025-04-07T14:19:28.281662Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:19:28.361141Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10295:4451], schemeshard count = 1 2025-04-07T14:19:28.799558Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:19:28.821880Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:28.821954Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:28.821997Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2025-04-07T14:19:28.822030Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:19:28.822412Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database1 2025-04-07T14:19:28.825054Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:19:28.842891Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YWZjMTMyYzItOTJkZjZiYzgtNTZlZGJjOWEtODk3YTJmYmI=, TxId: 2025-04-07T14:19:28.842962Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YWZjMTMyYzItOTJkZjZiYzgtNTZlZGJjOWEtODk3YTJmYmI=, TxId: 2025-04-07T14:19:28.843980Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:19:28.874584Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:19:28.874655Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:29.435119Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10396:4765]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:29.435412Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-07T14:19:29.435444Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10396:4765], StatRequests.size() = 1 2025-04-07T14:19:31.174203Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10463:4791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:31.174578Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-07T14:19:31.174640Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10463:4791], StatRequests.size() = 1 2025-04-07T14:19:31.494642Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2025-04-07T14:19:31.494701Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:31.494738Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 4] is data table. 2025-04-07T14:19:31.494767Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 4] 2025-04-07T14:19:31.495196Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-04-07T14:19:31.497962Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-07T14:19:31.513008Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzVmNjU2NTItMjdjNTg0YjMtNTc3Y2RmZmYtYzg2NDhhYTE=, TxId: 2025-04-07T14:19:31.513071Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzVmNjU2NTItMjdjNTg0YjMtNTc3Y2RmZmYtYzg2NDhhYTE=, TxId: 2025-04-07T14:19:31.513751Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2025-04-07T14:19:31.540337Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 4] 2025-04-07T14:19:31.540412Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:32.136215Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-07T14:19:32.136642Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:19:32.137082Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-07T14:19:32.148685Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:32.148746Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:32.605168Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10551:4805]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:32.605497Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-07T14:19:32.605539Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10551:4805], StatRequests.size() = 1 2025-04-07T14:19:32.606342Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10553:4539]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:32.611476Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:19:32.611544Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10553:4539], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-04-07T14:15:35.254200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:511:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:35.254511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:35.254635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002024/r3tmp/tmpwYirk7/pdisk_1.dat 2025-04-07T14:15:35.599002Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14017, node 1 2025-04-07T14:15:35.798059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:35.798112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:35.798143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:35.798621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:35.801303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.902405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:35.902584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:35.922119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23186 2025-04-07T14:15:36.491845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.369086Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-04-07T14:15:39.402936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.403039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.451833Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:15:39.454529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.635626Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:39.678356Z node 4 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:39.678453Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:39.707492Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:39.708723Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:39.708915Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:39.708968Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:39.709016Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:39.709054Z node 
4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:39.709098Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:39.709141Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:39.710279Z node 4 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:39.916498Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.916597Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:1881:2554], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.923902Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1891:2562] 2025-04-07T14:15:39.930350Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1929:2579] 2025-04-07T14:15:39.930861Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:1929:2579], schemeshard id = 72075186224037889 2025-04-07T14:15:39.932332Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2025-04-07T14:15:39.951080Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:39.951145Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:39.951221Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-07T14:15:39.958322Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.958404Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.963182Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:39.979545Z node 4 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:39.979672Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:39.991584Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:40.006681Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:40.066845Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:40.283536Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:40.419805Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2025-04-07T14:15:41.045292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:15:43.810704Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-07T14:15:43.838035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:43.838114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.838286Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:43.838325Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:43.863143Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:43.864625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:43.865281Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:15:43.868287Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:43.945534Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:44.099701Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-07T14:15:44.099766Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:44.099851Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2943:2908], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2025-04-07T14:15:44.100371Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:2944:2909] 2025-04-07T14:15:44.101671Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:2944:2909], schemeshard id = 72075186224037899 2025-04-07T14:15:45.088391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:15:47.853622Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:47.885197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.885289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.885637Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:47.885684Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:47.911119Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:47.912222Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:47.913366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T14:15:47.914811Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:47.981949Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:48.221234Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-07T14:15:48.221291Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:15:48.221360Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3767:3108], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2025-04-07T14:15:48.222777Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:3771:3112] 2025-04-07T14:15:48.223176Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:3771:3112], schemeshard id = 72075186224037905 2025-04-07T14:15:49.542430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3911:3385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.542621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:49.559156Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... , TxId: 2025-04-07T14:19:23.525072Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:19:23.551400Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2025-04-07T14:19:23.551488Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-07T14:19:23.643374Z node 4 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:19:23.643473Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:19:23.735466Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11704:7272], schemeshard count = 1 2025-04-07T14:19:24.354112Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-07T14:19:24.354186Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 214.000000s, at schemeshard: 72075186224037899 2025-04-07T14:19:24.354416Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-07T14:19:24.378914Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:19:27.435587Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:27.435646Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:27.435695Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-07T14:19:27.435738Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:19:27.440844Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:19:27.466986Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:19:27.467655Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:19:27.467758Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:19:27.479596Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-07T14:19:27.499417Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:19:27.499707Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-04-07T14:19:27.500540Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11875:7366], server id = [4:11876:7367], tablet id = 72075186224037911, status = OK 2025-04-07T14:19:27.501042Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11875:7366], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-07T14:19:27.502277Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-07T14:19:27.502376Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-07T14:19:27.502573Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:19:27.502757Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:19:27.503067Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-07T14:19:27.507873Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11875:7366], server id = [4:11876:7367], tablet id = 72075186224037911 2025-04-07T14:19:27.507932Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:19:27.508643Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:19:27.533641Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11893:7384]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:27.533971Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:19:27.534020Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11893:7384], StatRequests.size() = 1 2025-04-07T14:19:27.672103Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTcwYzQ5NzgtZDNjNzhhZjgtNzI2MjBkMTgtZWVmYzg0NDE=, TxId: 2025-04-07T14:19:27.672177Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NTcwYzQ5NzgtZDNjNzhhZjgtNzI2MjBkMTgtZWVmYzg0NDE=, TxId: 2025-04-07T14:19:27.674923Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:19:27.692767Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-07T14:19:27.692823Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-07T14:19:27.745130Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-07T14:19:27.745197Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 187.000000s, at schemeshard: 72075186224037905 2025-04-07T14:19:27.745553Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-04-07T14:19:27.776366Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:19:28.743814Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-07T14:19:28.743906Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-07T14:19:31.789736Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:19:31.790018Z node 4 :STATISTICS DEBUG: EvPropagateStatistics, node id = 4 2025-04-07T14:19:31.801267Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:19:31.801336Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:19:31.801372Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-04-07T14:19:31.801406Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-07T14:19:31.805504Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2025-04-07T14:19:31.827352Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2025-04-07T14:19:31.827931Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2025-04-07T14:19:31.827987Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2025-04-07T14:19:31.828564Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-07T14:19:31.847702Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2025-04-07T14:19:31.847883Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-04-07T14:19:31.848464Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:12042:7461], server id = [4:12043:7462], tablet id = 72075186224037912, status = OK 2025-04-07T14:19:31.848560Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:12042:7461], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-04-07T14:19:31.849156Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-07T14:19:31.849233Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-07T14:19:31.849362Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2025-04-07T14:19:31.849494Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2025-04-07T14:19:31.849728Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-07T14:19:31.851439Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:12042:7461], server id = [4:12043:7462], tablet id = 72075186224037912 2025-04-07T14:19:31.851468Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-07T14:19:31.852143Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-07T14:19:31.873296Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MjYwNjc5MzItNTQ1YzM2YjUtZWI1MDBhNDYtODI5MDY0NmU=, TxId: 2025-04-07T14:19:31.873359Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MjYwNjc5MzItNTQ1YzM2YjUtZWI1MDBhNDYtODI5MDY0NmU=, TxId: 2025-04-07T14:19:31.874317Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2025-04-07T14:19:31.875707Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:12058:6056]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:31.876100Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:19:31.876155Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:19:31.880867Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:19:31.880934Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:19:31.880984Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:19:31.899299Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-07T14:19:31.900402Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:12058:6056]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-07T14:19:31.900611Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:19:31.900648Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-07T14:19:31.900872Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:19:31.900924Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-07T14:19:31.900974Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-07T14:19:31.903836Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-04-07T14:16:17.453244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574909019009609:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:17.453339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:17.472483Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574911284319502:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:17.472846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:17.570070Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:16:17.573832Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00145b/r3tmp/tmpdVvUlz/pdisk_1.dat 2025-04-07T14:16:17.721720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:17.721920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:17.724228Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:16:17.725013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:17.740126Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9570, node 1 2025-04-07T14:16:17.747940Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:17.751388Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:17.777366Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00145b/r3tmp/yandex1GmvVH.tmp 2025-04-07T14:16:17.777397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00145b/r3tmp/yandex1GmvVH.tmp 2025-04-07T14:16:17.777563Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00145b/r3tmp/yandex1GmvVH.tmp 2025-04-07T14:16:17.777690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:17.795062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:17.795134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:17.798055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:17.821559Z INFO: TTestServer started on Port 5580 GrpcPort 9570 TClient is connected to server localhost:5580 PQClient connected to localhost:9570 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:18.008844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:16:18.038026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:16:19.517335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574919874254396:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.517411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490574919874254401:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.517450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:19.521834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-07T14:16:19.532762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490574919874254410:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-07T14:16:19.674113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:16:19.676513Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574919874254453:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:16:19.676746Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkxN2Q2ZjMtN2RmYzVjNDEtNzVhNmVkOGMtNjQ4NmRmOA==, ActorId: [2:7490574919874254394:2280], ActorState: ExecuteState, TraceId: 01jr89qy9vdnd6cad5t529df24, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:16:19.677376Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574917608945397:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:16:19.677526Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY0MzY2MmMtZmVlYWY5ZmQtNjVjYzhlNjMtNzAwNjcxNWM=, ActorId: [1:7490574917608945357:2306], ActorState: ExecuteState, TraceId: 01jr89qyarebxfr26pbh9fm3rh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:16:19.678303Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:16:19.678295Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:16:19.746256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:16:19.806010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:16:19.963778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr89qymk7eg7yp1zb449k6xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRkYjExODUtZjk2NjBiOGQtOWEzODY4YzgtNWU2MDhmMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7490574917608945758:3062] 2025-04-07T14:16:22.453338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574909019009609:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:22.453404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:22.472713Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574911284319502:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:22.472780Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:9570 2025-04-07T14:16:25.795160Z node 1 :PQ_METACACHE DEBUG: HandleClustersUpdate 2025-04-07T14:16:25.795181Z node 1 :PQ_METACACHE DEBUG: HandleClustersUpdate LocalCluster !LocalCluster.empty() MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-07T14:16:25.823167Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-04-07T14:16:25.823208Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-04-07T14:16:25.823216Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2025-04-07T14:16:25.823230Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response with 0 topics 2025-04-07T14:16:25.823314Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ... he: server session deregistered: _24_1_335690534233609729_v1 2025-04-07T14:19:33.304349Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_335690534233609729_v1 2025-04-07T14:19:33.304367Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_335690534233609729_v1 2025-04-07T14:19:33.304383Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_335690534233609729_v1 2025-04-07T14:19:33.304526Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 35 2025-04-07T14:19:33.305099Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 35 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 37 WriteTimestampMS: 1744035572924 CreateTimestampMS: 1744035572910 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 36 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 38 WriteTimestampMS: 1744035573043 CreateTimestampMS: 1744035573041 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 37 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1744035573053 CreateTimestampMS: 1744035573051 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 372 RealReadOffset: 37 WaitQuotaTimeMs: 0 } Cookie: 35 } 2025-04-07T14:19:33.305413Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) ready for read with readOffset 38 endOffset 40 2025-04-07T14:19:33.305468Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) EndOffset 40 ReadOffset 38 ReadGuid d5aa7810-45c5179c-1d366b9f-da9da213 has messages 1 2025-04-07T14:19:33.305563Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), readOffset# 38, endOffset# 40, WTime# 1744035573053, sizeLag# 372 2025-04-07T14:19:33.305588Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1TEvPartitionReady. Aval parts: 0 2025-04-07T14:19:33.305643Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 read done: guid# d5aa7810-45c5179c-1d366b9f-da9da213, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), size# 518 2025-04-07T14:19:33.305680Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 response to read: guid# d5aa7810-45c5179c-1d366b9f-da9da213 2025-04-07T14:19:33.305871Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 Process answer. 
Aval parts: 1 Bytes readed: 518 Offset: 35 from session 3 Offset: 36 from session 3 Offset: 37 from session 3 2025-04-07T14:19:33.310068Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-04-07T14:19:33.310237Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 got read request: guid# 174fa06d-311cdc4b-9078b049-f838a2a5 2025-04-07T14:19:33.310294Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 performing read request: guid# 13fb6b70-2fe7c2ca-98effd8a-49091339, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), count# 2, size# 170, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-07T14:19:33.310383Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3)maxCount 2 maxSize 170 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid 13fb6b70-2fe7c2ca-98effd8a-49091339 2025-04-07T14:19:33.310701Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T14:19:33.310742Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 2 2025-04-07T14:19:33.310828Z node 24 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2025-04-07T14:19:33.310961Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 2 user $without_consumer offset 38 count 2 size 170 endOffset 40 max time lag 0ms effective offset 38 2025-04-07T14:19:33.311011Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38 2025-04-07T14:19:33.311110Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-04-07T14:19:33.311140Z node 24 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:19:33.311298Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 38 2025-04-07T14:19:33.311786Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1744035573104 CreateTimestampMS: 1744035573102 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1744035573112 CreateTimestampMS: 1744035573110 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 } Cookie: 38 } 2025-04-07T14:19:33.312116Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) wait data in partition inited, cookie 1 from offset40 2025-04-07T14:19:33.312197Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) EndOffset 40 ReadOffset 40 ReadGuid 13fb6b70-2fe7c2ca-98effd8a-49091339 has messages 1 2025-04-07T14:19:33.312318Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 read done: guid# 13fb6b70-2fe7c2ca-98effd8a-49091339, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), size# 348 2025-04-07T14:19:33.312357Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 response to read: guid# 13fb6b70-2fe7c2ca-98effd8a-49091339 2025-04-07T14:19:33.312571Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 Process answer. Aval parts: 0 Bytes readed: 348 Offset: 38 from session 3 Offset: 39 from session 3 2025-04-07T14:19:33.313466Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16890679397630510928_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 3 offsets { end: 39 } } } } 2025-04-07T14:19:33.313500Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_16890679397630510928_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-07T14:19:33.313757Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_16890679397630510928_v1 is DEAD 2025-04-07T14:19:33.314465Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:33.314499Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314557Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7490575750402108666:2552] destroyed 2025-04-07T14:19:33.314582Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:33.314596Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314618Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7490575750402108665:2551] destroyed 2025-04-07T14:19:33.314635Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:33.314651Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314672Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7490575750402108669:2550] destroyed 2025-04-07T14:19:33.314686Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:33.314699Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16890679397630510928_v1 
2025-04-07T14:19:33.314719Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7490575750402108668:2549] destroyed 2025-04-07T14:19:33.314734Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:33.314747Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314770Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7490575750402108667:2548] destroyed 2025-04-07T14:19:33.314829Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314849Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314880Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314903Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16890679397630510928_v1 2025-04-07T14:19:33.314921Z node 24 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16890679397630510928_v1 2025-04-07T14:19:33.898687Z node 24 :KQP_COMPUTE WARN: SelfId: [24:7490575750402108774:2592], TxId: 281474976710707, task: 1, CA Id [24:7490575750402108772:2592]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-07T14:19:33.934737Z node 24 :KQP_COMPUTE WARN: SelfId: [24:7490575750402108774:2592], TxId: 281474976710707, task: 1, CA Id [24:7490575750402108772:2592]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-07T14:19:33.995410Z node 24 :KQP_COMPUTE WARN: SelfId: [24:7490575750402108774:2592], TxId: 281474976710707, task: 1, CA Id [24:7490575750402108772:2592]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-07T14:19:34.065698Z node 24 :KQP_COMPUTE WARN: SelfId: [24:7490575750402108774:2592], TxId: 281474976710707, task: 1, CA Id [24:7490575750402108772:2592]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-07T14:19:34.172247Z node 24 :KQP_COMPUTE WARN: SelfId: [24:7490575750402108774:2592], TxId: 281474976710707, task: 1, CA Id [24:7490575750402108772:2592]. 
Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_dispatch.py::TestMapping::test_mapping >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::SetMeteringMode |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] Test command err: 2025-04-07T14:16:15.790903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574902464814101:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:15.790945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:15.807067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574900657529169:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:15.807487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00146d/r3tmp/tmp1cqzwL/pdisk_1.dat 2025-04-07T14:16:15.930141Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:16:15.930643Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:16:16.044005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:16.044151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:16.048974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:16:16.049939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-07T14:16:16.070416Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10338, node 1 2025-04-07T14:16:16.078226Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:16.078249Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:16:16.104282Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/00146d/r3tmp/yandexN0PWV2.tmp 2025-04-07T14:16:16.104318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/00146d/r3tmp/yandexN0PWV2.tmp 2025-04-07T14:16:16.104501Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/00146d/r3tmp/yandexN0PWV2.tmp 2025-04-07T14:16:16.104584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:16.140418Z INFO: TTestServer started on Port 8465 GrpcPort 10338 2025-04-07T14:16:16.145540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:16.145604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:16.148264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8465 PQClient connected to localhost:10338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:16.322026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:16:16.359967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-07T14:16:18.003710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574911054749821:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:18.003802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574911054749831:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:18.003824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:18.006719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-07T14:16:18.009655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574915349717166:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:18.009699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:16:18.018389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574915349717132:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-07T14:16:18.180064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:16:18.180199Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490574915349717233:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:16:18.180395Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODI3MzczMTUtYjAzODMwNTUtMzhjMzk0YjEtYjI5ZmNjODQ=, ActorId: [1:7490574911054749804:2306], ActorState: ExecuteState, TraceId: 01jr89qwsyf9zbn0gtpmvhywee, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:16:18.181999Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:16:18.183542Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7490574913542431384:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:16:18.184995Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M3MjgxYTgtOTY2OGMxZjAtNGM0ODE1YWUtZDE5YzM2Zjg=, ActorId: [2:7490574913542431336:2280], ActorState: ExecuteState, TraceId: 01jr89qwyh8s4ww34p6nfxr85c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-07T14:16:18.185456Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-07T14:16:18.253854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:16:18.345223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-07T14:16:18.507759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr89qx707dyyq8brygdjzj5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdiNDYyZGEtODgyZjQ3NC1hMzBmZGJlLWE0MGYzMWIx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7490574915349717582:3068] 2025-04-07T14:16:20.791015Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574902464814101:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.791103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:16:20.807394Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490574900657529169:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:20.807487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:10338 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-07T14:16:24.357738Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Mark ... 
' requestId: 2025-04-07T14:19:55.415610Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T14:19:55.415713Z node 24 :PERSQUEUE INFO: new Cookie 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 generated for partition 0 topic 'rt3.dc1--topic1' owner 12345678 2025-04-07T14:19:55.415870Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-07T14:19:55.415975Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:19:55.416583Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T14:19:55.416624Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T14:19:55.413498Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575842977120332:2463] (SourceId=12345678, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:19:55.416772Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:19:55.413565Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575842977120332:2463] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:19:55.413596Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575842977120332:2463] (SourceId=12345678, PreferedPartition=(NULL)) Start idle 2025-04-07T14:19:55.413637Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:19:55.415393Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2025-04-07T14:19:55.417187Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 Finish: 0 2025-04-07T14:19:55.421228Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 grpc read done: success: 0 data: 2025-04-07T14:19:55.422582Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:55.421267Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 grpc read failed 2025-04-07T14:19:55.421478Z node 23 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 2025-04-07T14:19:55.422652Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7490575847272087693:2463] destroyed 2025-04-07T14:19:55.421512Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8f24261d-78ecbd91-8e91afce-28866fd0_0 is DEAD 2025-04-07T14:19:55.422707Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-07T14:19:55.424958Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Random seed for debugging is 1744035595424940 2025-04-07T14:19:55.422106Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:19:55.433539Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for "init_response" 2025-04-07T14:19:55.433818Z node 23 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-07T14:19:55.433866Z node 23 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-07T14:19:55.434271Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "test-message-group-id" } 2025-04-07T14:19:55.434450Z node 23 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "test-message-group-id" from ipv6:[::1]:36530 2025-04-07T14:19:55.434476Z node 23 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:36530 proto=v1 topic=topic1 durationSec=0 2025-04-07T14:19:55.434488Z node 23 :PQ_WRITE_PROXY INFO: init check schema 2025-04-07T14:19:55.436519Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-07T14:19:55.436694Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-07T14:19:55.436711Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-07T14:19:55.436729Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-07T14:19:55.436752Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-07T14:19:55.443441Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-07T14:19:55.480191Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) RequestPQRB 2025-04-07T14:19:55.480888Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7490575847272087735:2479] connected; active server actors: 1 2025-04-07T14:19:55.480981Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) 
Received partition 0 from PQRB for SourceId=test-message-group-id 2025-04-07T14:19:55.481009Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Update the table 2025-04-07T14:19:55.481348Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7490575847272087735:2479] disconnected; active server actors: 1 2025-04-07T14:19:55.481388Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7490575847272087735:2479] disconnected no session 2025-04-07T14:19:55.491688Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-07T14:19:55.491751Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-07T14:19:55.491781Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7490575847272087697:2479] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Start idle 2025-04-07T14:19:55.491826Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-07T14:19:55.492714Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-07T14:19:55.492792Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [23:7490575847272087753:2479], now have 1 active actors on pipe 2025-04-07T14:19:55.492876Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2025-04-07T14:19:55.493119Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T14:19:55.493167Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T14:19:55.493249Z node 24 :PERSQUEUE INFO: new Cookie test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0 generated for partition 0 topic 'rt3.dc1--topic1' owner test-message-group-id 2025-04-07T14:19:55.493388Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-07T14:19:55.493461Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:19:55.494266Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-07T14:19:55.494292Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-07T14:19:55.494376Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-07T14:19:55.494672Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0 Init response: status: SUCCESS init_response { session_id: "test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP } 2025-04-07T14:19:55.495683Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Session ID is "test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0" 2025-04-07T14:19:55.496400Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for session to die 2025-04-07T14:19:55.502402Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T14:19:55.502544Z node 23 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: bad write request - 'blocks_headers' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2) sessionId: test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0 2025-04-07T14:19:55.502830Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group-id|fcb34c95-9d8c16f6-616be671-a83c201a_0 is DEAD status: BAD_REQUEST issues { message: "bad write request - \'blocks_headers\' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2)" issue_code: 500003 severity: 1 } 2025-04-07T14:19:55.503382Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:19:55.504691Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:19:55.504762Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7490575847272087753:2479] destroyed 2025-04-07T14:19:55.504829Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> TPersQueueTest::DisableDeduplication [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:22.968907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:22.969048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:22.969098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:22.969139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:22.969894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:22.969962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:22.970044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:22.971201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:23.051674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:23.051731Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:23.066179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:23.066305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:23.066433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:23.076999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:23.077187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:23.077751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.078088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:23.105911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.110198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:23.110266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.110418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:23.110455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:23.110511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-07T14:13:23.110644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:23.120650Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:23.260902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:23.261124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.261361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:23.261576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:23.261635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.266078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.266227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:23.266431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.266475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:23.266508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:23.266547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:23.269630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.269690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:23.269744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:23.271758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.271812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.271855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:23.271897Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.278897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:23.280765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:23.281012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:23.281892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:23.282030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:23.282089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:23.282342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:23.282386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:23.282544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:23.282597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:23.285663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:23.285706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:23.285867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:23.285911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:23.286249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:23.286290Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:23.286372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:23.286421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.286459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:23.286500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:23.286525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:23.286552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:23.286604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:23.286638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:23.286663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... Queue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { 
CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:20:01.139499Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:20:01.139807Z node 180 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 332us result status StatusSuccess 2025-04-07T14:20:01.140848Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:20:01.153475Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1015:2793] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:20:01.153585Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1016:2793] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:20:01.153664Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:941:2793] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-07T14:20:01.153748Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:941:2793] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-07T14:20:01.153870Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1015:2793] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035601120929 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035601120929 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:20:01.154066Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1016:2793] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1744035601120929 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:20:01.157283Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1015:2793] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-07T14:20:01.157399Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1016:2793] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:20:01.157476Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:941:2793] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-07T14:20:01.157565Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:941:2793] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test 
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 
2025-04-07T14:13:37.388483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:37.388573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:37.388608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:37.388637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:37.388710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:37.388745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:37.388787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:37.389098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:37.455680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:37.455733Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:37.466666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:13:37.466777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:37.466924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:37.476448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:37.476595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:37.477200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:37.477545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:37.482599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:37.484395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:37.484458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:37.484626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:37.484697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:37.484741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:37.484871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for 
TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:37.491425Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:37.623238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:37.623460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.623686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:37.623928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:37.624007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.626294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:37.626436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:37.626608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.626657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:37.626696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:37.626751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:37.628604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.628683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:37.628724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:37.630458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.630502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.630561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:37.630605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:37.634077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-04-07T14:13:37.635673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:37.635860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:37.636654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:37.636839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:37.636896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:37.637144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:37.637192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:37.637363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:37.637452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:37.639325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:37.639368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:37.639528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:37.639564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:37.639906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:37.639950Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:37.640042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:37.640093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:37.640141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:37.640186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:37.640220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:37.640252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 1:0 2025-04-07T14:13:37.640319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:37.640358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:37.640391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... erQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:20:06.027469Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:20:06.027751Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 322us result status StatusSuccess 2025-04-07T14:20:06.028451Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:20:06.050063Z node 162 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][162:1173:2916] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:20:06.050179Z node 162 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][162:1110:2916] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-07T14:20:06.050326Z node 162 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][162:1173:2916] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035605977789 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035605977789 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744035605977789 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:20:06.053235Z node 162 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][162:1173:2916] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-07T14:20:06.053353Z node 162 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][162:1110:2916] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] |97.4%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::TClusterTrackerTest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13966, MsgBus: 11361 2025-04-07T14:14:18.438527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574397826566143:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:18.438678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012aa/r3tmp/tmpdM4qYi/pdisk_1.dat 2025-04-07T14:14:18.705315Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13966, node 1 2025-04-07T14:14:18.763237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:18.763287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:18.763304Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:18.763408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:14:18.804144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:14:18.804281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:18.805878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11361 TClient is connected to server localhost:11361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:14:19.184289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.196566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:14:19.211290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.333396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.477511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:19.531425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:14:21.090497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410711469742:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.090619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.296698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.322034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.345448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.370811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.436032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.507087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:14:21.581474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410711470250:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.581535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.581562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490574410711470255:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:14:21.584385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:14:21.592691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490574410711470257:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:14:22.435145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.470677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.493701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.518063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.591597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.614174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.636442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.660427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.680712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:14:22.744843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:14:23.450776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490574397826566143:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:23.450837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22678, MsgBus: 19936 2025-04-07T14:14:24.511320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574423072868232:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:14:24.511369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0012aa/r3tmp/tmpaJwmPa/pdisk_1.dat 2025-04-07T14:14:24.635334Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22678, node 2 2025-04-07T14:14:24.678605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-07T14:14:24.678722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:14:24.684544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:14:24.707070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:14:24.707102Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:14:24.707110Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:14:24.707202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19936 TClient is connected to server localhost:19936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 Path ... > Connected TServer::EnableGrpc on GrpcPort 65030, node 29 2025-04-07T14:20:07.630644Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:07.630699Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:07.630720Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:07.630937Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8091 TClient is connected to server localhost:8091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:09.070153Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:09.097373Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:09.242494Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:20:09.668186Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:09.822360Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:12.268039Z node 29 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[29:7490575897503636420:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:12.268180Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:20:15.359194Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7490575931863376537:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:15.359375Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:15.458237Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.528784Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.631339Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.712334Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.809076Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.889328Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:15.985732Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7490575931863377040:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:15.985936Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:15.986350Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7490575931863377045:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:15.993229Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:16.008491Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [29:7490575931863377047:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:18.313423Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.434998Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.526946Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.615638Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.740137Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.825816Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-07T14:20:18.910898Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-07T14:20:19.000008Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-07T14:20:19.123014Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-07T14:20:19.223525Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-07T14:20:22.521218Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:20:22.521257Z node 29 :IMPORT WARN: Table profiles were not loaded {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"Tables":["PgComplexKey"],"PlanNodeId":9,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"TopBy":"","Name":"Top","Limit":"1001"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","ReadColumns":["Fk","Key","Value"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"PgComplexKey"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Top-TablePointLookup-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"TopSort","Limit":"1001","TopSortBy":""}],"Node Type":"TopSort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":7,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["PgKey"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"PgKey","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/PgComplexKey","reads":[{"columns":["Fk","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/PgKey","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"PgKey","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","ReadColumns":["Fk","Key","Value"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"PgComplexKey"}],"Node Type":"TablePointLookup"}],"Operators":[{"TopBy":"","Name":"Top","Limit":"1001"}],"Node Type":"Top"}],"Operators":[{"Name":"TopSort","Limit":"1001","TopSortBy":""}],"Node Type":"TopSort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |97.5%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] |97.5%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> KqpParams::RowsList >> KqpStats::StatsProfile >> KqpLimits::TooBigQuery >> KqpExplain::ExplainStream >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> KqpQuery::QueryResultsTruncated >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> KqpParams::RowsList [GOOD] >> KqpQuery::CurrentUtcTimestamp >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::FewEffects >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] |97.5%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |97.5%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryExplain >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> KqpExplain::FewEffects [GOOD] >> KqpExplain::FullOuterJoin >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> KqpExplain::FullOuterJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28936, MsgBus: 23895 2025-04-07T14:20:33.840996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576007857025511:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:33.841271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013d8/r3tmp/tmp2nWToD/pdisk_1.dat 2025-04-07T14:20:34.275137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:34.275225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:34.277580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28936, node 1 2025-04-07T14:20:34.310596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:34.517253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:34.517274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:34.517286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:34.517378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23895 TClient is connected to server localhost:23895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:35.371189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:20:35.408706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.565007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:20:35.819070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:20:35.923099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:37.209264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576025036896245:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.209424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.740262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.779710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.863704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.905840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.979299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:38.023939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:38.076641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029331864042:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.076746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.077515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029331864047:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.083096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:38.097535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576029331864049:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:38.838301Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576007857025511:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:38.838382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":null,"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Aggregate"}],"Node Type":"Aggregate","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1744035639893,"Host":"ghrun-lqtfcktism","OutputRows":1,"StartTimeMs":1744035639878,"IngressRows":3,"ComputeTimeUs":84,"NodeId":1,"OutputChannels":[{"ChannelId":2,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":2326,"TaskId":2,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"DurationUs":15000,"CpuTimeUs":2563},{"Tasks":[{"FinishTimeMs":1744035639893,"Host":"ghrun-lqtfcktism","OutputRows":1,"StartTimeMs":1744035639878,"IngressRows":3,"ComputeTimeUs":108,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":2042,"TaskId":1,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"DurationUs":15000,"CpuTimeUs":3717}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3}},"Name":"4","Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3},"WaitTimeUs":{"Count":2,"Sum":4402,"Max":2332,"Min":2070},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"DurationUs":{"Count":2,"Sum":30000,"Max":15000,"Min":15000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576},"Tasks":2,"OutputRows":{"Count":2,"Sum":2,"Max":1,"Min":1},"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":15000,"BaseTimeMs":1744035639876,"WaitInputTimeUs":{"Count":2,"Sum":4368,"Max":2326,"Min":2042},"OutputBytes":{"Count":2,"Sum":6,"Max":3,"Min":3},"CpuTimeUs":{"Count":2,"Sum":4414,"Max":2559,"Min":1855},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24},"WaitTimeUs":{"Count":2,"Sum":4486,"Max":2374,"Min":2112},"WaitPeriods":{"Cou
nt":2,"Sum":2,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":6,"FinishTimeMs":1744035639894,"Host":"ghrun-lqtfcktism","ResultRows":1,"ResultBytes":3,"OutputRows":1,"StartTimeMs":1744035639893,"InputRows":2,"ComputeTimeUs":196,"InputChannels":[{"WaitTimeUs":218,"ChannelId":1,"Rows":1,"SrcStageId":0,"Bytes":3},{"WaitTimeUs":172,"ChannelId":2,"Rows":1,"SrcStageId":0,"Bytes":3}],"NodeId":1,"OutputChannels":[{"ChannelId":3,"Rows":1,"DstStageId":0,"Bytes":3}],"WaitInputTimeUs":116,"TaskId":3,"OutputBytes":3}],"PeakMemoryUsageBytes":131072,"DurationUs":1000,"CpuTimeUs":17110}],"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":1,"InputBytes":{"Count":1,"Sum":6,"Max":6,"Min":6},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1744035639876,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":17,"Max":17,"Min":17}," ... 025-04-07T14:20:52.716507Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":8,"Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","Stats":{"UseLlvm":"undefined","DurationUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000},"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":2000,"BaseTimeMs":1744035653251,"CpuTimeUs":{"Count":1,"Sum":897,"Max":897,"Min":897},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"11","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1855,"Max":1855,"Min":1855},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":1000,"BaseTimeMs":1744035653251,"WaitInputTimeUs":{"Count":1,"Sum":1787,"Max":1787,"Min":1787},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":640,"Max":640,"Min":640},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":889,"Max":889,"Min":889},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"WaitTimeUs":{"Count":1,"Sum":2964,"Max":2964,"Min":2964},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":2,"StageDurationUs":1000,"BaseTimeMs":1744035653251,"WaitInputTimeUs":{"Count":1,"Sum":2917,"Max":2917,"Min":2917},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":518,"Max":518,"Min":518},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"9","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"WaitTimeUs":{"Count":1,"Sum":2980,"Max":2980,"Min":2980},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":597,"Max":597,"Min":597},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1744035653244,"WaitInputTimeUs":{"Count":1,"Sum":586,"Max":586,"Min":586},"CpuTimeUs":{"Count":1,"Sum":429,"Max":429,"Min":429},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32},"WaitTimeUs":{"Count":1,"Sum":606,"Max":606,"Min":606},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":1035,"Max":1035,"Min":1035},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":1000,"BaseTimeMs":1744035653244,"WaitInputTimeUs":{"Count":1,"Sum":949,"Max":949,"Min":949},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":416,"Max":416,"Min":416},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":1062,"Max":1062,"Min":1062},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":312000,"CpuTimeUs":308214},"ProcessCpuTimeUs":528,"TotalDurationUs":333591,"ResourcePoolId":"default","QueuedTimeUs":512},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-Cpu":0.64,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-Cpu":1.158,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 5202, MsgBus: 31478 2025-04-07T14:20:33.835329Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576010508044966:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:33.835469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c2/r3tmp/tmpMpkhFR/pdisk_1.dat 2025-04-07T14:20:34.245978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:34.267655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:34.267797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:34.271452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5202, node 1 2025-04-07T14:20:34.521695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:34.521720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:34.521730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:34.521880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31478 TClient is connected to server localhost:31478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:35.447990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.471885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:20:35.487239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.697394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:20:35.878262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:20:35.941270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:37.224412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576027687915724:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.234268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.735930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.769425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.807410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.844621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.880681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.961602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:38.027539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576031982883522:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.027609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.027631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576031982883527:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.031398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:38.044787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576031982883529:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:38.840085Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576010508044966:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:38.840159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key 
(-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 5048, MsgBus: 26300 2025-04-07T14:20:41.172730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576041591398271:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:41.172771Z node 2 :METAD ... "TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_5_0","Node Type":"Precompute_5","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} Trying to start YDB, gRPC: 63462, MsgBus: 19966 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013c2/r3tmp/tmpnSJQTZ/pdisk_1.dat 2025-04-07T14:20:48.076748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:20:48.097305Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:48.119161Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:48.119237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:48.121618Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63462, node 3 2025-04-07T14:20:48.225446Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:48.225467Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:48.225474Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:48.225558Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19966 TClient is connected to server localhost:19966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:48.702018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:48.720498Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:48.793168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:48.941948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:20:49.043801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.194829Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576086306847208:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:51.194927Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:51.237295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.267693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.312292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.348045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.378634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.415870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:51.471282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576086306847701:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:51.471339Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:51.471490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576086306847706:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:51.474741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:51.482819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490576086306847708:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:20:52.559006Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:52.882554Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-07T14:20:52.931339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |97.6%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> KqpQuery::QueryFromSqs [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 1350, MsgBus: 3568 2025-04-07T14:20:39.890026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576036511230463:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a7/r3tmp/tmpGDKKCn/pdisk_1.dat 2025-04-07T14:20:40.073273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:20:40.252305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:40.273824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:40.273930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:40.275847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 1350, node 1 2025-04-07T14:20:40.406177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:40.406205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:40.406213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:40.406316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3568 TClient is connected to server localhost:3568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:41.106546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.120839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:20:41.135403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.306965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.527244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.626928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:43.445346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576053691101210:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:43.445452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:43.692144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.717846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.764971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.801035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.837408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.884575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:43.934370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576053691101702:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:43.934437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:43.934547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576053691101707:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:43.937768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:43.948256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576053691101709:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:44.852185Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576036511230463:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:44.852279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:20:45.136876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61112, MsgBus: 11263 2025-04-07T14:20:47.090363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576070893055752:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:47.090402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a7/r3tmp/tmpyY4pDF/pdisk_1.dat 2025-04-07T14:20:47.197221Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:47.214069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:47.214151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:47.215947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61112, node 2 2025-04-07T14:20:47.359735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:47.359755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:47.359766Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:47.359861Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11263 TClient is connected to server localhost:11263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:47.826034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:20:47.831545Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:47.841989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:20:47.935122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:48.126621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:48.200766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... e, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:50.753907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576083777959825:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '692) '('"_id" '"7249eeef-fea8f765-23220d92-f6d8cecd") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '705) '('"_id" '"adbb3be7-7e779300-87877491-abd1604f")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","Inputs":[],"E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2025-04-07T14:20:52.094912Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576070893055752:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:52.094986Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29560, MsgBus: 6929 2025-04-07T14:20:52.888269Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490576090538402962:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:52.952127Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013a7/r3tmp/tmp5XmozD/pdisk_1.dat 2025-04-07T14:20:53.131708Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:53.133931Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:53.134029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:53.135433Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29560, node 3 2025-04-07T14:20:53.262658Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:53.262682Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:53.262690Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:53.262787Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6929 TClient is connected to server localhost:6929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:53.788033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:53.796404Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:20:53.815132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:20:53.911837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:54.075565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:54.158732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:56.156195Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576107718273709:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:56.156283Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:56.197482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.237794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.272223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.306006Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.347292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.441120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:56.505123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576107718274205:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:56.505209Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:56.505400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576107718274210:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:56.514381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:56.527789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490576107718274212:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:20:57.732152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-07T14:20:57.878179Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490576090538402962:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:57.878248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:20:47.432896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:20:47.433039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:20:47.433081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:20:47.433111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:20:47.436900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:20:47.436972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:20:47.437095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:20:47.441279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:20:47.532838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:20:47.532896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:47.553556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:20:47.553647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-07T14:20:47.553751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:20:47.564910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:20:47.565120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:20:47.567196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:20:47.567535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:20:47.573628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:20:47.583127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:20:47.583225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:20:47.583367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:20:47.583423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:20:47.583583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:20:47.583771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.596784Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:20:47.784767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:20:47.785966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.787491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:20:47.789373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:20:47.789442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.792366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:20:47.793207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:20:47.793568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.793619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:20:47.793708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:20:47.793748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:20:47.795733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.795805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:20:47.795841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:20:47.797730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.797804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.797836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:20:47.797879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:20:47.803109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:20:47.805631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:20:47.807972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:20:47.809113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:20:47.809262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:20:47.809318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:20:47.809629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:20:47.809703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:20:47.809908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:20:47.809981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:20:47.812415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-07T14:20:47.812461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:20:47.812656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:20:47.812724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:20:47.812983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:20:47.813029Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:20:47.813119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:20:47.813174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:20:47.813224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:20:47.813263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:20:47.813296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:20:47.813326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:20:47.813389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:20:47.813435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:20:47.813485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:20:47.815518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:20:47.815617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:20:47.815650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:20:47.815684Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:20:47.815717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:20:47.815896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-04-07T14:21:00.793204Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-04-07T14:21:00.793704Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-04-07T14:21:00.793777Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-04-07T14:21:00.793927Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-04-07T14:21:00.793994Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-04-07T14:21:00.796525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-04-07T14:21:00.796735Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-07T14:21:00.797061Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-07T14:21:00.797118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-07T14:21:00.797352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-04-07T14:21:00.797440Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-07T14:21:00.797492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-04-07T14:21:00.797551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-04-07T14:21:00.798169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-07T14:21:00.798241Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-04-07T14:21:00.798512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-07T14:21:00.799208Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-07T14:21:00.799316Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-07T14:21:00.799363Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-07T14:21:00.799417Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-04-07T14:21:00.799468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-04-07T14:21:00.800451Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-07T14:21:00.800524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-07T14:21:00.800549Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-07T14:21:00.800576Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-04-07T14:21:00.800601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-04-07T14:21:00.800895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-04-07T14:21:00.803369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-04-07T14:21:00.803530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-04-07T14:21:00.803584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-07T14:21:00.804431Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-07T14:21:00.804696Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-04-07T14:21:00.804912Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-07T14:21:00.804970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-07T14:21:00.805117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-04-07T14:21:00.805176Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-04-07T14:21:00.805266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-07T14:21:00.805368Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-04-07T14:21:00.806406Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-07T14:21:00.808475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-07T14:21:00.814827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-07T14:21:00.815139Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-07T14:21:00.815195Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#116:0 ProgressState at tabletId# 72075186233409546 2025-04-07T14:21:00.815285Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-04-07T14:21:00.815671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 677 RawX2: 30064773661 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-04-07T14:21:00.819020Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2025-04-07T14:21:00.819197Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-04-07T14:21:00.847873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-04-07T14:21:00.850554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-07T14:21:00.850766Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] |97.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 32349, MsgBus: 19052 2025-04-07T14:20:33.833722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576008189107035:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:33.834233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b6/r3tmp/tmpGS51ee/pdisk_1.dat 2025-04-07T14:20:34.281561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:34.281678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:34.283602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:20:34.325732Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32349, node 1 2025-04-07T14:20:34.517365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:34.517389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:34.517394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:34.517476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19052 TClient is connected to server localhost:19052 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:35.469050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.501599Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:20:35.519104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.698751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.900258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.983148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:37.174693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576025368977921:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.174804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.734444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.785287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.814316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.858670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.889007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.956595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:38.021079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029663945719:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.021223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.021436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029663945725:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.027796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:38.039551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576029663945727:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:38.834554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576008189107035:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:38.866434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16869, MsgBus: 30995 2025-04-07T14:20:40.929452Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576040976049744:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:40.929497Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013b6/r3tmp/tmp2TN43R/pdisk_1.dat 2025-04-07T14:20:41.124409Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:41.178132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:41.178201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:41.185642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16869, node 2 2025-04-07T14:20:41.303708Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:41.303730Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:41.303736Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:41.303820Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30995 TClient is connected to server localhost:30995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:41.700405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:20:41.713988Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:20:41.734619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.804762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:41.960742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:42.031428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:44.457110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:74905760581 ... :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:20:57.457513Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:20:57.457613Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:20:57.457630Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:20:59.003757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-07T14:20:59.101903Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038020 not found 2025-04-07T14:20:59.101946Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038016 not found 2025-04-07T14:20:59.101960Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038022 not found 2025-04-07T14:20:59.101974Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038005 not found 2025-04-07T14:20:59.101987Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038007 not found 2025-04-07T14:20:59.102000Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038001 not found 2025-04-07T14:20:59.102015Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038003 not found 2025-04-07T14:20:59.102028Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037999 not found 2025-04-07T14:20:59.102041Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037995 not found 2025-04-07T14:20:59.102055Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037991 not found 2025-04-07T14:20:59.102067Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037997 not found 2025-04-07T14:20:59.102081Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038018 not found 2025-04-07T14:20:59.102094Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037981 not found 2025-04-07T14:20:59.102107Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038014 not found 2025-04-07T14:20:59.102119Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038012 not found 2025-04-07T14:20:59.102133Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038008 not found 2025-04-07T14:20:59.102146Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038027 not found 2025-04-07T14:20:59.102159Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038010 not found 2025-04-07T14:20:59.102174Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038025 not found 2025-04-07T14:20:59.102187Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037972 not found 2025-04-07T14:20:59.102199Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037970 not found 2025-04-07T14:20:59.102211Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037968 not found 2025-04-07T14:20:59.102224Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037966 not found 2025-04-07T14:20:59.102237Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037993 not found 2025-04-07T14:20:59.102250Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038024 not found 2025-04-07T14:20:59.102263Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037983 not found 2025-04-07T14:20:59.102277Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037985 not found 2025-04-07T14:20:59.102290Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037979 not found 2025-04-07T14:20:59.102304Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038015 not found 2025-04-07T14:20:59.102316Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037975 not found 2025-04-07T14:20:59.102329Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037977 not found 2025-04-07T14:20:59.102341Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038004 not found 2025-04-07T14:20:59.102355Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038006 not found 2025-04-07T14:20:59.102369Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038002 not found 2025-04-07T14:20:59.102384Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038000 not found 2025-04-07T14:20:59.102398Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037998 not found 2025-04-07T14:20:59.102411Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037996 not found 2025-04-07T14:20:59.102426Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037994 not found 2025-04-07T14:20:59.102441Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037992 not found 2025-04-07T14:20:59.102455Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037987 not found 2025-04-07T14:20:59.102469Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037989 not found 2025-04-07T14:20:59.102482Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038019 not found 2025-04-07T14:20:59.104202Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038017 not found 2025-04-07T14:20:59.104238Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038013 not found 2025-04-07T14:20:59.104252Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037976 not found 2025-04-07T14:20:59.104266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038009 not found 2025-04-07T14:20:59.104280Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038026 not found 2025-04-07T14:20:59.104292Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037973 not found 2025-04-07T14:20:59.104304Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038011 not found 2025-04-07T14:20:59.104318Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037969 not found 2025-04-07T14:20:59.104330Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038028 not found 2025-04-07T14:20:59.104342Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037971 not found 2025-04-07T14:20:59.104354Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037967 not found 2025-04-07T14:20:59.104366Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038021 not found 2025-04-07T14:20:59.104377Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 not found 2025-04-07T14:20:59.104391Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2025-04-07T14:20:59.104403Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037980 not found 2025-04-07T14:20:59.104416Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038023 not found 2025-04-07T14:20:59.104428Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037974 not found 2025-04-07T14:20:59.104445Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2025-04-07T14:20:59.114697Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037988 not found 2025-04-07T14:20:59.114738Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2025-04-07T14:20:59.114752Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2025-04-07T14:20:59.114769Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037978 not found 2025-04-07T14:20:59.329133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:1, at schemeshard: 72057594046644480 2025-04-07T14:20:59.494352Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmRhZjg1NS1lMDcxZjM3My1lMmE3N2ZlYi05ODE4ODEzZA==, ActorId: [3:7490576120169859006:3841], ActorState: ExecuteState, TraceId: 01jr8a0ffbca0xz7gjqrqp63z5, Create QueryResponse for error on request, msg: 2025-04-07T14:20:59.697183Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710690, at schemeshard: 72057594046644480 2025-04-07T14:20:59.705004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:1, at schemeshard: 72057594046644480 2025-04-07T14:21:00.825668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:1, at schemeshard: 72057594046644480 >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> TPersQueueTest::SrcIdCompatibility [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2025-04-07T14:16:15.758390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574902655890735:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:15.758439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:15.779571Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490574899190577482:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:15.779662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001469/r3tmp/tmph9Q5Tc/pdisk_1.dat 2025-04-07T14:16:15.878594Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:16:15.878635Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2025-04-07T14:16:16.019944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:16.047321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:16.047411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:16.051339Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:16:16.052005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32218, node 1 2025-04-07T14:16:16.096270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/u6pr/001469/r3tmp/yandexTZEyN5.tmp 2025-04-07T14:16:16.096296Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001469/r3tmp/yandexTZEyN5.tmp 2025-04-07T14:16:16.096442Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001469/r3tmp/yandexTZEyN5.tmp 2025-04-07T14:16:16.096542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:16.101211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:16.101273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:16.104189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:16.130167Z INFO: TTestServer started on Port 27031 GrpcPort 32218 TClient is connected to server localhost:27031 PQClient connected to localhost:32218 === TenantModeEnabled() = 0 === Init PQ - start server on port 32218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:16.388837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:16:16.389028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.389174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:16:16.389351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:16.389402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.390930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:16.391035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:16.391157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.391183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:16.391201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-07T14:16:16.391211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-07T14:16:16.392131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:16.392152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-07T14:16:16.392167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:16:16.392433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.392465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:16.392474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:16:16.393528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.393549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.393560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:16.393589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-07T14:16:16.396444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:16:16.397792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-07T14:16:16.397905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:16:16.399844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035376447, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:16.399966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7490574906950858645 RawX2: 4294969662 } } Step: 1744035376447 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:16:16.400008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:16.400217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:16:16.400239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:16:16.400369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:16:16.400403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:16.401784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:16.401819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:16.401962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:16.401988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574906950858659:2377], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-07T14:16:16.402034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:16.402056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:16:16.402134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:16:16.402153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-07T14:16:16.402174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-07T14:16:16.402236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-07T14:16:16.402248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:16:16.402265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-07T14:16:16.402313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:16:16.402327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:16:16.402340Z node ... artition::ReplyOwnerOk. Partition: 7 2025-04-07T14:21:08.878420Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-07T14:21:08.878623Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-07T14:21:08.878644Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-07T14:21:08.878733Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-07T14:21:08.878916Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 2025-04-07T14:21:08.879730Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744035668879 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:21:08.879925Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. 
Init response: session_id: "test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-07T14:21:08.880299Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write 1 messages with Id from 1 to 1 2025-04-07T14:21:08.885403Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: try to update token 2025-04-07T14:21:08.885519Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-07T14:21:08.886571Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-07T14:21:08.886951Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-07T14:21:08.890366Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-07T14:21:08.890432Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-07T14:21:08.890555Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-04-07T14:21:08.890668Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:21:08.891466Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-07T14:21:08.891519Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-07T14:21:08.891595Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-04-07T14:21:08.891871Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-04-07T14:21:08.915528Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-04-07T14:21:08.916684Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1744035668912 2025-04-07T14:21:08.924963Z node 27 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-07T14:21:08.930183Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-04-07T14:21:08.930261Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2025-04-07T14:21:08.930328Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-07T14:21:08.930585Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-07T14:21:08.930621Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-07T14:21:08.930723Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-04-07T14:21:08.930904Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-07T14:21:08.931325Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-07T14:21:08.931360Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2025-04-07T14:21:08.931413Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-07T14:21:08.931441Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2025-04-07T14:21:08.931516Z node 27 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1744035668891 queuesize 0 startOffset 0 2025-04-07T14:21:08.933599Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 15 queued_in_partition_duration_ms: 20 } 2025-04-07T14:21:08.933681Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: acknoledged message 1 2025-04-07T14:21:08.934052Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: close. 
Timeout = 0 ms 2025-04-07T14:21:08.934128Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session will now close 2025-04-07T14:21:08.934193Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: aborting 2025-04-07T14:21:08.937853Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 grpc read done: success: 0 data: 2025-04-07T14:21:08.937897Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 grpc read failed 2025-04-07T14:21:08.938468Z node 27 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 2025-04-07T14:21:08.938503Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0 is DEAD 2025-04-07T14:21:08.939227Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-04-07T14:21:08.939808Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:21:08.939861Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7490576157184473063:2701] destroyed 2025-04-07T14:21:08.939913Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 2025-04-07T14:21:08.942002Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: gracefully shut down, all writes complete 2025-04-07T14:21:08.944583Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session is aborting and will not restart 2025-04-07T14:21:08.944752Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|c5e32c85-bd70c42d-b62982ac-902ea59d_0] Write session: destroy 2025-04-07T14:21:09.581192Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037905] Handle TEvPQ::TEvPartitionCounters PartitionId 14 2025-04-07T14:21:09.783100Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037905] Handle TEvPQ::TEvPartitionCounters PartitionId 50 2025-04-07T14:21:09.803685Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037905] Handle TEvPQ::TEvPartitionCounters PartitionId 84 2025-04-07T14:21:09.813053Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 62 2025-04-07T14:21:09.813643Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 55 2025-04-07T14:21:09.813817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037905] Handle TEvPQ::TEvPartitionCounters PartitionId 49 2025-04-07T14:21:09.821442Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvPQ::TEvPartitionCounters PartitionId 43 2025-04-07T14:21:09.854848Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037905] Handle TEvPQ::TEvPartitionCounters PartitionId 25 2025-04-07T14:21:09.924806Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvPQ::TEvPartitionCounters PartitionId 44 2025-04-07T14:21:09.925091Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-07T14:21:10.073624Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvPQ::TEvPartitionCounters PartitionId 67 
2025-04-07T14:21:10.077614Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 35 2025-04-07T14:21:10.081196Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvPQ::TEvPartitionCounters PartitionId 51 2025-04-07T14:21:10.086935Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvPQ::TEvPartitionCounters PartitionId 7 2025-04-07T14:21:10.089478Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 10 2025-04-07T14:21:10.089548Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvPQ::TEvPartitionCounters PartitionId 79 2025-04-07T14:21:10.179788Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvPQ::TEvPartitionCounters PartitionId 63 2025-04-07T14:21:10.180101Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvPQ::TEvPartitionCounters PartitionId 65 2025-04-07T14:21:10.197876Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 42 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> TDatabaseQuotas::DisableWritesToDatabase >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] >> TGRpcYdbTest::CreateTableBadRequest2 >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] 
>> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableWithIndex >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] >> TGRpcClientLowTest::GrpcRequestProxy >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] |97.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterAsyncReplication 2025-04-07 14:21:23,094 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:21:23,294 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 445975 75.6M 75.3M 21.3M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/001473/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args 446128 2.0G 1.9G 1.5G └─ ydb-core-kqp-ut-scheme --trace-path-append /home/runner/.ya/build/build_root/u6pr/001473/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk16/ytest.rep Test command err: Trying to start YDB, gRPC: 20249, MsgBus: 7770 2025-04-07T14:11:26.140093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490573660808841265:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:26.140187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001473/r3tmp/tmpgoyad4/pdisk_1.dat 2025-04-07T14:11:26.495308Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20249, node 1 2025-04-07T14:11:26.551914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:26.553139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:26.561095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:26.589355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:26.589376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:26.589392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:26.589494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7770 TClient is connected to server localhost:7770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:11:27.138639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.151652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.267611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.391002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:27.461989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:28.551224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669398777559:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.551353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.726447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.745586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.766343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.783710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.800446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.820193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:11:28.857943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669398778050:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.857996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.858099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490573669398778055:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:28.860971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:11:28.867051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490573669398778057:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:11:29.632864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19909, MsgBus: 10943 2025-04-07T14:11:30.108687Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490573677969073027:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:11:30.109685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001473/r3tmp/tmpULT5XH/pdisk_1.dat 2025-04-07T14:11:30.170702Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19909, node 2 2025-04-07T14:11:30.208595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:11:30.208681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:11:30.210305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:11:30.221274Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:11:30.221293Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:11:30.221297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:11:30.221361Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10943 TClient is connected to server localhost:10943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:11:30.548741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.556961Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:11:30.561731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:11:30.611050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.738990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:30.809855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:11:32.466241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490573686559009296:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:11:32.466297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NO ... nerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:05.769451Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:06.788604Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:06.788712Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:06.788779Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:07.805316Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:07.805402Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:07.805482Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:08.832721Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:08.832833Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:08.832911Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:09.849321Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:09.849424Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:09.849502Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:10.877086Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:10.877230Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:10.877318Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:11.894439Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } 
IncludeStats: false 2025-04-07T14:21:11.894640Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:11.894759Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:12.908631Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:12.908740Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:12.908802Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:13.922235Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:13.922315Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:13.922386Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:14.935317Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:14.935415Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:14.935473Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:15.702023Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::NReplication::NController::TEvPrivate::TEvUpdateTenantNodes { Tenant: /Root } 2025-04-07T14:21:15.702068Z node 6 :REPLICATION_CONTROLLER INFO: [controller 72075186224037920] Discover tenant nodes: tenant# /Root 2025-04-07T14:21:15.702737Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::TEvDiscovery::TEvError 2025-04-07T14:21:15.957977Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:15.958098Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:15.958175Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:16.966399Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:16.966488Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: 
NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:16.966545Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:17.986894Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:17.986988Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:17.987058Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:19.006400Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:19.006518Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:19.006600Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:20.018563Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:20.018658Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:20.018717Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:21.030969Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:21.031078Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:21.031142Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:22.052493Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:22.052597Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:22.052687Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2025-04-07T14:21:23.066990Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:23.067100Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: 
NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2025-04-07T14:21:23.067160Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001473/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001473/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] |97.7%| [TA] $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] |97.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_retry.py::TestRetry::test_low_rate[kikimr0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TGRpcYdbTest::CreateYqlSession |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> GrpcConnectionStringParserTest::NoDatabaseFlag >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::BiStreamPing |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] >> 
TColumnEngineTestLogs::IndexReadWithPredicates >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] >> TColumnShardTestSchema::OneTierExternalTtl >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=33376;before_rows=0;after_rows=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=33376;after_size=70288;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=70288;after_size=107232;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=107232;after_size=144208;before_rows=3000;after_rows=4000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=144208;after_size=181232;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;componen ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:21:36.329263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:21:36.329380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:21:36.329504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:21:36.329538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:21:36.329580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:21:36.329607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:21:36.329684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-07T14:21:36.330033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:21:36.403654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:21:36.403717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:36.420038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:21:36.420205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:21:36.420356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:21:36.427348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:21:36.427580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:21:36.428172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:21:36.428408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:21:36.430484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:36.431722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:21:36.431780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:36.431903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:21:36.431963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:21:36.431999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:21:36.432157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.439252Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:21:36.559681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:21:36.559953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.560172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:21:36.560397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:21:36.560450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.562827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-07T14:21:36.562946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:21:36.563122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.563190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:21:36.563224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:21:36.563257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:21:36.568755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.568823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:21:36.568866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:21:36.570956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.571000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.571041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:36.571137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:21:36.574639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:21:36.576531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:21:36.576736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:21:36.577658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:21:36.577805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:36.577857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:36.578098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:21:36.578149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:36.578364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:21:36.578446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:21:36.580539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:21:36.580594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:21:36.580782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:36.580819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:21:36.581050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:36.581092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:21:36.581182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:21:36.581214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:21:36.581268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:21:36.581317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:21:36.581347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:21:36.581374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:21:36.581433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:21:36.581479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:21:36.581508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:21:36.583287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:21:36.583397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:21:36.583436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:21:36.583469Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:21:36.583514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:21:36.583606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
olution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:37.101190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:750:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:753:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:754:2058] recipient: [1:752:2670] Leader for TabletID 72057594046678944 is [1:755:2671] sender: [1:756:2058] recipient: [1:752:2670] 2025-04-07T14:21:37.143004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:21:37.143107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:21:37.143154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:21:37.143193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:21:37.143234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:21:37.143261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:21:37.143324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:21:37.143630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:21:37.161458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:21:37.162778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:21:37.162996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:21:37.163170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:21:37.163203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:37.163368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:21:37.163987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:21:37.164096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:21:37.164134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: 
Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:21:37.164214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.164280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.164486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:21:37.165614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.165843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.165947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.166086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-07T14:21:37.166158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:21:37.166190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:21:37.166209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:21:37.166227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:21:37.166307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.166393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.166607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-07T14:21:37.166789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:21:37.167141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.167256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.167629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.167751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.167990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read 
records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.168931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.169111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.169178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.169253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:21:37.177407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:21:37.177481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:37.177568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:21:37.177624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:21:37.177666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:21:37.177890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:755:2671] sender: [1:811:2058] recipient: [1:15:2062] 2025-04-07T14:21:37.253833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:21:37.254124Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 307us result status StatusSuccess 2025-04-07T14:21:37.254690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] >> KqpTx::RollbackRoTx >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] [GOOD] >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> KqpTx::RollbackInvalidated |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> 
TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] Test command err: 2025-04-07T14:21:16.253280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576195272775797:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:16.253908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019de/r3tmp/tmpjroW4Z/pdisk_1.dat 2025-04-07T14:21:16.628703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:16.659893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:16.660009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22986, node 1 2025-04-07T14:21:16.666292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:16.742379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:16.742404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:16.742414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:16.742519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:21:17.078080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:17.091209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:17.107235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:17.109499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:21:17.109721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:21:17.109743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:21:17.111708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:17.111737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2025-04-07T14:21:17.113407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:17.114236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:17.117699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035677160, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:17.117730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:21:17.118030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:21:17.119994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:17.120164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:17.120217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:21:17.120295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:21:17.120334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:21:17.120378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:21:17.122513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:21:17.122577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:21:17.122592Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:21:17.122655Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:21:17.201712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TheTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:21:17.202315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSchemeError, reason: Unknown column 'BlaBla' specified in key column list, at schemeshard: 72057594046644480 2025-04-07T14:21:17.208325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusSchemeError, reason: Unknown column 'BlaBla' specified in key column list, operation: CREATE TABLE, path: /Root/TheTable 2025-04-07T14:21:20.302462Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490576210054277553:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:20.302525Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019de/r3tmp/tmp6oKRBH/pdisk_1.dat 2025-04-07T14:21:20.381613Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:20.406029Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:20.406109Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:20.410510Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64365, node 4 2025-04-07T14:21:20.468214Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:20.468234Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:20.468240Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:20.468343Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:21:20.742530Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:20.742809Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:20.742830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:20.747203Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:21:20.747383Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:21:20.747395Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:21:20.750190Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:21:20.750426Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:20.750444Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:21:20.757058Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:20.769465Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035680814, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:20.769503Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:21:20.769773Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:21:20.776913Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:20.777062Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at s ... 
: 72057594046644480 2025-04-07T14:21:38.199633Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:38.201521Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.201569Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.201591Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:21:38.201839Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.201871Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.201885Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:21:38.202037Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.202059Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.202087Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2025-04-07T14:21:38.202308Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.202350Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.202363Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-07T14:21:38.202489Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.202511Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.202523Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2025-04-07T14:21:38.203934Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:21:38.208608Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035698251, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:38.208655Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035698251, at schemeshard: 72057594046644480 2025-04-07T14:21:38.208794Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:21:38.208903Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035698251, at schemeshard: 72057594046644480 2025-04-07T14:21:38.208943Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2025-04-07T14:21:38.208999Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035698251, at schemeshard: 72057594046644480 2025-04-07T14:21:38.209034Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2025-04-07T14:21:38.209086Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1744035698251 2025-04-07T14:21:38.209132Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2025-04-07T14:21:38.214172Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:38.214793Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:38.214890Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2025-04-07T14:21:38.215016Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2025-04-07T14:21:38.215265Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2025-04-07T14:21:38.215339Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2025-04-07T14:21:38.215441Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2025-04-07T14:21:38.215483Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2025-04-07T14:21:38.215566Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:21:38.215620Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2025-04-07T14:21:38.215647Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:21:38.215684Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2025-04-07T14:21:38.215698Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:2 2025-04-07T14:21:38.215710Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2025-04-07T14:21:38.215732Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2025-04-07T14:21:38.217634Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.217705Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 
2025-04-07T14:21:38.217754Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:21:38.218024Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.218052Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.218064Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T14:21:38.218195Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.218238Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.218250Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2025-04-07T14:21:38.218389Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.218426Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.218437Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2025-04-07T14:21:38.218618Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:38.218634Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:38.218644Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2025-04-07T14:21:38.218702Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T14:21:38.221889Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7490576286533860248:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:21:38.303036Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:21:38.303278Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:21:38.307644Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:21:38.457385Z node 13 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [13:7490576286533860373:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:21:38.457697Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjYwMDVlYjUtZjBkMWQ4ODEtNWE4YjBkZGUtN2YwNTE4ZTQ=, ActorId: [13:7490576286533860231:2296], ActorState: ExecuteState, TraceId: 01jr8a1nr07tddbq4bj7q98x83, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpTx::SnapshotROInteractive2 >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] >> KqpTx::RollbackRoTx [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 27050, MsgBus: 23343 2025-04-07T14:21:39.005296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576292886681514:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:39.005429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001109/r3tmp/tmpsUW7c2/pdisk_1.dat 2025-04-07T14:21:39.433877Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:39.468588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:39.476760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:39.481752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27050, node 1 2025-04-07T14:21:39.698819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:39.698839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:39.698865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:39.698964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23343 TClient is connected to server localhost:23343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:40.360273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:40.386196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:40.538167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:21:40.676478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:40.753553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:42.292994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576305771584965:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:42.293096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:42.489590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.525274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.554482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.632542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.688053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.735528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:42.832888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576305771585461:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:42.832964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:42.833363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576305771585466:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:42.837728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:42.857627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576305771585468:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:43.974088Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTM3OTYyNTgtMjQwNjAxZWUtNjdjNzI1Yy01ZTQ0OTVhNg==, ActorId: [1:7490576310066553073:2460], ActorState: ReadyState, TraceId: 01jr8a1v444jwqa5nm75d0ckmp, Create QueryResponse for error on request, msg: 2025-04-07T14:21:44.009311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576292886681514:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:44.009360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> KqpTx::RollbackInvalidated [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] Test command err: 2025-04-07T14:21:38.143135Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:21:38.402402Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:21:38.433296Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:21:38.437355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:21:38.464837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:21:38.465021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:21:38.465464Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:21:38.481838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:21:38.482202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:21:38.482454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:21:38.482583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:21:38.482711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-07T14:21:38.482833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:21:38.482947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:21:38.483131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:21:38.483255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:21:38.483365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:21:38.483473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:21:38.483614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:21:38.508398Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:21:38.510274Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2025-04-07T14:21:38.510392Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:21:38.510467Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T14:21:38.513711Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:21:38.513907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:21:38.513959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:21:38.514137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:21:38.516842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:21:38.516963Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:21:38.517032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:21:38.517164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:21:38.517257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:21:38.517334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:21:38.517379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:21:38.517587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:21:38.517681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:21:38.517740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:21:38.517776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:21:38.517931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:21:38.517992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:21:38.518046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:21:38.518086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:21:38.518162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:21:38.518201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:21:38.518234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:21:38.518278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-07T14:21:38.518349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:21:38.518404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:21:38.518614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-07T14:21:38.518705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-07T14:21:38.518787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-07T14:21:38.518895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-07T14:21:38.519111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:21:38.519169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:21:38.519203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:21:38.519392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:21:38.519445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:21:38.519485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:21:38.519687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:21:38.519731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
ytes=0;chunks=0);; 2025-04-07T14:21:45.003395Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:365:2376], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-07T14:21:45.003445Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-07T14:21:45.003735Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2025-04-07T14:21:45.006501Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2025-04-07T14:21:45.006666Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9575952;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:21:45.006727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a1d60e2c-13bb11f0-a4ba2d84-b5bf9a55;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:21:45.007096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2025-04-07T14:21:45.007145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2025-04-07T14:21:45.007192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=4; 2025-04-07T14:21:45.007259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2025-04-07T14:21:45.007319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:65;event=intervals_finished; 2025-04-07T14:21:45.007453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.007499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2025-04-07T14:21:45.007543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-07T14:21:45.007919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 
2025-04-07T14:21:45.008128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.008192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-07T14:21:45.008318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2025-04-07T14:21:45.008385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2025-04-07T14:21:45.008500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:367:2378] send ScanData to [1:362:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2025-04-07T14:21:45.008633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.008763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.008877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.009026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:21:45.009166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.009290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:21:45.009329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:367:2378] finished for tablet 9437184 2025-04-07T14:21:45.009460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:367:2378] send ScanData to [1:362:2373] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:21:45.009917Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:367:2378] and sent to [1:362:2373] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.104},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.106}],"full":{"a":1744035704903245,"name":"_full_task","f":1744035704903245,"d_finished":0,"c":0,"l":1744035705009520,"d":106275},"events":[{"name":"bootstrap","f":1744035704903837,"d_finished":2874,"c":1,"l":1744035704906711,"d":2874},{"a":1744035705009002,"name":"ack","f":1744035705007877,"d_finished":1023,"c":1,"l":1744035705008900,"d":1541},{"a":1744035705008988,"name":"processing","f":1744035704906991,"d_finished":3653,"c":5,"l":1744035705008902,"d":4185},{"name":"ProduceResults","f":1744035704905349,"d_finished":2115,"c":8,"l":1744035705009316,"d":2115},{"a":1744035705009319,"name":"Finish","f":1744035705009319,"d_finished":0,"c":0,"l":1744035705009520,"d":201},{"name":"task_result","f":1744035704907009,"d_finished":2517,"c":4,"l":1744035705007608,"d":2517}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2025-04-07T14:21:45.009992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:21:44.902105Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2025-04-07T14:21:45.010037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:21:45.010161Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.096040s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.085821s;size=0.0063152;details={columns=1;};};]};; 2025-04-07T14:21:45.010260Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:367:2378];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:66;message=aborted data locks manager; |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 19713, MsgBus: 2957 2025-04-07T14:21:40.309048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576294782435635:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:40.309207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001103/r3tmp/tmpfdVFKW/pdisk_1.dat 2025-04-07T14:21:40.766861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:40.771763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:40.771938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:40.774035Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19713, node 1 2025-04-07T14:21:40.910776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:40.910801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:40.910810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:40.910891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2957 TClient is connected to server localhost:2957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:41.469380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:41.482720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:41.494658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:41.639583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:41.819457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:41.934648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:43.670343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576307667339219:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:43.670464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:43.915698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:43.953737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:43.989323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:44.017523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:44.053951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:44.095713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:44.176034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576311962307014:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:44.176117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:44.176405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576311962307019:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:44.179593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:44.189750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576311962307021:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:45.309598Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7490576316257274650:2471], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-07T14:21:45.310873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576294782435635:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:45.310972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:45.311075Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTVmMWJhYWItZjVlMGQ2MDAtNWE4ODNiZDQtNzJiNDA3N2Q=, ActorId: [1:7490576316257274626:2460], ActorState: ExecuteState, TraceId: 01jr8a1we2ezwrv88fqd7y7a3s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jr8a1wdr6qh4ta0dy16gzxjz 2025-04-07T14:21:45.323910Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTVmMWJhYWItZjVlMGQ2MDAtNWE4ODNiZDQtNzJiNDA3N2Q=, ActorId: [1:7490576316257274626:2460], ActorState: ReadyState, TraceId: 01jr8a1wfb69c4qe26nhv211zf, Create QueryResponse for error on request, msg: |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] >> KqpSinkLocks::EmptyRangeAlreadyBroken |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> KqpTx::SnapshotRO >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] >> TestProgram::YqlKernelContains >> TestProgram::YqlKernelContains [GOOD] >> KqpTx::ExplicitTcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 29257, MsgBus: 11262 2025-04-07T14:21:42.368285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576303538312817:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:42.368330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010fd/r3tmp/tmpwqQHMH/pdisk_1.dat 2025-04-07T14:21:42.794439Z 
node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29257, node 1 2025-04-07T14:21:42.852917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:42.861914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:42.871694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:42.899600Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:42.899636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:42.899648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:42.899733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11262 TClient is connected to server localhost:11262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:43.523714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:43.562254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:43.692984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:43.848516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:43.916116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:45.479657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576316423216410:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:45.479755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:45.742173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:45.768102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:45.832545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:45.867353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:45.910746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:45.959228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:46.025909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576320718184203:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:46.026095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:46.026376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576320718184209:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:46.030070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:46.044474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576320718184211:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:47.458265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576303538312817:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:47.458560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "substring" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:substring;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10BinaryTypeE; string: [ 4C6F72656D20697073756D20C020646F6C6F720C2073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E, 4C6F72656D20697073756D20646F6C6F722073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E ] substring: [ 646F6C6F72, 616D65742E, 076D65742E, 076D65742E ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] [GOOD] 
>> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TGRpcClientLowTest::ChangeAcl [GOOD] >> KqpTx::RollbackTx2 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> LocalityOperation::LocksFromAnotherTenants [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2025-04-07T14:21:26.345649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576236682403481:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:26.346716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0018c3/r3tmp/tmpYTy6mZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5169, node 1 2025-04-07T14:21:26.748594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-07T14:21:26.748937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "PostgreSQL" } 2025-04-07T14:21:26.748948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:26.753884Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:21:26.753914Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:21:26.763912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-07T14:21:26.783126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:21:26.785516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:26.785741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:21:26.786940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:26.787002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:26.818365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:26.818881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
2025-04-07T14:21:26.818900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:26.818943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:21:26.818998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:21:26.827514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:26.864463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:26.864485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:26.864498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:26.864623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:27.210399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:27.215758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:27.217162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:27.219281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:21:27.219480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:21:27.219516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:21:27.221614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:27.221647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:21:27.222273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:27.223599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:27.227323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035687275, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:27.227355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:21:27.227616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:21:27.232994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:27.233173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:27.233254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:21:27.233344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:21:27.233379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:21:27.233429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:21:27.238059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:21:27.238110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:21:27.238134Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:21:27.238224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-07T14:21:29.363082Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:21:29.371666Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-07T14:21:31.179447Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490576258834476746:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:31.179618Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/u6pr/0018c3/r3tmp/tmpgB9Edr/pdisk_1.dat 2025-04-07T14:21:31.421142Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:31.459727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:31.460075Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:31.466373Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27970, node 4 2025-04-07T14:21:31.856076Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:31.856105Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:31.856112Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:31.856239Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:32.216832Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:32.217259Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:32.217290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:32.221641Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DA ... me: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:21:47.945607Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:47.946158Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:47.946182Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:47.949793Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:21:47.949990Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:21:47.950005Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:21:47.952475Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:47.954169Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:47.954197Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:21:47.955999Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:47.961495Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035708009, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:47.961536Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:21:47.961822Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:21:47.963772Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:47.963963Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:47.964018Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:21:47.964114Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:21:47.964157Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:21:47.964210Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:21:47.966160Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:21:47.966207Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:21:47.966224Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:21:47.966294Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:21:48.039282Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.039549Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:48.045901Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2025-04-07T14:21:48.046323Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:48.046620Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:48.046690Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:48.048418Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:48.048459Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:48.048497Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:21:48.048730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:48.048748Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:48.048759Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:21:48.049601Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:21:48.054258Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035708100, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:21:48.054320Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744035708100, at schemeshard: 72057594046644480 2025-04-07T14:21:48.054513Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:21:48.056347Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:48.056591Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:48.056652Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:21:48.056758Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:21:48.056794Z node 13 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:21:48.056850Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-07T14:21:48.059085Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:48.059136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:48.059154Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:21:48.059356Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:21:48.059373Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:21:48.059383Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:21:48.059417Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-07T14:21:48.081523Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/TheDirectory, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.081849Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:48.081875Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.081968Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:21:48.082071Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:21:48.082111Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 2, subscribers: 0 2025-04-07T14:21:48.084356Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/TheDirectory, set owner:qqq, add access: +R:qqq, remove access: -():qqq:- 2025-04-07T14:21:48.084537Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:48.084918Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:48.086458Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:21:48.086505Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:21:48.086526Z node 
13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2025-04-07T14:21:48.086875Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:21:48.086903Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:21:48.086919Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:21:48.086968Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants [GOOD] Test command err: 2025-04-07T14:21:18.638466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:18.638799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:21:18.638959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0019fb/r3tmp/tmpQpWKxX/pdisk_1.dat 2025-04-07T14:21:19.176145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:21:19.260979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:19.261176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:19.289428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:19.603252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:938:2764], Recipient [1:498:2433]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:21:19.603331Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:21:19.603379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-07T14:21:19.603473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:935:2762], Recipient [1:498:2433]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T14:21:19.603509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-07T14:21:19.731902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:21:19.732182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:19.732286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-07T14:21:19.732442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-07T14:21:19.732607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:21:19.732842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:21:19.732883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-04-07T14:21:19.732948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:21:19.733016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:21:19.733066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T14:21:19.735387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-07T14:21:19.735577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-04-07T14:21:19.735639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-07T14:21:19.735717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-04-07T14:21:19.735990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:498:2433], Recipient [1:498:2433]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:21:19.736027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:21:19.736257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:21:19.736296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:21:19.736459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:21:19.736568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:21:19.736609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:712:2582], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-07T14:21:19.736650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:712:2582], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-04-07T14:21:19.736950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:19.737018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-07T14:21:19.737088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-07T14:21:19.737122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:21:19.737227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:21:19.737907Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:938:2764], Recipient [1:498:2433]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:21:19.737951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2025-04-07T14:21:19.737982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-07T14:21:19.739609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:712:2582], Recipient [1:498:2433]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 4 } 2025-04-07T14:21:19.739658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T14:21:19.739739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:21:19.739839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:21:19.739876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:21:19.739912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:21:19.739968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:21:19.740055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:21:19.741956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:712:2582], Recipient [1:498:2433]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 2 } 2025-04-07T14:21:19.741994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-07T14:21:19.742061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:21:19.742154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:21:19.742185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:21:19.742214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-07T14:21:19.742246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-07T14:21:19.742321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-07T14:21:19.742368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:21:19.745069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2025-04-07T14:21:19.745120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-04-07T14:21:19.745336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:498:2433], Recipient [1:498:2433]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-07T14:21:19.745367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-07T14:21:19.745421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:21:19.745474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:21:19.745522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:21:19.745629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-07T14:21:19.746487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2025-04-07T14:21:19.746531Z node 1 :FLAT_TX_SCHEMES ... 5 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T14:21:48.942505Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T14:21:48.942512Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2025-04-07T14:21:48.942595Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T14:21:48.942609Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T14:21:48.942617Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 5 2025-04-07T14:21:48.942727Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T14:21:48.942756Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T14:21:48.942766Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 5 2025-04-07T14:21:48.942875Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-04-07T14:21:48.942902Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2025-04-07T14:21:48.942912Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 2 2025-04-07T14:21:48.942941Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, 
subscribers: 1 2025-04-07T14:21:48.950573Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7490576330217715688:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-07T14:21:49.043237Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710665:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-07T14:21:49.043396Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-07T14:21:49.046197Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-07T14:21:49.130612Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr8a1zz76j7e04p8t5drfkg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:21:49.298695Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr8a2089df68mqcf57bxcgx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:21:49.457321Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7490576313037844489:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:49.457405Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:49.522485Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr8a20c6dkjgtxb1n8srq31a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:21:50.813390Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr8a20c6dkjgtxb1n8srq31a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:21:50.824918Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7490576338807650539:2314] TxId: 281474976710669. Ctx: { TraceId: 01jr8a20c6dkjgtxb1n8srq31a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. 
Tx canceled, actorId: [9:7490576338807650539:2314], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-04-07T14:21:50.825410Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, ActorId: [9:7490576330217715497:2314], ActorState: ExecuteState, TraceId: 01jr8a20c6dkjgtxb1n8srq31a, Create QueryResponse for error on request, msg: 2025-04-07T14:21:50.826556Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jr8a20c6dkjgtxb1n8srq31a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDEzOWU4N2MtNjI3OTI0YjYtNjFkNGJiODYtZTM3M2RjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:21:50.836223Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-07T14:21:50.836814Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:21:50.836933Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-07T14:21:50.837181Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:21:51.175273Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7490576323796144272:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:51.175371Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:51.264263Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:21:51.267079Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.354840Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.419148Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.481985Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.565196Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.659422Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.736928Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.829914Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.899570Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:51.911827Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7490576320956997776:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:51.911934Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:51.987283Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:52.086325Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:52.175094Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:52.258737Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7490576342431834571:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:21:52.269660Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] >> TColumnEngineTestLogs::IndexTtl >> KqpTx::SnapshotRO [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> TColumnEngineTestLogs::IndexTtl [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=38296;before_rows=0;after_rows=1000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=38296;after_size=76592;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38264;portion_bytes=38264;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=76592;after_size=114856;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38264;portion_bytes=38264;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=114856;after_size=153152;before_rows=3000;after_rows=4000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=153152;after_size=191448;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRAC ... 
G;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38232;portion_bytes=38232;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=17;before_size=229712;after_size=267944;before_rows=6000;after_rows=7000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38232;portion_bytes=38232;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38264;portion_bytes=38264;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=18;before_size=267944;after_size=306208;before_rows=7000;after_rows=8000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38264;portion_bytes=38264;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38360;portion_bytes=38360;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=19;before_size=306208;after_size=344568;before_rows=8000;after_rows=9000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38360;portion_bytes=38360;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=20;before_size=344568;after_size=382864;before_rows=9000;after_rows=10000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=38296;portion_bytes=38296;portion_raw_bytes=104000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::InteractiveTx >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 6475, MsgBus: 64476 2025-04-07T14:21:48.968707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576333123293623:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:48.968769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010eb/r3tmp/tmpcVnERk/pdisk_1.dat 2025-04-07T14:21:49.363750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:49.406271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:49.406385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6475, node 1 2025-04-07T14:21:49.410788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:49.466857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:49.466877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:49.466883Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-07T14:21:49.466975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64476 TClient is connected to server localhost:64476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:50.063609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:50.088337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:50.222320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:50.417716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:50.508311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:52.065743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350303164296:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:52.068456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:52.344141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.378311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.406369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.456505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.486397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.546403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.611405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350303164793:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:52.611488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:52.611859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350303164798:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:52.615525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:52.625645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576350303164800:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:53.980459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576333123293623:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:53.981626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:54.179784Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzE4ZWFmYzYtYjQzNDZhMTktYmQ0OTAyYTQtMmUyZDc2OGU=, ActorId: [1:7490576354598132403:2460], ActorState: ExecuteState, TraceId: 01jr8a25214z09244jm06t7t60, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] [GOOD] >> KqpTx::RollbackByIdle >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 64150, MsgBus: 3715 2025-04-07T14:21:44.467236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576315596339347:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:44.467299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f7/r3tmp/tmpSeh7we/pdisk_1.dat 2025-04-07T14:21:44.881984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:44.889555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:44.889673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64150, node 1 2025-04-07T14:21:44.891982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:44.980164Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:44.980191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:44.980197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:44.980279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3715 TClient is connected to server localhost:3715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:45.618668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:45.634223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:45.646060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:45.795004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:45.994847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:46.065434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:48.041624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576328481242936:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:48.041800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:48.089165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.119289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.149693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.200189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.247310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.279560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:48.370605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576332776210730:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:48.370705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:48.371051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576332776210735:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:48.374956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:48.390829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576332776210737:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:49.467672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576315596339347:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:49.467768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:49.702018Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-07T14:21:49.703179Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576337071178499:2459], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7490576337071178385:2459]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 2]`. ShardID=72075186224037888, Sink=[1:7490576337071178499:2459].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:21:49.713645Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576337071178492:2459], SessionActorId: [1:7490576337071178385:2459], Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7490576337071178385:2459]. isRollback=0 2025-04-07T14:21:49.713849Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBlMmM0MTUtOGM3N2FkZWQtZmYzMTQ3MDQtOGJiMTEzMjc=, ActorId: [1:7490576337071178385:2459], ActorState: ExecuteState, TraceId: 01jr8a20pp3e59tfv437a9109j, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7490576337071178493:2459] from: [1:7490576337071178492:2459] 2025-04-07T14:21:49.714078Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490576337071178493:2459] TxId: 281474976710673. Ctx: { TraceId: 01jr8a20pp3e59tfv437a9109j, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBlMmM0MTUtOGM3N2FkZWQtZmYzMTQ3MDQtOGJiMTEzMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:21:49.720288Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBlMmM0MTUtOGM3N2FkZWQtZmYzMTQ3MDQtOGJiMTEzMjc=, ActorId: [1:7490576337071178385:2459], ActorState: ExecuteState, TraceId: 01jr8a20pp3e59tfv437a9109j, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4415, MsgBus: 8084 2025-04-07T14:21:50.516908Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576340800970155:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:50.516951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f7/r3tmp/tmp0pc9tA/pdisk_1.dat 2025-04-07T14:21:50.668732Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:50.683743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:50.683814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:50.685225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4415, node 2 2025-04-07T14:21:50.764473Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:50.764496Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:50.764503Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:50.764588Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8084 TClient is connected to server localhost:8084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:51.298787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:51.309159Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:21:51.327365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:21:51.403011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:51.566887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:21:51.630989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.559721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576353685873731:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.559788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.607388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.675912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.719909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.758189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.796959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.872819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.961755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576353685874239:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.961841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.962047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576353685874244:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.965868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:53.977333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576353685874246:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:21:55.396364Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFhYTBkZjEtNTMxNDJkYTItYmMxYjEzNWMtYmJhY2E2OWI=, ActorId: [2:7490576357980841854:2460], ActorState: ExecuteState, TraceId: 01jr8a267ma39cj9pmnkr274wp, Create QueryResponse for error on request, msg: |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] >> KqpTx::TooManyTx >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> KqpTx::RollbackTx2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:21:54.682285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:21:54.682390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:21:54.682428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:21:54.682462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:21:54.682503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:21:54.682529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:21:54.682573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:21:54.682859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:21:54.747278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:21:54.747330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:54.758572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:21:54.758701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:21:54.758845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:21:54.765587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-07T14:21:54.765799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:21:54.766401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:21:54.766601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:21:54.769241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:54.770474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:21:54.770528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:54.770662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:21:54.770706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:21:54.770754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:21:54.770916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.780514Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:21:54.913742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:21:54.913971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.914180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:21:54.914423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:21:54.914495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.917656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:21:54.917775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:21:54.917962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.918032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:21:54.918062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:21:54.918110Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:21:54.920486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.920551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:21:54.920586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:21:54.922130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.922177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.922221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:54.922299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:21:54.930688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:21:54.932647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:21:54.932800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:21:54.933513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:21:54.933628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:54.933668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:54.933866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:21:54.933902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:21:54.934032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:21:54.934090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:21:54.935715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:21:54.935745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-07T14:21:54.935856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:21:54.935881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:21:54.936102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:21:54.936135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:21:54.936197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:21:54.936220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:21:54.936260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:21:54.936312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:21:54.936351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:21:54.936377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:21:54.936445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:21:54.936483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:21:54.936511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:21:54.938004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:21:54.938147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:21:54.938173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:21:54.938198Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:21:54.938232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:21:54.938308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T14:21:57.170725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:21:57.170781Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T14:21:57.170889Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T14:21:57.170927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:21:57.170978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-07T14:21:57.171018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:21:57.171064Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T14:21:57.171096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T14:21:57.171223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:21:57.171266Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-07T14:21:57.171297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:21:57.171859Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:21:57.171957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:21:57.171992Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:21:57.172028Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:21:57.172064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:21:57.172131Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-07T14:21:57.172157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-04-07T14:21:57.176044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T14:21:57.176147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:21:57.176186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:712:2632] TestWaitNotification: OK eventTxId 105 2025-04-07T14:21:57.687421Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:57.687881Z node 2 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 487us result status StatusSuccess 2025-04-07T14:21:57.688497Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:57.781411Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:21:57.781684Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 316us result status StatusSuccess 2025-04-07T14:21:57.782260Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { 
Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-04-07T14:21:57.785014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:21:57.785196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:21:57.786563Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2025-04-07T14:21:57.789665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:21:57.789831Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:21:57.790197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:21:57.790236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:21:57.790649Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:21:57.790741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:21:57.790788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:782:2695] TestWaitNotification: OK eventTxId 106 >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 14712, MsgBus: 13533 2025-04-07T14:21:52.354997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576348313540860:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:52.355078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e0/r3tmp/tmpmaCQja/pdisk_1.dat 2025-04-07T14:21:52.864898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:52.880720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:52.880816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:52.890329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14712, node 1 2025-04-07T14:21:52.959439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:52.959464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:52.959473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:52.959566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13533 TClient is connected to server localhost:13533 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:53.518200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:53.540383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:53.677141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:53.837337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:53.910042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:55.577353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576361198444439:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:55.577448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:55.775297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.841254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.865781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.893175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.917717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.953093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:55.990382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576361198444934:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:55.990454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:55.990575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576361198444939:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:55.993568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:56.018075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576361198444941:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:57.345501Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjYyMTdkZjAtNTQ1NDliOGMtYTcxMDMxMS1jMjA2NTNmOA==, ActorId: [1:7490576369788379845:2460], ActorState: ReadyState, TraceId: 01jr8a286q111vwza74921wwd9, Create QueryResponse for error on request, msg: 2025-04-07T14:21:57.346530Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576348313540860:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:57.346588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> TestProgram::CountWithNulls >> TestProgram::CountWithNulls [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] >> KqpSinkTx::OlapSnapshotRO >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "0" } Function { Id: 7 Arguments { Name: "uid" } } } } Command { Filter { Predicate { Name: "0" } } } Command { GroupBy { Aggregates { Column { Name: "1" } Function { Id: 2 } } } } Command { Projection { Columns { Name: "1" } } } ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=IsNull;arguments=[uid;];column=G:0;};];filters=[G:0;];group_by_assignes=[{op=num_rows;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:1;};];projections=[];};{projections=[G:1;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10UInt64TypeE; >> KqpTx::InteractiveTx [GOOD] >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite |97.9%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] [GOOD] >> KqpTx::RollbackByIdle [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InteractiveTx [GOOD] Test command err: Trying to start YDB, gRPC: 5406, MsgBus: 31109 2025-04-07T14:21:50.043858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576337862489958:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:50.044016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e6/r3tmp/tmpHeaG5d/pdisk_1.dat 2025-04-07T14:21:50.343486Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5406, node 1 2025-04-07T14:21:50.409163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:50.409243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:50.426365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:50.473267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:50.473287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:50.473307Z node 
1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:50.473392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31109 TClient is connected to server localhost:31109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:50.979687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:50.992131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:50.997620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:51.146954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:21:51.302485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:21:51.394933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:53.026137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350747393543:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.026267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.291642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.323447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.352797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.385222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.417955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.493558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.587657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350747394045:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.587754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.588051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576350747394050:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:53.591328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:53.603697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576350747394052:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:21:55.048829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576337862489958:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:55.055722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:55.139983Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTMwZTM1MmUtMTA4NmIxZTUtMmEwN2NkNTMtM2IxNTAxMWY=, ActorId: [1:7490576355042361660:2460], ActorState: ReadyState, TraceId: 01jr8a261v98tn7ynq3kpd0rr2, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6209, MsgBus: 9579 2025-04-07T14:21:55.778023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576360099600788:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:55.778068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010e6/r3tmp/tmpL910bg/pdisk_1.dat 2025-04-07T14:21:55.902239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:55.902316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:55.903572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:55.906236Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6209, node 2 2025-04-07T14:21:56.024546Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:56.024581Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:56.024589Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:56.024715Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9579 TClient is connected to server localhost:9579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:56.431245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:21:56.441396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:56.514877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:56.655337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:56.748485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:58.665687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576372984504366:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:58.665791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:58.700193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.744929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.787680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.838773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.871467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.945000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.042041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576377279472167:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:59.042151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:59.042523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576377279472172:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:59.046378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:21:59.062797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576377279472174:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:22:00.780860Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576360099600788:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:00.780931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackByIdle [GOOD] Test command err: Trying to start YDB, gRPC: 27388, MsgBus: 19830 2025-04-07T14:21:56.346526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576364403731014:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:56.346595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010d4/r3tmp/tmpH7dOVk/pdisk_1.dat 2025-04-07T14:21:56.680246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27388, node 1 2025-04-07T14:21:56.747718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:56.749047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:56.754649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:56.794375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:56.794400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:56.794405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:56.794485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19830 TClient is connected to server localhost:19830 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:21:57.302672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:57.327460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:57.458654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:57.603414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:57.668640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:59.409803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576377288634595:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:59.409941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:59.718921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.753713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.790621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.876695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.908364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:21:59.983817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:00.108572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576381583602394:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:00.108675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:00.109312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576381583602399:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:00.113670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:00.135716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576381583602401:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:01.348765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576364403731014:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:01.348826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:01.527769Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmExNTZlNjYtNWNiZDgyYmQtMWM3ZWY5YmQtY2E2OGU1NWY=, ActorId: [1:7490576385878570013:2462], ActorState: ReadyState, TraceId: 01jr8a2c9ea8zrpse7j33jpva9, Create QueryResponse for error on request, msg: |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] [GOOD] >> KqpTx::TooManyTx [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] |97.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::TooManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 64186, MsgBus: 23470 2025-04-07T14:21:57.975042Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576368481989714:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:57.975112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ce/r3tmp/tmpXt39Iw/pdisk_1.dat 2025-04-07T14:21:58.344775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:58.374868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:58.374990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64186, node 1 2025-04-07T14:21:58.378397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:58.493361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:58.493395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:58.493402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:58.493498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23470 TClient is connected to server localhost:23470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:59.121335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:59.142359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:59.290583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:59.455173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:59.540599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:01.604109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576385661860583:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:01.615368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:01.657164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:01.707629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:01.761919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:01.829031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:01.878244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:01.933923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:02.047104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576389956828377:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:02.047178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:02.047433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576389956828382:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:02.050603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:02.066325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576389956828384:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:02.975860Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576368481989714:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:02.975956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:03.486820Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDNmYmM3MWMtZTg0ZGRjZjgtMjc2NTQ2NzUtMTMyNmNhNWE=, ActorId: [1:7490576394251795995:2462], ActorState: ReadyState, TraceId: 01jr8a2e5jbfd49jhfa8j2g03k, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:824: Too many transactions, current active: 2 MaxTxPerSession: 2 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] >> KqpSinkTx::OlapLocksAbortOnCommit >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] |97.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/scheme_shard/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> test_auditlog.py::test_single_dml_query_logged[update] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OltpMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 15449, MsgBus: 16267 2025-04-07T14:21:59.208892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576379687262912:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:59.209046Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c8/r3tmp/tmpRzHFXQ/pdisk_1.dat 2025-04-07T14:21:59.693845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:59.696209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:59.696306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:59.714474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15449, node 1 2025-04-07T14:21:59.835502Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:59.835786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:59.835803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:59.835898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:00.635380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:00.653627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:22:00.671414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:00.865418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:01.094375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:22:01.178930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:03.440607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576396867133652:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:03.440895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:03.793600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:03.870217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:03.922189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:03.955445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:04.036814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:04.076455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:04.155284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576401162101445:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.155369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.155444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576401162101450:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.159174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:04.174485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576401162101452:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:04.212780Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576379687262912:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:04.212848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:06.170913Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWNmMjkwMDgtOWJhZDJmNzMtODc1MGI1NjUtNTAxYzU4Zjc=, ActorId: [1:7490576405457069104:2460], ActorState: ExecuteState, TraceId: 01jr8a2gshavxx8ggcvs2dddm9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 29675, MsgBus: 5786 2025-04-07T14:22:07.165967Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576414432533286:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:07.166025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c8/r3tmp/tmpdeDTNV/pdisk_1.dat 2025-04-07T14:22:07.312149Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:07.313424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:07.313499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:07.317352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29675, node 2 2025-04-07T14:22:07.391443Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:07.391462Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:07.391469Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:07.391549Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5786 TClient is connected to server localhost:5786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:22:07.902409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:07.909478Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:22:07.934464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:22:08.024863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-07T14:22:08.319205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:08.434814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:10.760863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576427317436865:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:10.760963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:10.799444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:10.857235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:10.913382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:10.968879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:11.023140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:11.078887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:11.184587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576431612404657:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:11.184699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:11.184906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576431612404662:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:11.188931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:11.202588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576431612404664:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:22:12.206011Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576414432533286:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:12.206094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:13.277726Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODMxMDRjYzEtOGExN2FmYTEtNGMzYmYyZWUtOTU5Yjg2OTA=, ActorId: [2:7490576435907372277:2461], ActorState: ExecuteState, TraceId: 01jr8a2qnab0ndqkbpp63t3w9n, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> test_auditlog.py::test_dml_begin_commit_logged >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] >> test_auditlog.py::test_single_dml_query_logged[delete] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test_auditlog.py::test_single_dml_query_logged[replace] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] [GOOD] 
>> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 9175, MsgBus: 11979 2025-04-07T14:21:47.879336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576328615457285:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:47.879403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f1/r3tmp/tmpdRqbfe/pdisk_1.dat 2025-04-07T14:21:48.238768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:48.268901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:48.269011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:21:48.270336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9175, node 1 2025-04-07T14:21:48.333024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:48.333044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:48.333050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:48.333120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11979 TClient is connected to server localhost:11979 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:21:48.950240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:48.962142Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:51.029628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576345795327086:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:51.029714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576345795327071:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:51.029816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:51.034372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:21:51.042986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576345795327109:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:21:51.425101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:21:51.556382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:21:52.690329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:21:53.382775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576328615457285:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:53.460375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:21:54.183257Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTdmYzhmZDktOWRkMTM4NWYtZmRlMjY5NDYtMjNlNTU4OTg=, ActorId: [1:7490576354385269773:2939], ActorState: ExecuteState, TraceId: 01jr8a250n90zen67ychj9gj5q, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 26655, MsgBus: 8835 2025-04-07T14:22:00.325617Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576381561371118:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:00.325660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010f1/r3tmp/tmpe5dprQ/pdisk_1.dat 2025-04-07T14:22:00.506751Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:00.519709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:00.523061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:00.526290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26655, node 2 2025-04-07T14:22:00.671869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:00.671905Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:00.671927Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:00.672028Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8835 TClient is connected to server localhost:8835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:01.454861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:01.465119Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:22:04.533402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576398741240927:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.533516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576398741240938:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.533562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:04.551531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:22:04.588276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576398741240941:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:22:04.756522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:22:04.973754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7490576398741241146:2317];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:04.973960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7490576398741241146:2317];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:04.974252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7490576398741241146:2317];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:04.974382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7490576398741241146:2317];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:04.974516Z node 2 :TX_COL ... 83040:3075];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.553289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7490576420216082422:2859];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.554007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7490576420216083008:3057];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.554025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576420216083154:3108];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.554169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576420216083048:3078];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.554222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7490576420216083026:3070];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.555495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7490576420216083222:3118];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.556110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7490576420216082691:2966];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.556278Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037998;self_id=[2:7490576420216082442:2878];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037998;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.556603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490576420216083106:3094];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490576420216082512:2919];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7490576420216083206:3113];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7490576420216083047:3077];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576420216083154:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561792Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7490576420216083026:3070];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.561893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7490576420216083222:3118];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:15.638573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E31 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":1,"pk":"10;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647,"pk":"10;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"11;Session1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2025-04-07T14:22:15.644827Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7490576398741241158:2321];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 
0;tx_id=0; 2025-04-07T14:22:15.644983Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576445985891049:3700], SessionActorId: [2:7490576437395955942:3700], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[2:7490576445985891049:3700].{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2025-04-07T14:22:15.674468Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576445985891049:3700], SessionActorId: [2:7490576437395955942:3700], Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7490576437395955942:3700]. isRollback=0 2025-04-07T14:22:15.674640Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M0NTUxZmYtZGJhZDJlMGUtNGE2ZGZlZC03M2FlOWVj, ActorId: [2:7490576437395955942:3700], ActorState: ExecuteState, TraceId: 01jr8a2sf49wfzw22qhhqyfyfx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7490576445985891247:3700] from: [2:7490576445985891049:3700] 2025-04-07T14:22:15.674793Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490576445985891247:3700] TxId: 281474976715671. Ctx: { TraceId: 01jr8a2sf49wfzw22qhhqyfyfx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2M0NTUxZmYtZGJhZDJlMGUtNGE2ZGZlZC03M2FlOWVj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2025-04-07T14:22:15.700099Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M0NTUxZmYtZGJhZDJlMGUtNGE2ZGZlZC03M2FlOWVj, ActorId: [2:7490576437395955942:3700], ActorState: ExecuteState, TraceId: 01jr8a2sf49wfzw22qhhqyfyfx, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 2025-04-07T14:22:16.109731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7490576403036209754:2478];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.110006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7490576403036209754:2478];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.122883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7490576403036209646:2454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.123101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7490576403036209646:2454];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037972;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.123246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7490576403036209464:2423];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037996;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.123365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7490576403036209464:2423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037996;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.123488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7490576403036209465:2424];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037993;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.123604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7490576403036209465:2424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037993;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.139633Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490576398741241216:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.155926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7490576398741241160:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.156164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7490576398741241160:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.158105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490576398741241216:2326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.160393Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[2:7490576398741241169:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:16.160601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7490576398741241169:2325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THeavyPerfTest::TTestLoadEverything 2025-04-07 14:22:17,787 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:22:18,500 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 469827 75.6M 74.5M 21.4M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/002186/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args 471929 7.2G 6.9G 6.9G └─ ydb-core-mind-hive-ut --trace-path-append /home/runner/.ya/build/build_root/u6pr/002186/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/chunk1/ytest.report Test command err: 2025-04-07T14:12:27.133805Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.139025Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.139880Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-07T14:12:27.142710Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-07T14:12:27.149765Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-07T14:12:27.149836Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.150698Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:47:2073] ControllerId# 72057594037932033 2025-04-07T14:12:27.150769Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.150886Z node 1 :BS_NODE DEBUG: 
{NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.151176Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.170092Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.170156Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.171970Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:55:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172166Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:56:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172302Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:57:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172433Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172556Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172710Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172857Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:46:2072] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.172886Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.172962Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:47:2073] 2025-04-07T14:12:27.172995Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:47:2073] 2025-04-07T14:12:27.173040Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.173075Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.176030Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.176108Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2025-04-07T14:12:27.178499Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.178616Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-07T14:12:27.179424Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2025-04-07T14:12:27.179466Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2025-04-07T14:12:27.179529Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2025-04-07T14:12:27.179698Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:282} StartRequestReportingThrottler 2025-04-07T14:12:27.180220Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-07T14:12:27.180251Z node 2 :BS_PROXY NOTICE: 
EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-07T14:12:27.181650Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.181795Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:74:2076] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.181916Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.182094Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.182224Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.182370Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.182477Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2025-04-07T14:12:27.182498Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-07T14:12:27.182546Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2025-04-07T14:12:27.182571Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2025-04-07T14:12:27.182616Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-07T14:12:27.182646Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2025-04-07T14:12:27.182969Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-07T14:12:27.183233Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2025-04-07T14:12:27.183276Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.183330Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.183593Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.197288Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:47:2073] 2025-04-07T14:12:27.197354Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.197388Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-07T14:12:27.206288Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.225353Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.225417Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.230187Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.230613Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.230849Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send 
[1:47:2073] 2025-04-07T14:12:27.230890Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.230912Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-07T14:12:27.231001Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-07T14:12:27.231209Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-07T14:12:27.231282Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:51:2064] 2025-04-07T14:12:27.231307Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:51:2064] 2025-04-07T14:12:27.231382Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.231628Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.231847Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-07T14:12:27.231955Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:71:2064] 2025-04-07T14:12:27.231974Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:71:2064] 2025-04-07T14:12:27.232009Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-07T14:12:27.232060Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-07T14:12:27.232132Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-07T14:12:27.232175Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [2:71:2064] 2025-04-07T14:12:27.232214Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-07T14:12:27.232249Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:89:2085] 2025-04-07T14:12:27.232267Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:89:2085] 2025-04-07T14:12:27.232306Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-07T14:12:27.232332Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: ... 
tablet Unknown.610127.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325720Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610126.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325749Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610125.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325772Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610124.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325795Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610123.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325815Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610122.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325836Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610121.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325861Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610120.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325886Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610119.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325912Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610118.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325937Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610117.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.325982Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610116.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326010Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610115.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326036Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610114.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326063Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610113.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326092Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610112.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326119Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610111.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326146Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610110.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326173Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610109.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326201Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610108.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326227Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610107.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326252Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610106.Leader.1 is not ready for 
anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326277Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610105.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326304Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610104.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326331Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610103.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.326359Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610102.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.428875Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610101.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.428956Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610100.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.428988Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610099.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429016Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610098.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429041Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610097.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429067Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610096.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429094Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610095.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429123Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610094.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429150Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610093.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429174Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610092.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429197Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610091.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429226Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610090.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429254Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610089.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429282Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610088.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429309Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610087.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429333Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610086.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429361Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610085.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 
2025-04-07T14:22:18.429387Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610084.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429413Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610083.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429442Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610082.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429469Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610081.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429494Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610080.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429520Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610079.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429547Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610078.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429572Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610077.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429597Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610076.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429622Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610075.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429649Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610074.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429675Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610073.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429704Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610072.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429733Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610071.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429760Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610070.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429786Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610069.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429814Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610068.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429841Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610067.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429868Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610066.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429895Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610065.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-04-07T14:22:18.429921Z node 1 :HIVE WARN: HIVE#72057594037927937 The tablet Unknown.610064.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/002186/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/002186/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapNamedStatement >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpLimits::TooBigQuery [GOOD] >> KqpLimits::TooBigKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 17045, MsgBus: 30932 2025-04-07T14:22:05.329786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576405677645779:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:05.329831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b6/r3tmp/tmp7WyRQI/pdisk_1.dat 2025-04-07T14:22:05.979329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:05.979451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:05.985764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:22:06.041699Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17045, node 1 2025-04-07T14:22:06.287924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:06.287947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:06.287955Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:06.288032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30932 TClient is connected to server localhost:30932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:07.198997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:07.241368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:22:07.247479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:22:07.395969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:22:07.562673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:07.650138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:09.362958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576422857516440:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:09.363176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:09.634446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.668444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.719432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.791679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.842221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.897097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:09.978668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576422857516941:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:09.978760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:09.979536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576422857516946:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:09.983514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:09.996499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576422857516948:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:10.332986Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576405677645779:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:10.377576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:13.692527Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-07T14:22:13.697994Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576440037386721:2460], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [1:7490576427152484595:2460]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 3]`. ShardID=72075186224037891, Sink=[1:7490576440037386721:2460].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:22:13.698658Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576440037386702:2460], SessionActorId: [1:7490576427152484595:2460], Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7490576427152484595:2460]. isRollback=0 2025-04-07T14:22:13.698791Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTAxNjBlOTktYmJhOTMyMTMtYWYwNDNkODItYzEwMGM2MTM=, ActorId: [1:7490576427152484595:2460], ActorState: ExecuteState, TraceId: 01jr8a2qv021zw64kqp41txgqp, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7490576440037386703:2460] from: [1:7490576440037386702:2460] 2025-04-07T14:22:13.699036Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490576440037386703:2460] TxId: 281474976710673. Ctx: { TraceId: 01jr8a2qv021zw64kqp41txgqp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTAxNjBlOTktYmJhOTMyMTMtYWYwNDNkODItYzEwMGM2MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:22:13.719331Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTAxNjBlOTktYmJhOTMyMTMtYWYwNDNkODItYzEwMGM2MTM=, ActorId: [1:7490576427152484595:2460], ActorState: ExecuteState, TraceId: 01jr8a2qv021zw64kqp41txgqp, Create QueryResponse for error on request, msg: 2025-04-07T14:22:13.721074Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-04-07T14:22:13.721217Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744035733734 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" } Trying to start YDB, gRPC: 21312, MsgBus: 24053 2025-04-07T14:22:14.455352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576442677282440:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:14.455618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b6/r3tmp/tmpn4uVUg/pdisk_1.dat 2025-04-07T14:22:14.703586Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:14.726548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:14.726633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:14.732547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21312, node 2 2025-04-07T14:22:14.877183Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:14.877203Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:14.877211Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:14.877308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24053 TClient is connected to server localhost:24053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:22:15.750075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:15.765495Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:22:15.785742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:15.905342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:16.178882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:16.285560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:19.019059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576464152120502:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:19.023512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:19.071677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.121560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.184193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.216968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.260999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.339923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:19.396002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576464152121002:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:19.396081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:19.396404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576464152121007:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:19.400429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:19.411308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576464152121009:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:19.458928Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576442677282440:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:19.458995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:23.431669Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWNjZDYxY2QtOTJkYzVjOWMtYzBjOGY1NWQtYTFlODVkYg==, ActorId: [2:7490576468447088624:2463], ActorState: ExecuteState, TraceId: 01jr8a31e38rvsr2qc2378cje6, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:122:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:128:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:131:2058] recipient: [1:107:2139] 2025-04-07T14:13:49.467734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:13:49.467824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:49.467858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:13:49.467888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:13:49.467934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:13:49.467984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:13:49.468047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:13:49.468317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:13:49.522432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:13:49.522467Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2025-04-07T14:13:49.529663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-07T14:13:49.529749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:13:49.529852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:13:49.538104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:13:49.538250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:13:49.538780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.539082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:13:49.543208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.544436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:49.544478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.544597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:13:49.544630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:49.544656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:13:49.544763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2211] Leader for TabletID 72057594037968897 is [1:215:2215] sender: [1:216:2058] recipient: [1:209:2211] 2025-04-07T14:13:49.549076Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-07T14:13:49.627401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:13:49.627545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.627683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:13:49.627827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:13:49.627866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.629285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.629370Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:13:49.629466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.629504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:13:49.629527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:13:49.629557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:13:49.630760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.630803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:13:49.630832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:13:49.632158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.632198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.632237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.632275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.635592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:13:49.637046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:13:49.637213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2152] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:13:49.637959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:13:49.638071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969448 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:13:49.638115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.638316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:13:49.638359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:13:49.638492Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:13:49.638553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:13:49.640216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:13:49.640255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:13:49.640372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:13:49.640410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:13:49.640696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:13:49.640733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:13:49.640811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:13:49.640862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.640908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:13:49.640968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:13:49.641005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:13:49.641034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:13:49.641088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:13:49.641129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:13:49.641158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, ... 
tion_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } 
PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:22:23.086987Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:22:23.087340Z node 202 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 392us result status StatusSuccess 2025-04-07T14:22:23.088322Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:22:23.103487Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1099:2871] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-07T14:22:23.103629Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1046:2871] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-07T14:22:23.103802Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1099:2871] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744035743045229 Step: 5000003 TxId: 18446744073709551615 PathId: 
[OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744035743045229 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744035743045229 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-07T14:22:23.106609Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1099:2871] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-07T14:22:23.106727Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1046:2871] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> test_ttl.py::TestTTLAlterSettings::test_case >> LabeledDbCounters::OneTabletRestart [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> LabeledDbCounters::OneTabletRestart [GOOD] Test command err: 2025-04-07T14:16:51.567073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490575057035815870:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:51.567140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ad/r3tmp/tmp0lgG92/pdisk_1.dat 2025-04-07T14:16:51.813632Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9695, node 1 2025-04-07T14:16:51.884646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:16:51.884694Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:16:51.884704Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-07T14:16:51.884817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:51.903440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:51.903543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:51.906537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:16:52.110215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:52.127991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:52.138600Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490575057720712534:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.138654Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-04-07T14:16:52.152229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:52.152285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:52.154787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-07T14:16:52.155493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:52.179010Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-07T14:16:52.179069Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-07T14:16:52.232538Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7490575057720712663:2176], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-07T14:16:52.232898Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-07T14:16:52.232982Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-07T14:16:52.233576Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-07T14:16:52.233630Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-07T14:16:52.233701Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-07T14:16:52.233763Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-07T14:16:52.233834Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-07T14:16:52.233918Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-07T14:16:52.233966Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-07T14:16:52.234013Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-07T14:16:52.234053Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-07T14:16:52.234093Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-07T14:16:52.234162Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-07T14:16:52.234214Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-07T14:16:52.234269Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-07T14:16:52.234340Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-04-07T14:16:52.234394Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-04-07T14:16:52.234432Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-07T14:16:52.234475Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-07T14:16:52.234519Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-07T14:16:52.234639Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-07T14:16:52.000000Z 2025-04-07T14:16:52.234777Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 
2025-04-07T14:16:52.234875Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:16:52.236472Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7490575057720712663:2176], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2025-04-07T14:16:52.238603Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7490575057720712663:2176], database# /Root/Database1, no sysview processor 2025-04-07T14:16:52.241627Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-07T14:16:52.243851Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2025-04-07T14:16:52.245786Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-07T14:16:52.248003Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-07T14:16:52.262677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:16:52.273906Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490575060759471631:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:52.273971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:52.280125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:52.280187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-04-07T14:16:52.282205Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:16:52.283085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:52.317202Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2025-04-07T14:16:52.317241Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2025-04-07T14:16:52.356971Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-07T14:16:52.357957Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7490575060759471721:2139], path id# [OwnerId: 72057594046644480, LocalPathId: 3], service# 2 2025-04-07T14:16:52.358037Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Complete 2025-04-07T14:16:52.358085Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInit::Execute 2025-04-07T14:16:52.358669Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-07T14:16:52.358743Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval metrics: query count# 0 2025-04-07T14:16:52.358786Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval query tops: total query count# 0 2025-04-07T14:16:52.358849Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-07T14:16:52.358891Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 6, result count# 0 2025-04-07T14:16:52.358916Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 7, result count# 0 2025-04-07T14:16:52.358948Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 8, result count# 0 2025-04-07T14:16:52.358991Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 9, result count# 0 2025-04-07T14:16:52.359018Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 10, result count# 0 2025-04-07T14:16:52.359071Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 11, result count# 0 2025-04-07T14:16:52.359099Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 12, result count# 0 2025-04-07T14:16:52.359123Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 13, result count# 0 2025-04-07T14:16:52.359156Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 14, result count# 0 2025-04-07T14:16:52.359184Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 15, result count# 0 2025-04-07T14:16:52.359214Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 16, partCount count# 0 2025-04-07T14:16:52.359241Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 19, partCount count# 0 2025-04-07T1 ... 
37894] PersistQueryTopResults: table id# 10, interval end# 2025-04-07T14:22:22.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529006Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2025-04-07T14:22:22.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529029Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2025-04-07T14:22:22.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529054Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529078Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529100Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.529124Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:22.551450Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2025-04-07T14:22:23.053428Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Execute 2025-04-07T14:22:23.053487Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-07T14:22:23.000000Z, partition count# 0 2025-04-07T14:22:23.053513Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 18, partition interval end# 2025-04-07T15:00:00.000000Z, partition count# 0 2025-04-07T14:22:23.053611Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] Reset: interval end# 2025-04-07T14:22:23.000000Z 2025-04-07T14:22:23.061113Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Complete 2025-04-07T14:22:23.204897Z node 11 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-07T14:22:23.584384Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Execute 2025-04-07T14:22:23.584456Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryResults: interval end# 2025-04-07T14:22:23.000000Z, query count# 0 2025-04-07T14:22:23.584483Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 8, interval end# 2025-04-07T14:22:23.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584510Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2025-04-07T14:22:23.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584532Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2025-04-07T14:22:23.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584554Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2025-04-07T14:22:23.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584579Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584604Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 
2025-04-07T14:22:23.584627Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.584651Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2025-04-07T15:00:00.000000Z, query count# 0, persisted# 0 2025-04-07T14:22:23.594432Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2025-04-07T14:22:24.001003Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [10:7490575702075959661:2070], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.000970Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [12:7490575703417539811:2061], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.001017Z node 12 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [12:7490575703417539811:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:22:24.002350Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [12:7490575707712507271:2167], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.002375Z node 12 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [12:7490575707712507271:2167], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:22:24.001043Z node 10 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [10:7490575702075959661:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:22:24.002896Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [10:7490575702075959824:2143], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.004007Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyLabeledCounters: services count# 1 2025-04-07T14:22:24.004083Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [11:7490575710079278232:2058], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.004104Z node 11 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [11:7490575710079278232:2058], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-04-07T14:22:24.002932Z node 10 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [10:7490575702075959824:2143], query logs count# 0, processor ids count# 2, processor id to database count# 1 2025-04-07T14:22:24.009081Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [11:7490575705784310873:2061], interval end# 2025-04-07T14:22:24.000000Z, event interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.009132Z node 11 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [11:7490575705784310873:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-07T14:22:24.054168Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Execute 2025-04-07T14:22:24.054227Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-07T14:22:24.000000Z, partition count# 0 2025-04-07T14:22:24.054254Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 18, partition interval 
end# 2025-04-07T15:00:00.000000Z, partition count# 0 2025-04-07T14:22:24.054356Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] Reset: interval end# 2025-04-07T14:22:24.000000Z 2025-04-07T14:22:24.058437Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Complete 2025-04-07T14:22:24.138204Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyCounters: services count# 1 iteration 6
name=topic.partition.alive_count: 32
name=topic.partition.init_duration_milliseconds_max: 49
name=topic.partition.producers_count_max: 0
name=topic.partition.read.inflight_throttled_microseconds_max: 0
name=topic.partition.read.speed_limit_bytes_per_second: 0
name=topic.partition.read.throttled_microseconds_max: 0
name=topic.partition.read_without_consumer.speed_limit_bytes_per_second: 0
name=topic.partition.read_without_consumer.throttled_microseconds_max: 0
name=topic.partition.storage_bytes_max: 0
name=topic.partition.total_count: 32
name=topic.partition.uptime_milliseconds_min: 59730
name=topic.partition.write.bytes_per_day_max: 0
name=topic.partition.write.bytes_per_hour_max: 0
name=topic.partition.write.bytes_per_minute_max: 0
name=topic.partition.write.idle_milliseconds_max: 59748
name=topic.partition.write.lag_milliseconds_max: 0
name=topic.partition.write.speed_limit_bytes_per_second: 50000000
name=topic.partition.write.throttled_microseconds_max: 0
name=topic.producers_count: 0
name=topic.reserve.limit_bytes: 0
name=topic.reserve.used_bytes: 0
name=topic.storage_bytes: 0
consumer=user:
    name=topic.partition.alive_count: 32
    name=topic.partition.committed_end_to_end_lag_milliseconds_max: 9698
    name=topic.partition.committed_lag_messages_max: 0
    name=topic.partition.committed_read_lag_milliseconds_max: 9698
    name=topic.partition.end_to_end_lag_milliseconds_max: 50031
    name=topic.partition.read.idle_milliseconds_max: 59710
    name=topic.partition.read.lag_messages_max: 0
    name=topic.partition.read.lag_milliseconds_max: 0
    name=topic.partition.read.speed_limit_bytes_per_second: 0
    name=topic.partition.read.throttled_microseconds_max: 0
    name=topic.partition.write.lag_milliseconds_max: 0
    name=topic.read.lag_messages: 0
CHECK COUNTER topic.partition.uptime_milliseconds_min wait less than 60200 got 59730 CHECK COUNTER topic.partition.alive_count wait 32 got 32 2025-04-07T14:22:24.190743Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-07T14:22:24.191008Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:22:24.191580Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-07T14:22:24.192266Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-07T14:22:24.193065Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7490575702075959824:2143], processor id# 72075186224037894, database# /Root/PQ 2025-04-07T14:22:24.194437Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7490575702075959824:2143], database# /Root/PQ, processor id# 72075186224037894 2025-04-07T14:22:24.208414Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7490575710079278232:2058], processor id# 72075186224037894, database# /Root/PQ 2025-04-07T14:22:24.208500Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7490575710079278232:2058], database# /Root/PQ, processor id# 72075186224037894 2025-04-07T14:22:25.236936Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [12:7490575703417539811:2061] 2025-04-07T14:22:25.240890Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [12:7490575707712507271:2167] 2025-04-07T14:22:25.713507Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7490575710079278232:2058] 2025-04-07T14:22:25.772964Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7490575710079278232:2058] 2025-04-07T14:22:25.774373Z node 11 :SYSTEM_VIEWS DEBUG: Send counters: service id# [11:7490575710079278232:2058], processor id# 72075186224037894, database# /Root/PQ, generation# 11041687254593014561, node id# 11, is retrying# 0, is labeled# 0 2025-04-07T14:22:25.812591Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7490575710079278232:2058], processor id# 72075186224037894, database# /Root/PQ 2025-04-07T14:22:25.813030Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7490575710079278232:2058], database# /Root/PQ, processor id# 72075186224037894 2025-04-07T14:22:25.873008Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7490575705784310873:2061] |98.0%| [TA] $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] >> test_ttl.py::TestTTLOnIndexedTable::test_case ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 18596, MsgBus: 10085 2025-04-07T14:21:55.043073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576362037137569:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:21:55.043671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010da/r3tmp/tmpwk1BU6/pdisk_1.dat 2025-04-07T14:21:55.407345Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:21:55.440323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:21:55.440446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18596, node 1 2025-04-07T14:21:55.442354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:21:55.490488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:21:55.490540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:21:55.490553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:21:55.490668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10085 TClient is connected to server localhost:10085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:21:55.995819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:21:56.009093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:21:57.835655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576370627072633:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:57.835864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:57.836107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576370627072660:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:21:57.839797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:21:57.851913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576370627072662:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:21:58.157879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:21:58.291387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:21:58.291537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:21:58.291823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:21:58.291967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:21:58.292067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:21:58.292171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:21:58.292261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:21:58.292375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:21:58.292483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:21:58.292575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:21:58.292885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:21:58.293024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576374922040174:2322];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:21:58.304394Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:21:58.304483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:21:58.304634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:21:58.304743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:21:58.304830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:21:58.304933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:21:58.305038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:21:58.305141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:21:58.305226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:21:58.305310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:21:58.305433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:21:58.305535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576374922040175:2323];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:21:58.313277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:21:58.313331Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:21:58.313418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:21:58.313448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAM ... single operation is supported;tx_id=0; 2025-04-07T14:22:08.700598Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037967;self_id=[1:7490576374922041269:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.700642Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037984;self_id=[1:7490576374922041449:2474];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.701960Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037997;self_id=[1:7490576374922041343:2442];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702040Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037981;self_id=[1:7490576374922041413:2458];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702094Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037966;self_id=[1:7490576374922041491:2496];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702151Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037982;self_id=[1:7490576374922041456:2479];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702214Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037983;self_id=[1:7490576374922041438:2466];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702273Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037977;self_id=[1:7490576374922041460:2481];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702317Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037978;self_id=[1:7490576374922041448:2473];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702364Z node 1 
:TX_COLUMNSHARD ERROR: tablet_id=72075186224037995;self_id=[1:7490576374922041334:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.702411Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037996;self_id=[1:7490576374922041264:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.705207Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576417871723234:3684], SessionActorId: [1:7490576409281787583:3684], Got BAD REQUEST for table. ShardID=72075186224037909, Sink=[1:7490576417871723234:3684].{
: Fatal: only single operation is supported } 2025-04-07T14:22:08.705285Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576417871723234:3684], SessionActorId: [1:7490576409281787583:3684], Bad request. {
: Fatal: only single operation is supported }. statusCode=BAD_REQUEST. subIssues=
: Fatal: only single operation is supported . sessionActorId=[1:7490576409281787583:3684]. isRollback=0 2025-04-07T14:22:08.706059Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=NmQ4OGIwYy0xZDg3OWMyMi1kYTA2OWU1OC1iNjkxZTlkMw==, ActorId: [1:7490576409281787583:3684], ActorState: ReadyState, got TEvKqpBuffer::TEvError in ReadyState, status: BAD_REQUEST send to: [0:0:0] from: [1:7490576417871723234:3684]: Old error. 2025-04-07T14:22:08.712809Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037985;self_id=[1:7490576374922041494:2498];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.712920Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037986;self_id=[1:7490576374922041475:2491];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.712978Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037987;self_id=[1:7490576374922041444:2471];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.713032Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037993;self_id=[1:7490576374922041271:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-07T14:22:08.713086Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037994;self_id=[1:7490576374922041335:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 2025-04-07T14:22:10.406325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:22:10.406353Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 19118, MsgBus: 23997 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010da/r3tmp/tmpHKb4WM/pdisk_1.dat 2025-04-07T14:22:15.695507Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576445648415749:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:15.695886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:22:15.865757Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:15.906917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:15.907017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:15.921384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19118, node 2 2025-04-07T14:22:16.101290Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-07T14:22:16.101312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:16.101321Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:16.101425Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23997 TClient is connected to server localhost:23997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:16.907216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:20.250325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576467123252719:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:20.250411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:20.250833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576467123252731:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:20.254150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:22:20.268565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576467123252733:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:22:20.415704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:22:20.480929Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576445648415749:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:20.481012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:20.510886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:22:21.583411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] |98.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] |98.1%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] >> KqpLimits::TooBigKey [GOOD] >> KqpLimits::TooBigColumn >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |98.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_auditlog.py::test_dynconfig >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_auditlog.py::test_dynconfig [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] [GOOD] |98.1%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] >> KqpLimits::TooBigColumn [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 8380, MsgBus: 27210 2025-04-07T14:22:01.341376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576388072645909:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:01.341412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010c2/r3tmp/tmpB1VvV0/pdisk_1.dat 2025-04-07T14:22:01.952957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:01.953043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:01.961638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:22:02.008316Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8380, node 1 2025-04-07T14:22:02.119418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:02.119436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:02.119443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:02.119519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27210 TClient is connected to server localhost:27210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:02.875483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:05.250204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576405252515562:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:05.250319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:05.250813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576405252515574:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:05.258818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:22:05.272748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576405252515576:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:22:05.732639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:22:05.872542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:05.873205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:05.873538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:05.873652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:05.873763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:05.873871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:05.873978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:05.874101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:05.874211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:05.874309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:05.874412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:05.874512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576405252515811:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:05.882225Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:05.882278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:05.882476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:05.882572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:05.882664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:05.882753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:05.882840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:05.882946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:05.883056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:05.883148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:05.883236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:05.883341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576405252515813:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:05.924111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576405252515809:2320];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:05.924117Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7490576405252515814:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:05.924162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576405252515814:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:05.924162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576405252515809:2320];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event ... _id=[2:7490576524288209275:3085];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038086;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.967994Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7490576524288209253:3082];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.968233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7490576524288208488:2992];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.968403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576524288208309:2933];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.968570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490576498518399831:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.969478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7490576502813368393:2458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037925;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.969711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7490576502813368393:2458];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037925;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.969851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7490576524288209316:3103];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.970014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7490576524288209301:3096];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.970138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7490576524288208278:2910];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.970847Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038078;self_id=[2:7490576524288209300:3095];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038078;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.971032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7490576524288209319:3104];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.971304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7490576524288208415:2975];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.972002Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7490576524288209096:3078];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.972182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490576524288209329:3109];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.972329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7490576524288208343:2948];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.974996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7490576498518399831:2326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.975250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7490576524288209306:3097];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.976740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576524288208541:3004];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.976929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576524288208541:3004];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7490576524288208401:2964];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7490576524288208315:2937];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7490576524288208667:3026];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977310Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038095;self_id=[2:7490576524288209273:3084];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7490576524288208336:2941];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7490576524288208336:2941];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576524288209058:3065];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.977918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7490576524288208378:2956];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.978040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7490576524288208378:2956];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.983782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7490576524288208289:2918];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.984022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7490576524288208437:2986];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.984152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7490576524288208549:3009];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.992614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7490576524288209300:3095];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7490576524288209096:3078];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490576524288209329:3109];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7490576524288209563:3115];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993573Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038079;self_id=[2:7490576524288209310:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7490576524288208655:3023];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7490576524288209253:3082];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.993984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576524288208309:2933];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.994265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7490576524288209306:3097];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.994615Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7490576524288208315:2937];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.994773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490576524288209273:3084];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:40.994931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576524288209058:3065];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn [GOOD] Test command err: Trying to start YDB, gRPC: 21550, MsgBus: 24257 2025-04-07T14:20:33.870601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576008064461940:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:33.870677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013cc/r3tmp/tmpOIUSXW/pdisk_1.dat 2025-04-07T14:20:34.337757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:20:34.348398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:20:34.348490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:20:34.354258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21550, node 1 2025-04-07T14:20:34.516360Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:20:34.516386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:20:34.516393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:20:34.516491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24257 TClient is connected to server localhost:24257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:20:35.440844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.459315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:20:35.473547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.617145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:35.784604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:20:35.877516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.383559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576025244332626:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.383667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:37.733601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.785857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.825678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.854599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.897264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:20:37.971028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:20:38.032099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029539300424:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.032175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.032486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576029539300429:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:20:38.036031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:20:38.052164Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-07T14:20:38.052429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576029539300431:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:20:38.877820Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576008064461940:2253];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:20:38.878136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:20:39.347693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:20:49.300508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:20:49.300540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:24.522287Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490576484805835649:2487] TxId: 281474976710672. Ctx: { TraceId: 01jr89zwgdfn9wp1d796pqay89, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRlYzU1NWMtZGYyMzkzNjEtOTdkNDI3NTktYmQzZTg0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 50663600 > 50331648 2025-04-07T14:22:24.543057Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTRlYzU1NWMtZGYyMzkzNjEtOTdkNDI3NTktYmQzZTg0ZjA=, ActorId: [1:7490576033834268355:2487], ActorState: ExecuteState, TraceId: 01jr89zwgdfn9wp1d796pqay89, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (50663600 > 50331648), code: 200509 Trying to start YDB, gRPC: 16814, MsgBus: 8382 2025-04-07T14:22:26.135335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576495633673836:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:26.135489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013cc/r3tmp/tmppBykFO/pdisk_1.dat 2025-04-07T14:22:26.358471Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:26.385150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:26.392940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:26.395687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16814, node 2 2025-04-07T14:22:26.601386Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:26.601410Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:26.601423Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:26.601566Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8382 TClient is connected to server localhost:8382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:27.304212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is un ... ], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:32.756133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:32.756480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576521403479684:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:32.760848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:32.783973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576521403479686:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:22:35.354788Z node 2 :TX_DATASHARD ERROR: Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2025-04-07T14:22:35.354985Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2025-04-07T14:22:35.356301Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490576534288381933:2466] TxId: 281474976710671. Ctx: { TraceId: 01jr8a3d2kfenzc5ejxm847p32, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzM2U5OWMtOTU5MTNiNmEtOTk2ZDc0ODktZGY3NGUyODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2025-04-07T14:22:35.414648Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTkzM2U5OWMtOTU5MTNiNmEtOTk2ZDc0ODktZGY3NGUyODI=, ActorId: [2:7490576534288381900:2466], ActorState: ExecuteState, TraceId: 01jr8a3d2kfenzc5ejxm847p32, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 Trying to start YDB, gRPC: 3402, MsgBus: 19919 2025-04-07T14:22:37.057772Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490576543120584390:2232];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0013cc/r3tmp/tmpDZ7UlJ/pdisk_1.dat 2025-04-07T14:22:37.201153Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:22:37.432730Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:37.482885Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:37.483000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:37.494003Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3402, node 3 2025-04-07T14:22:37.658769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:37.658824Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:37.658834Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:37.658971Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19919 TClient is connected to server localhost:19919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:38.603085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:38.610702Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:22:38.620390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:22:38.736434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:39.125118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:39.293077Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:42.034830Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490576543120584390:2232];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:42.034905Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:22:43.815443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576568890389704:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:43.815560Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:43.919334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.008737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.070344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.155055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.259033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.368161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:22:44.422704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576573185357518:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:44.422827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:44.423243Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490576573185357523:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:44.426924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:22:44.449323Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-07T14:22:44.453819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7490576573185357525:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:22:47.398557Z node 3 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-04-07T14:22:47.398711Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-04-07T14:22:47.400826Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7490576586070259838:2471] TxId: 281474976715671. Ctx: { TraceId: 01jr8a3rca5497zq9p0mwbt4v8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWRkOTkxMjEtMWU0NjhkMTctMTQwYWNjYjItMjZjMzZiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-04-07T14:22:47.401426Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWRkOTkxMjEtMWU0NjhkMTctMTQwYWNjYjItMjZjMzZiNmM=, ActorId: [3:7490576581775292444:2471], ActorState: ExecuteState, TraceId: 01jr8a3rca5497zq9p0mwbt4v8, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |98.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] >> KqpSinkMvcc::OlapNamedStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/001133/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-04-07T14:22:37.354579Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2025-04-07T14:22:37.533358Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} 2025-04-07T14:22:37.664199Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE 
DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/00111f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2025-04-07T14:22:41.219141Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-04-07T14:22:41.219078Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:40.862587Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} 2025-04-07T14:22:42.590520Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-04-07T14:22:42.590488Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:42.389385Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/001144/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-04-07T14:22:37.510422Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:37.510367Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-07T14:22:37.477518Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:37.657968Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:37.657929Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-07T14:22:37.622724Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:37.796755Z: 
{"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:37.796719Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-07T14:22:37.771265Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:37.944270Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:37.944237Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:37.922618Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:38.077392Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:38.077360Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-07T14:22:38.059591Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:38.220240Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:38.220210Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-07T14:22:38.197846Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:38.643360Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> test_dispatch.py::TestMapping::test_idle >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/0010ff/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-04-07T14:22:39.230177Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:39.230086Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-07T14:22:38.853806Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:39.847100Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:39.847069Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-07T14:22:39.368061Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 5301, MsgBus: 61054 2025-04-07T14:22:02.022802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576391498844013:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:02.022860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010bc/r3tmp/tmphSidm0/pdisk_1.dat 2025-04-07T14:22:02.607090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:22:02.632270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:02.632346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:02.636445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5301, node 1 2025-04-07T14:22:02.901135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:02.901168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:02.901196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:02.901282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61054 TClient is connected to server localhost:61054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:03.708793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:03.727680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:22:06.414781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576408678713810:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:06.414856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576408678713844:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:06.415117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:06.424210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:22:06.438337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576408678713848:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:22:06.764309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:22:06.871112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:06.872604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:06.872819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:06.872914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:06.873011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:06.873105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:06.873191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:06.873284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:06.873375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:06.873463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:06.873559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:06.873648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576408678714064:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:06.901252Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:06.901304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:06.901496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:06.901640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:06.901745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:06.901845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:06.901933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:06.902019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:06.902116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:06.902219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:06.902297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:06.902374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576408678714093:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:06.906528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576408678714084:2323];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:06.906577Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7490576408678714084:2323];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:06.906732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576408678714084:2323];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:06.906839Z node 1 ... 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.830727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.830833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.831113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.831201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.831286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.831385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.835963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.836968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837061Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037977;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.837960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838049Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037937;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.838944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839420Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839615Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839846Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037991;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.839939Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840123Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.840783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:46.842470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 19379, MsgBus: 64818 2025-04-07T14:22:06.543790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576406609483283:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:22:06.543846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010b1/r3tmp/tmpD6rJe7/pdisk_1.dat 2025-04-07T14:22:07.253876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:22:07.253981Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:22:07.255557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:22:07.310083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19379, node 1 2025-04-07T14:22:07.559418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:22:07.559440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:22:07.559447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:22:07.560006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64818 TClient is connected to server localhost:64818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:22:08.297311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:22:10.774794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576423789352949:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:10.774869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576423789352970:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:10.775017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:22:10.781738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:22:10.791758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576423789352987:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:22:11.256516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:22:11.372983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:11.373162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:11.373479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:11.373479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:11.373507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:11.373713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:11.373781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:11.373839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:11.373872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:22:11.373940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:11.373969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:22:11.374062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:11.374071Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:22:11.374181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:11.374367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:22:11.374468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:22:11.374558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:11.374567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:22:11.374674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:11.374851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:22:11.374939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:11.374966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:22:11.375064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7490576428084320495:2323];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:11.375072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7490576428084320493:2321];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:22:11.409436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576428084320504:2325];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:22:11.409494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7490576428084320504:2325];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:22:11.409666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576428084320504:2325];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:22:11.409781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576428084320504:2325];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstr ... columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]};tx_id=281474976710665; 2025-04-07T14:22:47.873984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7490576540969551008:2375];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037993;event=TEvWriteBlobsResult;tablet_id=72075186224037993;local_tx_no=11;tx_info=TTxWrite;tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.874202Z node 2 :TX_COLUMNSHARD_SCAN WARN: Scan [2:7490576583919232224:3670] got AbortExecution txId: 281474976710665 scanId: 1 gen: 1 tablet: 72075186224037993 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2025-04-07T14:22:47.874828Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576583919232219:3667], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [0:0:0]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037993, Sink=[2:7490576583919232219:3667].{
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2025-04-07T14:22:47.874936Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576583919232216:3667], TxId: 281474976710665, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NmUzZjhjZGEtNTI0ZTk5MjktM2RjMzM0YjEtMzNlMTA0ZQ==. TraceId : 01jr8a3rp9007vpzgjjrnk141n. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Sink[0] fatal error: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } } 2025-04-07T14:22:47.875039Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576583919232216:3667], TxId: 281474976710665, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NmUzZjhjZGEtNTI0ZTk5MjktM2RjMzM0YjEtMzNlMTA0ZQ==. TraceId : 01jr8a3rp9007vpzgjjrnk141n. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST DEFAULT_ERROR: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } }. 2025-04-07T14:22:47.876256Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmUzZjhjZGEtNTI0ZTk5MjktM2RjMzM0YjEtMzNlMTA0ZQ==, ActorId: [2:7490576579624264858:3645], ActorState: ExecuteState, TraceId: 01jr8a3rp9007vpzgjjrnk141n, Create QueryResponse for error on request, msg: 2025-04-07T14:22:47.876861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7490576562444391956:3011];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.877167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7490576562444392018:3024];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.877614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7490576562444392018:3024];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.877783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7490576562444391956:3011];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.885140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7490576562444392168:3040];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.885828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7490576562444392168:3040];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.889308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7490576540969551776:2439];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled;
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} }
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} 2025-04-07T14:22:47.889613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7490576540969551776:2439];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.897117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7490576562444392013:3020];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.897664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7490576562444392013:3020];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.913026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7490576562444391848:2955];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.913916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7490576562444391848:2955];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.923920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490576562444391814:2947];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.924265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490576562444391948:3006];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.924543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490576562444391814:2947];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.924798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490576562444391948:3006];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.941475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7490576562444392142:3039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.941842Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490576562444391961:3012];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.942046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576562444391835:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.942241Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038042;self_id=[2:7490576562444391909:2982];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.942436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7490576540969551066:2388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.942871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7490576562444391909:2982];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.943070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7490576540969551066:2388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.943195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7490576562444392142:3039];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.943320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490576562444391961:3012];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.943433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576562444391835:2950];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.962175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7490576540969551759:2430];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037940;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.962497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7490576540969551790:2444];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037911;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.962918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7490576540969551759:2430];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.963080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7490576540969551790:2444];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037911;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:22:47.964818Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmUzZjhjZGEtNTI0ZTk5MjktM2RjMzM0YjEtMzNlMTA0ZQ==, ActorId: [2:7490576579624264858:3645], ActorState: ExecuteState, TraceId: 01jr8a3sjp0nkcgqy388wy65dg, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jr8a3rnz3y5zmq1shseqmttg, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/00115d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-04-07T14:22:36.394715Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:36.394670Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:36.130868Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:36.717677Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:36.717643Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-07T14:22:36.602165Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/00114d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-04-07T14:22:36.919506Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2025-04-07T14:22:37.109679Z: 
{"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/001131/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2025-04-07T14:22:41.037567Z: {"tx_id":"01jr8a3jw9114x9z56qrdvfa0t","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:41.037508Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-07T14:22:41.033253Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-04-07T14:22:41.246989Z: {"tx_id":"01jr8a3jw9114x9z56qrdvfa0t","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:41.246952Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:41.046335Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:41.260471Z: {"tx_id":"01jr8a3jw9114x9z56qrdvfa0t","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:41.260430Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-07T14:22:41.255017Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> TSchemeShardMoveTest::MoveMigratedTable >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> TSchemeShardMoveTest::TwoTables >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> TSchemeShardMoveTest::ResetCachedPath >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/0010e1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt 2025-04-07T14:22:48.915700Z: {"paths":"[/Root/test_auditlog.py]","tx_id":"281474976720671","acl_add":"[+U:other-user@builtin]","database":"{none}","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusSuccess","operation":"MODIFY ACL","component":"schemeshard"} 2025-04-07T14:22:49.319034Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:49.318986Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-07T14:22:48.945838Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:49.718368Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:49.718335Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-07T14:22:49.430191Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:49.964051Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:49.964024Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-07T14:22:49.828050Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:50.355012Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:50.354982Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-07T14:22:50.074558Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:50.585063Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:50.585022Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 
3)","start_time":"2025-04-07T14:22:50.464881Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:50.834285Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:50.834238Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-07T14:22:50.693132Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::TwoTables [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:01.838978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:01.839085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:01.839131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:01.839165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:01.839206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:01.839234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:01.839299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:01.839616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:01.933787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:01.933867Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:01.949629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:01.949772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:01.949942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:01.959838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:01.960047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:01.960805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:01.961043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:01.963258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:01.964434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:01.964490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:01.964629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:01.964696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:01.964739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:01.964893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.972427Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:02.156474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:02.156721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.156954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:02.157245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:02.157303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.165631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:02.165797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:02.166026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.166095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:02.166162Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:02.166213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:02.173625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.173713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:02.173750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:02.177899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.177961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.178005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:02.178061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:02.181379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:02.189494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:02.189725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:02.190720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:02.190841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:02.190885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:02.191129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:02.191180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:02.191331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:02.191417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:02.193618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:02.193657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:02.193824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:02.193860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:02.194128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:02.194169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:02.194258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:02.194291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:02.194351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:02.194388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:02.194418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:02.194445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:02.194509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:02.194560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:02.194589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:02.196311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:02.196430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:02.196466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:02.196519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:02.196572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:02.196751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-07T14:23:02.904526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:23:02.904559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-07T14:23:02.904572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-07T14:23:02.904601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-07T14:23:02.904613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:23:02.904980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:23:02.905027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:23:02.905082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-07T14:23:02.905117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:23:02.905192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-07T14:23:02.909088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-07T14:23:02.909151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:488:2452] 2025-04-07T14:23:02.909223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-07T14:23:02.909745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:23:02.909938Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 224us result status StatusPathDoesNotExist 2025-04-07T14:23:02.910095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:23:02.910498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:23:02.910707Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 167us result status StatusSuccess 2025-04-07T14:23:02.911055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:02.911790Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:23:02.911944Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 180us result status StatusPathDoesNotExist 2025-04-07T14:23:02.912057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-07T14:23:02.912449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:23:02.912616Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 169us result status StatusSuccess 2025-04-07T14:23:02.912967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:02.913620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:23:02.913814Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2025-04-07T14:23:02.914144Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:01.348078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:01.348144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:01.348170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:01.348195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:01.348226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:01.348246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-04-07T14:23:01.348284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:01.348639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:01.415864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:01.415925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:01.432569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:01.432754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:01.432923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:01.444452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:01.444688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:01.445155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:01.445348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:01.447564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:01.452362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:01.452426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:01.452513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:01.452546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:01.452573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:01.452698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.458477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:01.576171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:01.576431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.576939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:01.577264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:01.577319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-07T14:23:01.582290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:01.582448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:01.582669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.582749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:01.582794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:01.582841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:01.589381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.589470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:01.589506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:01.594273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.594351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.594399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:01.594483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:01.606213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:01.610837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:01.611052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:01.612171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:01.612304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:01.612350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:01.612594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:01.612657Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:01.612850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:01.612940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:01.618431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:01.618488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:01.618735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:01.618804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:01.619046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:01.619098Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:01.619212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:01.619253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:01.619291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:01.619336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:01.619373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:01.619416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:01.619491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:01.619527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:01.619557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:01.625302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:01.625431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:01.625478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:01.625520Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:01.625559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:01.625683Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 997 } } 2025-04-07T14:23:04.258566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 650 } } 2025-04-07T14:23:04.258597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:23:04.258683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 650 } } 2025-04-07T14:23:04.258743Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 650 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:23:04.259300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 8589936891 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.259340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-04-07T14:23:04.259441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 8589936891 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.259485Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:23:04.259556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 314 RawX2: 8589936891 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.259610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:04.259644Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:23:04.259676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 
2025-04-07T14:23:04.259721Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-04-07T14:23:04.260429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936893 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.260460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:23:04.260557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936893 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.260591Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:23:04.260640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 8589936893 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-07T14:23:04.260997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:04.261031Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.261056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-07T14:23:04.261084Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-07T14:23:04.262923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:23:04.263205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.264135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:23:04.264245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-07T14:23:04.264285Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:04.264328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-07T14:23:04.264438Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-07T14:23:04.264469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-07T14:23:04.264507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-04-07T14:23:04.264879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.268962Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.269015Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:04.269049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-07T14:23:04.269118Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-07T14:23:04.269145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-07T14:23:04.269183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-04-07T14:23:04.269246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:378:2343] message: TxId: 102 2025-04-07T14:23:04.269315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-07T14:23:04.269355Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:23:04.269387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:23:04.269493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-07T14:23:04.269526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:23:04.269558Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-07T14:23:04.269575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-07T14:23:04.269597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-07T14:23:04.269617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:23:04.269635Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-07T14:23:04.269652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-07T14:23:04.269684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-07T14:23:04.269703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-07T14:23:04.270042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-07T14:23:04.270092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-07T14:23:04.270147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:23:04.270183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, 
LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:23:04.270209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:23:04.270232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:23:04.270259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:04.275699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:23:04.275761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:463:2427] 2025-04-07T14:23:04.275869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/0010f0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-04-07T14:22:44.315756Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:44.315688Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-07T14:22:44.118091Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-07T14:22:44.627623Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-07T14:22:44.627589Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-07T14:22:44.436643Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:03.348696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:03.348800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:03.348841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:03.348876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:03.348924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:03.348953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:03.349010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:03.349372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:03.450327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:03.450390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:03.465666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:03.465831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:03.466012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:03.472127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:03.472461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:03.473112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:03.473321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:03.475430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:03.476680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:03.476740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:03.476860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:03.476923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:03.476981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:03.477144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.484433Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:03.619900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:03.620150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.620388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:03.620815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:03.620919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.624686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:03.624855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:03.625077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.625140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:03.625174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:03.625227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:03.627487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.627571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:03.627610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:03.631034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.631095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.631144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:03.631229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:03.635044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:03.637637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:03.637866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000001 2025-04-07T14:23:03.638890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:03.639052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:03.639099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:03.639383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:03.639465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:03.639635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:03.639729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:03.642867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:03.642916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:03.643120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:03.643163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:03.643408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:03.643456Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:03.643552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:03.643582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:03.643654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:03.643701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:03.643737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:03.643767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:03.643870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:03.643920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:03.643953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:03.645957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-04-07T14:23:03.646068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:03.646109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:03.646153Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:03.646218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:03.646368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... MaxStep: 18446744073709551615 PrepareArriveTime: 145500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 258 } } 2025-04-07T14:23:04.664009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-07T14:23:04.664170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 145500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 258 } } 2025-04-07T14:23:04.664305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 145500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 258 } } 2025-04-07T14:23:04.664356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-07T14:23:04.664459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.664505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-04-07T14:23:04.667595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.667753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.667820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:04.667906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-07T14:23:04.668078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-04-07T14:23:04.673509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-04-07T14:23:04.673677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-04-07T14:23:04.674158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:04.674310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:04.674365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-07T14:23:04.674671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-04-07T14:23:04.674813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-07T14:23:04.697520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:04.697617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:23:04.698278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:04.698343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T14:23:04.698968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.699033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-04-07T14:23:04.699972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:04.700077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:04.700113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:23:04.700154Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-04-07T14:23:04.700193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:04.700279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T14:23:04.713050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1364 } } 2025-04-07T14:23:04.713121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-07T14:23:04.713276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1364 } } 2025-04-07T14:23:04.713387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1364 } } 2025-04-07T14:23:04.714421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 653 RawX2: 4294969892 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-07T14:23:04.714479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-07T14:23:04.714602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 653 RawX2: 4294969892 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-07T14:23:04.714653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-07T14:23:04.714734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 653 RawX2: 4294969892 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-07T14:23:04.714789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:04.714828Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.714864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-07T14:23:04.714920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-07T14:23:04.715495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 105 2025-04-07T14:23:04.719763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.720440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.720575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:04.720632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T14:23:04.720753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T14:23:04.720793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:04.720844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-07T14:23:04.720930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2311] message: TxId: 105 2025-04-07T14:23:04.720989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:04.721037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T14:23:04.721069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T14:23:04.721218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-07T14:23:04.723969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:23:04.724027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:810:2735] TestWaitNotification: OK eventTxId 105 |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] |98.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::ColdTiers >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> KqpSinkTx::InvalidateOnError >> KqpSinkTx::SnapshotROInteractive1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/u6pr/001007/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-04-07T14:22:52.139795Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} 2025-04-07T14:22:52.352479Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2025-04-07T14:22:52.498085Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n 
yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"SUCCESS","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |98.2%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> TestProgram::Like >> TestProgram::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:07.839749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:07.839911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:07.839952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:07.839983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:07.840036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:07.840068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:07.840125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:07.840448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:07.971901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:07.971967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:08.006431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:08.006584Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:08.006772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:08.018319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:08.018584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:08.019208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:08.019478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:08.026070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:08.027562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:08.027639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:08.027792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:08.027854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:08.027904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:08.028089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.045666Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:08.255120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:08.255376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.255616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:08.255905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:08.255974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.269211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:08.269360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:08.269554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.269605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:08.269637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:08.269672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:08.271905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.271989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:08.272025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:08.274060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.274111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.274152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:08.274239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:08.278093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:08.280151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:08.280354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:08.281353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:08.281491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:08.281541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:08.281791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:08.281857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:08.282025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:08.282107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:08.284977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-07T14:23:08.285041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:08.285228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:08.285264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:08.285468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:08.285508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:08.285597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:08.285633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:08.285693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:08.285730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:08.285764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:08.285790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:08.285857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:08.285905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:08.285938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:08.287702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:08.287822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:08.287856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:08.287889Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:08.287931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:08.288023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
tatus StatusSuccess 2025-04-07T14:23:09.159911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:09.160969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:766:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:769:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:770:2058] recipient: [1:768:2680] Leader for TabletID 72057594046678944 is [1:771:2681] sender: [1:772:2058] recipient: [1:768:2680] 2025-04-07T14:23:09.276022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:09.276126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:09.276181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:09.276220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:09.276268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:09.276304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:09.276375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:09.276739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:09.331953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:09.341819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:09.342038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:09.342291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:09.342337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:09.342688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:09.343381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:23:09.343471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:23:09.343511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:23:09.343573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read 
records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.343634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.343850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:23:09.344121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.344320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.344427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.344548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-07T14:23:09.344591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:23:09.344619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:23:09.344637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:23:09.344654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:23:09.352263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.352397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.352633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-07T14:23:09.352841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:23:09.353241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.353360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.353724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.353825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.354768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2025-04-07T14:23:09.354969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.355095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.355158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.355207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:09.383818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:09.383921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:09.389079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:09.389174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:09.389248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:09.390931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 >> KqpSinkTx::OlapDeferredEffects >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "suffix" } Constant { Bytes: "001" } } } Command { Assign { Column { Name: "prefix" } Constant { Bytes: "uid" } } } Command { Assign { Column { Name: "start_with" } Function { Id: 33 Arguments { Name: "string" } Arguments { Name: "prefix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Name: "ends_with" } Function { Id: 34 Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Name: "start_with_bool" } Function { Id: 18 Arguments { Name: "start_with" } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Name: "ends_with_bool" } Function { Id: 18 Arguments { Name: "ends_with" } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Name: "result" } Function { Id: 11 Arguments { Name: "start_with_bool" } Arguments { Name: "ends_with_bool" } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Name: "result" } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=001;column=G:suffix;};{op=Constant;const=uid;column=G:prefix;};{arguments=[G:string;G:prefix;];kernel=local_function;column=G:start_with;};{arguments=[G:string;G:suffix;];kernel=local_function;column=G:ends_with;};{op=CastBoolean;arguments=[G:start_with;];column=G:start_with_bool;};{op=CastBoolean;arguments=[G:ends_with;];column=G:ends_with_bool;};{op=And;arguments=[G:start_with_bool;G:ends_with_bool;];column=G:result;};];projections=[G:result;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow11BooleanTypeE; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> KqpTx::RollbackTx >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> KqpTx::LocksAbortOnCommit >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> KqpTx::RollbackTx [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx [GOOD] Test command err: Trying to start YDB, gRPC: 63972, MsgBus: 21925 2025-04-07T14:23:11.993458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576688850106373:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:11.993709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00109b/r3tmp/tmpSPpew5/pdisk_1.dat 2025-04-07T14:23:12.279109Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63972, node 1 2025-04-07T14:23:12.331508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:12.331537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:12.331543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:12.331635Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration 2025-04-07T14:23:12.345867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:12.345966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:12.346921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21925 TClient is connected to server localhost:21925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:12.870504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:12.894866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:13.020764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:13.153198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:13.209596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:14.788486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576701735009819:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:14.788625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:15.258262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.332212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.365172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.396636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.423252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.454219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.516641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576706029977617:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:15.516736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:15.516908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576706029977622:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:15.520291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:15.534024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576706029977624:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:16.994002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576688850106373:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:16.994498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:17.004818Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNkOTAzOGUtZmI0MGQ2MTQtNWU2ZTI1N2QtODdlZTIxNDk=, ActorId: [1:7490576710324945235:2460], ActorState: ReadyState, TraceId: 01jr8a4p01esd7jzg49007kcg7, Create QueryResponse for error on request, msg: |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpTx::CommitRequired >> TColumnShardTestSchema::RebootColdTiers >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::SnapshotROInteractive2 >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkTx::Interactive >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::InvalidateOnError >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> KqpSinkLocks::TInvalidate >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> KqpTx::InvalidateOnError [GOOD] |98.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 1972, MsgBus: 24863 2025-04-07T14:23:15.154327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576706029456479:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:15.154451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001095/r3tmp/tmpPhzWjv/pdisk_1.dat 2025-04-07T14:23:15.531564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:15.572754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:15.572871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:15.581870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1972, node 1 2025-04-07T14:23:15.730889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:15.730911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:15.730918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:15.731015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24863 TClient is connected to server localhost:24863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:16.287617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:16.310902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:16.462089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:16.680695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:16.776975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:18.469912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576718914360052:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:18.470061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:18.711063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:18.782069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:18.851688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:18.943656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:19.030856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:19.111100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:19.210558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576723209327863:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:19.210636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:19.210929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576723209327868:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:19.215200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:19.231248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576723209327870:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:20.203968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576706029456479:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:20.204576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:21.302680Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQ0ZThlNmYtNmUxZDk2M2QtMmQ3MWU1M2MtYTYxZDliZjY=, ActorId: [1:7490576727504295476:2461], ActorState: ExecuteState, TraceId: 01jr8a4t0mdmtr2ek5m55f4nnp, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-07T14:23:21.320192Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQ0ZThlNmYtNmUxZDk2M2QtMmQ3MWU1M2MtYTYxZDliZjY=, ActorId: [1:7490576727504295476:2461], ActorState: ReadyState, TraceId: 01jr8a4t773ax4y8qbajchf5v3, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 30907, MsgBus: 13194 2025-04-07T14:23:22.339307Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576734280870894:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:22.339353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001095/r3tmp/tmpatvGgv/pdisk_1.dat 2025-04-07T14:23:22.492746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:22.492860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:22.497097Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:22.499074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30907, node 2 2025-04-07T14:23:22.615552Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:22.615591Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:22.615598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:22.615716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13194 TClient is connected to server localhost:13194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:23.120140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:23.151665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:23.209139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:23.403017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:23:23.487564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.852878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576747165774464:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.852999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.882128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.922810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.986924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.041291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.084239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.123928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.219612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576751460742256:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:26.219702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:26.220027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576751460742261:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:26.223017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:26.233151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576751460742263:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:27.341141Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576734280870894:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:27.341270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:27.501225Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576755755709930:2473], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGY0MzlhNjQtYmUyOTA1NzItZjY4MGVhZTctZWY4NTdjOWU=. TraceId : 01jr8a50116s0t15p1aztp3kq5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-07T14:23:27.505266Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576755755709932:2474], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr8a50116s0t15p1aztp3kq5. SessionId : ydb://session/3?node_id=2&id=NGY0MzlhNjQtYmUyOTA1NzItZjY4MGVhZTctZWY4NTdjOWU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490576755755709927:2460], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-07T14:23:27.511141Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGY0MzlhNjQtYmUyOTA1NzItZjY4MGVhZTctZWY4NTdjOWU=, ActorId: [2:7490576755755709873:2460], ActorState: ExecuteState, TraceId: 01jr8a50116s0t15p1aztp3kq5, Create QueryResponse for error on request, msg: 2025-04-07T14:23:27.633980Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGY0MzlhNjQtYmUyOTA1NzItZjY4MGVhZTctZWY4NTdjOWU=, ActorId: [2:7490576755755709873:2460], ActorState: ExecuteState, TraceId: 01jr8a509b77g16wzwrj6622gq, Create QueryResponse for error on request, msg: >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] >> KqpTx::CommitRoTx_TLI >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::LocksAbortOnCommit >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] >> KqpTx::CommitRoTx [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 5991, MsgBus: 63824 2025-04-07T14:23:21.257366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576730654520404:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:21.257516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001089/r3tmp/tmpr2jCuR/pdisk_1.dat 
2025-04-07T14:23:21.739543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:21.740491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:21.740605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:21.775100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5991, node 1 2025-04-07T14:23:21.866769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:21.866790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:21.866797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:21.866914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63824 TClient is connected to server localhost:63824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:22.553347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:22.604931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:22.740035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:22.942663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:23.010881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:24.922502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576743539423838:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:24.922686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.187388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.224677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.275253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.349373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.398095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.459128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.557853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576747834391637:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.557950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.558278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576747834391642:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.562946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:25.578765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576747834391644:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:26.256794Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576730654520404:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:26.256874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61776, MsgBus: 1652 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001089/r3tmp/tmpL6tiVG/pdisk_1.dat 2025-04-07T14:23:27.592920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:23:27.602410Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:27.614198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:27.614277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:27.616073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61776, node 2 2025-04-07T14:23:27.672276Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:27.672297Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:27.672306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:27.672406Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1652 TClient is connected to server localhost:1652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:28.104349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.115040Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:23:28.131393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:28.219331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.359838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.486972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:30.645459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576768418631652:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.645555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.689863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.718624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.757056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.791582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.837235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.866636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.953872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576768418632150:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.953941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.954155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576768418632155:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.957285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:30.965649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576768418632157:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink >> KqpSinkTx::Interactive [GOOD] >> TestProgram::YqlKernelEquals >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] >> TestProgram::YqlKernelEquals [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> KqpSinkTx::DeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "lhs" } Arguments { Name: "rhs" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:lhs;G:rhs;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9FloatTypeE; lhs: [ -2, -1, 0, 1, 2 ] rhs: [ -2, -1.1, 0, 2, 2 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 32357, MsgBus: 5655 2025-04-07T14:23:09.020779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576676973894190:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:09.021147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a6/r3tmp/tmpICT7N0/pdisk_1.dat 2025-04-07T14:23:09.535271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:09.568162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:09.568255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:09.577176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32357, node 1 2025-04-07T14:23:09.805552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:09.805578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:09.805589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:09.805684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5655 TClient is connected to server localhost:5655 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:10.535980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:10.549690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:12.836178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576689858796562:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.836185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576689858796583:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.836290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.839437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:12.850570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576689858796585:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:13.542641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:13.688353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:14.338218Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576676973894190:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:14.362174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:14.587919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:15.997032Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Operation is aborting because an duplicate key;tx_id=3; 2025-04-07T14:23:15.997239Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2025-04-07T14:23:15.997411Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2025-04-07T14:23:15.997640Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576702743706949:2941], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7490576702743706683:2941]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[1:7490576702743706949:2941].{
: Fatal: Operation is aborting because an duplicate key } 2025-04-07T14:23:15.998118Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576702743706940:2941], SessionActorId: [1:7490576702743706683:2941], Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key }. statusCode=BAD_REQUEST. subIssues=
: Fatal: Operation is aborting because an duplicate key . sessionActorId=[1:7490576702743706683:2941]. isRollback=0 2025-04-07T14:23:15.998224Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA1YzM3ZDYtZTFkMWRhYjQtY2FlNmUxMzQtNjBlOWI1NGY=, ActorId: [1:7490576702743706683:2941], ActorState: ExecuteState, TraceId: 01jr8a4mmw4mgmhbr5gyngf86d, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7490576702743706941:2941] from: [1:7490576702743706940:2941] 2025-04-07T14:23:15.998393Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490576702743706941:2941] TxId: 281474976710664. Ctx: { TraceId: 01jr8a4mmw4mgmhbr5gyngf86d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA1YzM3ZDYtZTFkMWRhYjQtY2FlNmUxMzQtNjBlOWI1NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key } 2025-04-07T14:23:16.004246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA1YzM3ZDYtZTFkMWRhYjQtY2FlNmUxMzQtNjBlOWI1NGY=, ActorId: [1:7490576702743706683:2941], ActorState: ExecuteState, TraceId: 01jr8a4mmw4mgmhbr5gyngf86d, Create QueryResponse for error on request, msg:
: Error: Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key 2025-04-07T14:23:16.068424Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA1YzM3ZDYtZTFkMWRhYjQtY2FlNmUxMzQtNjBlOWI1NGY=, ActorId: [1:7490576702743706683:2941], ActorState: ExecuteState, TraceId: 01jr8a4n1n9s3hw1hj94wqeejp, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jr8a4mmq5dsvvmjyfcte311q, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 7514, MsgBus: 20195 2025-04-07T14:23:22.132447Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576734387550080:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:22.132495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a6/r3tmp/tmpsnp0Gr/pdisk_1.dat 2025-04-07T14:23:22.314968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:22.342046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:22.342125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:22.344096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7514, node 2 2025-04-07T14:23:22.464596Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:22.464617Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:22.464623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:22.464719Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20195 TClient is connected to server localhost:20195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:23:22.936736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:22.945483Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:23:25.452386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576747272452584:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.452495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.454934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576747272452611:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.458518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:25.472129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576747272452613:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:23:25.581854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.625356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:27.010653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:27.132483Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576734387550080:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:27.132548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSinkLocks::UncommittedRead >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |98.3%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |98.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:34.934364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:34.934490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:34.934539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:34.934576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:34.934627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:34.934654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:34.934714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:34.935047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:35.004037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:35.004114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:35.018604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:35.018756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:35.018912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:35.031047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:35.031280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:35.031972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:35.032188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:35.034197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:35.035430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:35.035493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:35.035628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:35.035676Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:35.035716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:35.035885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.042229Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:35.174573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:35.174827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.175053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:35.175305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:35.175370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.178010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:35.178140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:35.178309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.178364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:35.178412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:35.178461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:35.180677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.180732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:35.180777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:35.182967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.183037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.183107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:35.183206Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:35.187117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:35.189625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:35.189841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:35.191132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:35.191294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:35.191376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:35.191671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:35.191737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:35.191950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:35.192044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:35.198181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:35.198251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:35.198442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:35.198491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:35.198749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:35.198803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:35.198909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:35.198946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:35.199008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:35.199082Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:35.199150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:35.199188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:35.199279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:35.199326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:35.199362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:35.201738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:35.201863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:35.201904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:35.201947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:35.202001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:35.202138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
es { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:36.285462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:121:2147] sender: [2:638:2058] recipient: [2:100:2135] Leader for TabletID 72057594046678944 is [2:121:2147] sender: [2:641:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:121:2147] sender: [2:642:2058] recipient: [2:640:2563] Leader for TabletID 72057594046678944 is [2:643:2564] sender: [2:644:2058] recipient: [2:640:2563] 2025-04-07T14:23:36.339565Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:36.339677Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:36.339725Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:36.339779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:36.339823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:36.339862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:36.339944Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:36.340298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:36.359365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:36.360496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:36.360696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:36.360818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:36.360869Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:36.360980Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:36.361667Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-07T14:23:36.361758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:23:36.361797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-07T14:23:36.361865Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.361947Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-07T14:23:36.362365Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362683Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-07T14:23:36.362764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-07T14:23:36.362786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-07T14:23:36.362808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-07T14:23:36.362901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.362986Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.363128Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-07T14:23:36.363295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-07T14:23:36.363595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.363704Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364032Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364366Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364454Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.364627Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.366912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.367123Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.367282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2025-04-07T14:23:36.367404Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.367454Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.367496Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:36.377613Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:36.377713Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:36.378075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:36.378136Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:36.378185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:36.380439Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:643:2564] sender: [2:702:2058] recipient: [2:15:2062] 2025-04-07T14:23:36.434974Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:23:36.435278Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 356us result status StatusSuccess 2025-04-07T14:23:36.435936Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> KqpTx::CommitRoTx_TLI [GOOD] >> KqpTx::CommitStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 26271, MsgBus: 17212 2025-04-07T14:23:09.192889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576677960817406:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:09.192937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ab/r3tmp/tmpOTrgeE/pdisk_1.dat 2025-04-07T14:23:09.697827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:09.697922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:09.706012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:09.730793Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26271, node 1 2025-04-07T14:23:09.908594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:09.908619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:09.908632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:09.908735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17212 TClient is connected to server localhost:17212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:10.779686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:10.798751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:12.908589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576690845719792:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.908698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576690845719780:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.908847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:12.912418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:12.921051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576690845719795:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:13.266043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:13.386562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:14.155926Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576677960817406:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:14.156230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:14.347826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 25688, MsgBus: 9624 2025-04-07T14:23:21.745394Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576730826479744:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010ab/r3tmp/tmpTIuYmb/pdisk_1.dat 2025-04-07T14:23:21.919809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:23:21.953993Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:21.995030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:21.995128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:22.006249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25688, node 2 2025-04-07T14:23:22.188463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:22.188480Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:22.188484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:22.188567Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9624 TClient is connected to server localhost:9624 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:22.979563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:25.861835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576748006349374:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.861953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.862346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576748006349410:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:25.866283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:25.881399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576748006349412:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:26.041291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.122418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:27.131692Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576730826479744:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:27.154474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:27.523159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 20670, MsgBus: 5355 2025-04-07T14:23:17.116333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576714593808134:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:17.119495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108f/r3tmp/tmpYyGQbd/pdisk_1.dat 2025-04-07T14:23:17.737123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:17.739221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:17.739297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:17.747219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20670, node 1 2025-04-07T14:23:17.990850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:17.990867Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:17.990872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:17.990954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5355 TClient is connected to server localhost:5355 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:18.927440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:18.945666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:18.977720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:23:19.145876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:19.303350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:23:19.377709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.163571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576731773678883:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:21.163668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:21.393887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.426447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.475647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.544523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.583823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.638535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:21.709815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576731773679378:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:21.709897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:21.710070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576731773679383:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:21.714077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:21.724277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576731773679385:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:22.153155Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576714593808134:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:22.153421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:22.811920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:23:22.871038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:23:22.918114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29721, MsgBus: 61339 2025-04-07T14:23:27.288985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576755391579165:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:27.289055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00108f/r3tmp/tmpVt1HOT/pdisk_1.dat 2025-04-07T14:23:27.483768Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:27.512214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:27.512724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:27.515790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29721, node 2 2025-04-07T14:23:27.698285Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:27.698311Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:27.698317Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:27.698446Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61339 TClient is connected to server localhost:61339 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:28.212525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.224270Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.237015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:23:28.363525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:23:28.562348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:28.664968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:30.946032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576768276482776:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.946099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.982058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.026333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.064408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.108908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.158850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.234927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:31.320948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576772571450574:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:31.321028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:31.321088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576772571450579:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:31.325161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:31.342157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576772571450581:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:32.303651Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576755391579165:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:32.303724Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:32.655144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-07T14:23:32.721993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-07T14:23:32.773234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] >> KqpSinkMvcc::OltpNamedStatement >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] >> KqpSinkTx::OlapExplicitTcl >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::TInvalidateOlap >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> TColumnShardTestSchema::RebootOneColdTier >> KqpLocks::DifferentKeyUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2025-04-07T14:15:35.310363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:35.310668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:35.310783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/002029/r3tmp/tmpSmbR1N/pdisk_1.dat 2025-04-07T14:15:35.644375Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5185, node 1 2025-04-07T14:15:35.858784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:35.858842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:35.858870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:35.859421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:35.861481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:35.959650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:35.959773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:35.972943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16753 2025-04-07T14:15:36.562071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:39.133354Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:39.191413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.191508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.229081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:39.232068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.386973Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:39.431362Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:39.431454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:39.458482Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:39.458621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:39.458775Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:39.458819Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:39.458855Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:39.458895Z node 2 
:STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:39.458931Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:39.458999Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:39.459330Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:39.662078Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.662179Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:39.667852Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:39.674490Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:39.675343Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:39.676415Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:39.695582Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:39.695639Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:39.695706Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:39.697561Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:39.697648Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:39.709651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:39.720210Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:39.720335Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:39.730963Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:39.746594Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:39.786147Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:40.016274Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:40.171860Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:40.922688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.922803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:40.935904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:41.050953Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:41.051127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:41.051374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:41.051478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:41.051585Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:41.051686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:15:41.051747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:15:41.051834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:15:41.051920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:15:41.051982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:15:41.052056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:15:41.052175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2823];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:15:41.073194Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:15:41.073264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:15:41.073458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:15:41.073526Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:15:41.073587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:15:41.073675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2247:2824];tablet_id=72075186224037900;process=TTxI ... NextTraversal. No force traversals. 2025-04-07T14:22:09.567707Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:11.213348Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:11.213418Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:13.057436Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:13.057686Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:14.710875Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:14.710946Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:17.487745Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:17.501502Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:17.501580Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:20.483298Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:20.483534Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:20.494528Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:20.494598Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:22.928848Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:22.928913Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:24.011399Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:25.285835Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:25.285909Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:22:26.824016Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:26.824222Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:28.313193Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:28.313259Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:31.470370Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:31.485533Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:31.485604Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:34.768981Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:34.769211Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:34.816919Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:34.816992Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:37.707936Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:37.708008Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:39.139956Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:40.583665Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:40.583735Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:40.867868Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:22:40.867945Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:22:40.867980Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:22:40.868014Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:22:42.569491Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:42.569686Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:44.405464Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:44.405538Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:47.693591Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:47.704569Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:47.704641Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:51.056980Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:51.057216Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:22:51.069550Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:51.069620Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:22:54.270314Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:54.270401Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:55.813770Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:22:57.473303Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:22:57.473376Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:22:59.369345Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:22:59.369569Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:01.202408Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:01.202481Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:04.463063Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:04.476991Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:04.477062Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:07.529386Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:23:07.529572Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:07.552548Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:07.552612Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:10.697456Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:10.697527Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:12.165453Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:13.793732Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:13.793808Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:15.506797Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:23:15.507004Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:16.934619Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:16.934693Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:19.872538Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:19.883452Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:19.883523Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:22.824336Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:23:22.824503Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:22.835482Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:22.835554Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:23:25.717548Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:25.717623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:27.119427Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:28.611316Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:28.611382Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:28.828723Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:23:28.828802Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:23:28.828835Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:23:28.828867Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:23:30.330167Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:23:30.330330Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:31.888849Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:31.888927Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:31.924157Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:23:31.924253Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 189.000000s, at schemeshard: 72075186224037889 2025-04-07T14:23:31.924556Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-07T14:23:31.942321Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:23:34.630764Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:34.652540Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:34.652629Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:37.494212Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-07T14:23:37.494553Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done) 2025-04-07T14:23:37.494975Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:15670:9739]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:23:37.498663Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-07T14:23:37.498740Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:15670:9739], StatRequests.size() = 1 >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:39.788841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:39.788958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:39.788997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:39.789027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:39.789070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:39.789095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:39.789139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:39.789424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:39.850746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:39.850802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:39.861104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:39.861235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:39.861384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:39.867323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:39.867519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:39.868083Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:39.868264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:39.870330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:39.871501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:39.871564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:39.871684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:39.871721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:39.871772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:39.871903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.877868Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:39.976224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:39.976436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.976645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:39.976887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:39.976948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.979204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:39.979345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:39.979500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.979551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:39.979585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:39.979651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:39.985479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.985543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:39.985574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:39.987533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.987580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.987634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:39.987736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:39.991356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:39.993410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:39.993629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:39.994623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:39.994765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:39.994814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:39.995062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:39.995107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:39.995294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:39.995394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:39.997632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:39.997673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:39.997856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:39.997897Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:39.998129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:39.998175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:39.998268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:39.998319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:39.998381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:39.998422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:39.998456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:39.998484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:39.998561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:39.998604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:39.998635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:40.000446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:40.000577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:40.000610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:40.000642Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:40.000773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:40.000885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
EvProposeTransactionResult> complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.637176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.637218Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.637308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-07T14:23:41.637447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-04-07T14:23:41.639015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-04-07T14:23:41.639093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-04-07T14:23:41.639158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-04-07T14:23:41.642635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-07T14:23:41.642795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-07T14:23:41.642859Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-07T14:23:41.642899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-07T14:23:41.642939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:23:41.645570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:23:41.645833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:23:41.645874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:23:41.646229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:23:41.646270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-07T14:23:41.646307Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:23:41.682260Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-07T14:23:41.682422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 380 RawX2: 8589936941 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:41.682482Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-07T14:23:41.682529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:23:41.741037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-07T14:23:41.741217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-07T14:23:41.741306Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-07T14:23:41.741356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.741390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T14:23:41.741551Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-07T14:23:41.741723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:41.747876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.748259Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:41.748303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:23:41.748590Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:41.748631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T14:23:41.748909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.748951Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T14:23:41.749047Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T14:23:41.749082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:41.749127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-07T14:23:41.749170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:41.749203Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T14:23:41.749233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T14:23:41.749364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:23:41.749406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-07T14:23:41.749436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:23:41.750165Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:41.750253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:41.750288Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:23:41.750322Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:23:41.750357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:41.750426Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-07T14:23:41.750460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-04-07T14:23:41.753930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-07T14:23:41.754072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:23:41.754111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:677:2601] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-04-07T14:23:41.770476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:41.770675Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:23:41.771192Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-04-07T14:23:41.773494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:41.773653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:23:41.774046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:23:41.774089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:23:41.774453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:23:41.774541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:23:41.774572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:770:2682] TestWaitNotification: OK eventTxId 106 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> KqpTx::CommitStats [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] >> KqpTx::DeferredEffects >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] >> KqpTx::MixEnginesOldNew >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 
5198, MsgBus: 10849 2025-04-07T14:23:31.702728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576773584479419:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:31.709712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001073/r3tmp/tmpXyXpL9/pdisk_1.dat 2025-04-07T14:23:32.225007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:32.225131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:32.227067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:32.287057Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5198, node 1 2025-04-07T14:23:32.367950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:32.367969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:32.367978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:32.368058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10849 TClient is connected to server localhost:10849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:33.053517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:33.069726Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:33.080918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:33.198010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:33.359414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:33.435786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:34.890183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576786469382872:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:34.890347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.065625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.088556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.119605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.147618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.169827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.194736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.232404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576790764350661:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.232466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.232690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576790764350667:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.236103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:35.244454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576790764350669:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:36.705486Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576773584479419:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:36.705540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8553, MsgBus: 11163 2025-04-07T14:23:37.556714Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576798733060046:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:37.556762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001073/r3tmp/tmpAGFxDr/pdisk_1.dat 2025-04-07T14:23:37.821774Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:37.868171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:37.872815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:37.874089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8553, node 2 2025-04-07T14:23:38.006236Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:38.006255Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:38.006284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:38.006402Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11163 TClient is connected to server localhost:11163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:38.568845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:23:38.582228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:23:38.656994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:38.849252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:38.928165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:40.861570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576811617963646:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:40.861656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:40.888511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.918447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.960958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.991718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:41.057229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:41.117732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:41.169298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576815912931439:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.169368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.169524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576815912931444:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.172635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:41.185672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576815912931446:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:23:42.556822Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576798733060046:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:42.556927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpLocks::InvalidateOnCommit |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 19192, MsgBus: 4452 2025-04-07T14:23:11.069537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576687507931288:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:11.069595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a0/r3tmp/tmpp3Kn7c/pdisk_1.dat 2025-04-07T14:23:11.752208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:11.762944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:11.763062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:11.783661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19192, node 1 2025-04-07T14:23:12.021334Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:12.021353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:12.021360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:12.021451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4452 TClient is connected to server localhost:4452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:13.135204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:13.149798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:16.072883Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576687507931288:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:16.072978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:16.119754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576708982768399:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:16.119901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:16.120147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576708982768411:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:16.124358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:16.136300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576708982768413:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:16.512084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:16.632284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:16.632466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:16.632862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:16.632997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:16.633097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:16.633214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:16.633315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:16.633409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:16.633510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:16.633600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:16.633702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:16.633854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576708982768651:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:16.698299Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:16.698368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:16.698560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:16.698662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:16.698765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:16.699203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:16.699307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:16.699404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:16.699510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:16.699599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:16.699694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:16.699783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7490576708982768666:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:16.703808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576708982768654:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:16.703887Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490576708982768654:2324];tablet_id=72075186224037888;process= ... 41];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.632349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7490576713277737260:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.632627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7490576730457610536:3051];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.632817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7490576726162643159:3037];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.632966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7490576713277737180:2489];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.633138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7490576726162642891:2944];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.633285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490576713277737194:2492];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.633415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7490576730457610542:3053];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.637712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7490576726162643159:3037];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.645035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7490576713277737260:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.645266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7490576730457610536:3051];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.646185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7490576713277737180:2489];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.646769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7490576726162642891:2944];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 
2025-04-07T14:23:25.646901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7490576713277737194:2492];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:25.647048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7490576730457610542:3053];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:23:26.730363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:23:26.730397Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 12822, MsgBus: 21118 2025-04-07T14:23:32.261587Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576778796439239:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:32.261660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0010a0/r3tmp/tmpvCpDxg/pdisk_1.dat 2025-04-07T14:23:32.451170Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:32.516645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:32.516751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:32.518173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12822, node 2 2025-04-07T14:23:32.639928Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:32.639958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:32.639967Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:32.640073Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21118 TClient is connected to server localhost:21118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:23:33.193536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:35.548565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576791681341742:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.548638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576791681341753:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.548706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.552088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:35.565090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576791681341764:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:35.704346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.790062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:36.780456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:37.592124Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576778796439239:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:37.639024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:38.667108Z node 2 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710667; 2025-04-07T14:23:38.694946Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576804566252601:2939], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7490576804566251828:2939]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[2:7490576804566252601:2939].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:23:38.695454Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576804566252588:2939], SessionActorId: [2:7490576804566251828:2939], Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7490576804566251828:2939]. isRollback=0 2025-04-07T14:23:38.695819Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWI3NzNjNzUtOThiMzBiY2ItYmUyYzNkLTY3MDY2YmM=, ActorId: [2:7490576804566251828:2939], ActorState: ExecuteState, TraceId: 01jr8a5b3w8wep9k285r41a3zh, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7490576804566252589:2939] from: [2:7490576804566252588:2939] 2025-04-07T14:23:38.696089Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7490576804566252589:2939] TxId: 281474976710667. Ctx: { TraceId: 01jr8a5b3w8wep9k285r41a3zh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWI3NzNjNzUtOThiMzBiY2ItYmUyYzNkLTY3MDY2YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:23:38.709984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWI3NzNjNzUtOThiMzBiY2ItYmUyYzNkLTY3MDY2YmM=, ActorId: [2:7490576804566251828:2939], ActorState: ExecuteState, TraceId: 01jr8a5b3w8wep9k285r41a3zh, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] >> TestProgram::JsonExists |98.4%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSinkTx::ExplicitTcl >> TestProgram::JsonExists [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=33376;before_rows=0;after_rows=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=33376;after_size=70288;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=70288;after_size=107232;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=107232;after_size=144208;before_rows=3000;after_rows=4000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=144208;after_size=181232;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;componen ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:521;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkMvcc::OlapMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:47.315947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:47.316048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:47.316083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:47.316124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2025-04-07T14:23:47.316165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:47.316189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:47.316241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:47.316552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:47.375441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:47.375490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:47.383783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:47.383887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:47.384028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:47.388009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:47.388146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:47.388500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:47.388643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:47.390231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:47.391375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:47.391428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:47.391533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:47.391582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:47.391613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:47.391786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.397598Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:47.496191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:47.496415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.496634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:47.496906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: 
, at schemeshard: 72057594046678944 2025-04-07T14:23:47.496973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.499350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:47.499477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:47.499697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.499760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:47.499795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:47.499828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:47.502613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.502677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:47.502713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:47.504561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.504608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.504646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:47.504748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:47.508149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:47.513848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:47.514070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:47.515145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:47.515296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:47.515345Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:47.515631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:47.515680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:47.515863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:47.515948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:47.518295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:47.518363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:47.518522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:47.518552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:47.518762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.518803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:47.518899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:47.518949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:47.518999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:47.519035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:47.519067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:47.519088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:47.519155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:47.519204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:47.519236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:47.521112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:47.521211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:47.521242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:47.521276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:47.521320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:47.521419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:47.971915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:23:47.972187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:23:47.972225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:23:47.972696Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:23:47.972793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:23:47.972826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:636:2563] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-04-07T14:23:47.975066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:47.975259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.979794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-07T14:23:47.982130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:47.982286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-07T14:23:47.982567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-07T14:23:47.982603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-07T14:23:47.982966Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-07T14:23:47.983055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-07T14:23:47.983090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:643:2570] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-04-07T14:23:47.985477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:47.985706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.985897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-07T14:23:47.988381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:47.988606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-07T14:23:47.988923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-07T14:23:47.988960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-07T14:23:47.989416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-07T14:23:47.989510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-07T14:23:47.989553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:650:2577] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-04-07T14:23:47.992304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:47.992491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.992787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-07T14:23:47.995244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:47.995420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-07T14:23:47.995705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-07T14:23:47.995753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-07T14:23:47.996132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-07T14:23:47.996211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-07T14:23:47.996243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:657:2584] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-04-07T14:23:47.998947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:47.999144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-04-07T14:23:47.999374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-07T14:23:48.009745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: 
Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:48.009924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-07T14:23:48.010295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-07T14:23:48.010336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-07T14:23:48.010748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-07T14:23:48.010855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-07T14:23:48.010887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:664:2591] TestWaitNotification: OK eventTxId 109 >> TestProgram::YqlKernelStartsWith >> TestProgram::YqlKernelStartsWith [GOOD] >> TestProgram::YqlKernelEndsWithScalar >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> TestProgram::YqlKernelEndsWithScalar [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "prefix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:prefix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] Test command err: Trying to start YDB, gRPC: 27798, MsgBus: 25105 2025-04-07T14:23:21.240904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576729637103698:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:21.241196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001083/r3tmp/tmp3twyRA/pdisk_1.dat 2025-04-07T14:23:21.785636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:21.785750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:21.788303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:21.790105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27798, node 1 2025-04-07T14:23:21.991988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:21.992020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:21.992059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:21.992182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25105 TClient is connected to server localhost:25105 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:22.919714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:24.966305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576742522006038:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:23:24.966385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576742522006071:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:23:24.966504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-07T14:23:24.978912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-07T14:23:24.994800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576742522006076:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:25.321849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:25.445120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:26.259980Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576729637103698:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:26.275837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:26.481173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 3875, MsgBus: 21426 2025-04-07T14:23:34.940213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576784860917598:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:34.940274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001083/r3tmp/tmpuOVMnu/pdisk_1.dat 2025-04-07T14:23:35.035458Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3875, node 2 2025-04-07T14:23:35.077850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:35.077993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:35.079247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:35.113985Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:35.114012Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:35.114018Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:35.114114Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21426 TClient is connected to server localhost:21426 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:23:35.531112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:38.010821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576802040787409:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:38.010887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576802040787394:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:38.011315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:38.015339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:38.028403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576802040787432:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:23:38.220468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:38.314249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:39.531485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.360705Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576784860917598:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:40.432330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] >> KqpTx::MixEnginesOldNew [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "suffix" } Constant { Bytes: "amet." } } } Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=amet.;column=G:suffix;};{arguments=[G:string;G:suffix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow10StringTypeE; string: [ "Lorem ipsum dolor sit amet.", "Lorem ipsum dolor sit." 
] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:488;T=N5arrow9UInt8TypeE; >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:45.040942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:45.041071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:45.041108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:45.041138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:45.041197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:45.041235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:45.041287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:45.041597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:45.110754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:45.110820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:45.121540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:45.121675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:45.121853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:45.127246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:45.127473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:45.128085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:45.128289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:23:45.130174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:45.131511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:45.131568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:45.131693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:45.131734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:45.131786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:45.131941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.139117Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:45.269626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:45.269814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.269987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:45.270206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:45.270263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.281718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:45.281858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:45.282037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.282086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:45.282115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:45.282168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:45.287256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.287344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:45.287374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:45.289788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.289843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.289896Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:45.289969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:45.293643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:45.295864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:45.296050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:45.297104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:45.297302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:45.297350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:45.297620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:45.297670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:45.297825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:45.297901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:45.300281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:45.300333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:45.300493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:45.300527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:45.300838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:45.300886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:45.300976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:45.301008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-07T14:23:45.301050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:45.301087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:45.301116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:45.301160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:45.301243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:45.301285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:45.301317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:45.303241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:45.303357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:45.303396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:45.303433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:45.303485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:45.303580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
ept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:48.413644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T14:23:48.413924Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-07T14:23:48.414286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:48.417237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:48.417942Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:48.418028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:23:48.418402Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:48.418439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-07T14:23:48.418923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:48.418983Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-07T14:23:48.419187Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-07T14:23:48.419242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:48.419306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-07T14:23:48.419378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-07T14:23:48.419450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-07T14:23:48.419529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-07T14:23:48.419758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:23:48.419849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-07T14:23:48.419907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-07T14:23:48.421042Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:48.421141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-07T14:23:48.421203Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-07T14:23:48.421273Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-07T14:23:48.421318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:48.421434Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-07T14:23:48.426287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:23:48.440554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:23:48.440651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:23:48.441239Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:23:48.441369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:23:48.441416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:775:2687] TestWaitNotification: OK eventTxId 105 2025-04-07T14:23:48.945282Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:48.945592Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 352us result status StatusSuccess 2025-04-07T14:23:48.946478Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { 
FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:49.016600Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-07T14:23:49.016933Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 370us result status StatusSuccess 2025-04-07T14:23:49.017620Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] >> TColumnEngineTestLogs::IndexWriteOverload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 25778, MsgBus: 1746 2025-04-07T14:23:44.074222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576831384626642:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:44.074808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f84/r3tmp/tmpCTFwlR/pdisk_1.dat 2025-04-07T14:23:44.409822Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25778, node 1 2025-04-07T14:23:44.441828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:44.442541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:44.449132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:44.577128Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:44.577150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-07T14:23:44.577160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:44.577234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1746 TClient is connected to server localhost:1746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:45.102754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:45.114156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:45.117999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:45.255448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:45.435141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:45.499267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:47.308796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576844269530246:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.308888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.538566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.566622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.598441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.620730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.646007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.673831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.724919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576844269530737:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.724985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.725023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576844269530742:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.728409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:47.738716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576844269530744:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:49.077047Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576831384626642:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:49.077219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139] 2025-04-07T14:23:49.996772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-07T14:23:49.996875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:49.996925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-07T14:23:49.996958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-07T14:23:49.996999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-07T14:23:49.997030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-07T14:23:49.997077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-07T14:23:49.997366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:50.061696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:50.061752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:50.073953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:50.074056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:23:50.074178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:23:50.080064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:23:50.080250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:23:50.080855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:50.081060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-04-07T14:23:50.090176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:50.091528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:50.091593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:50.091712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:50.091760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:50.091789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:50.091928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.098239Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:50.219917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:50.220153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.220369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:50.220600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:50.220649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.222917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:50.223060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:50.223243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.223291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:50.223327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:50.223359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:50.225591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.225648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:50.225680Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-07T14:23:50.227381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.227425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.227463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:50.227587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:50.236314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:50.238352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:50.238552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:50.239579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:50.239722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:50.239791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:50.240043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:50.240091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:50.240256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:50.240332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:50.242414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:50.242464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:50.242611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:50.242649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:50.242838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-07T14:23:50.242875Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:50.242974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:50.243019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:50.243063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:50.243101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:50.243132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:50.243161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:50.243222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:50.243272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:50.243305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:50.245119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:50.245217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:50.245248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:50.245279Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:50.245320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:50.245432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
: 72075186233409548 TxId: 104 Status: OK 2025-04-07T14:23:50.535437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-07T14:23:50.535499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-07T14:23:50.535549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:23:50.538004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-07T14:23:50.538264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-07T14:23:50.538309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-07T14:23:50.538678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:23:50.538719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-07T14:23:50.538755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-07T14:23:50.573025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:50.573169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 375 RawX2: 4294969642 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:50.573238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-07T14:23:50.573410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-07T14:23:50.630457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-07T14:23:50.630636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-07T14:23:50.630689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-07T14:23:50.630732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.630759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-07T14:23:50.630929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
104:0 128 -> 240 2025-04-07T14:23:50.631077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-07T14:23:50.631154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:50.633739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.633968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:50.633997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:23:50.634158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-07T14:23:50.634279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:50.634305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-07T14:23:50.634344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-07T14:23:50.634835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.634902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-07T14:23:50.634996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-07T14:23:50.635026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:23:50.635071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-07T14:23:50.635110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-07T14:23:50.635143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-07T14:23:50.635175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-07T14:23:50.635291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-07T14:23:50.635324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-07T14:23:50.635351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-07T14:23:50.635406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-07T14:23:50.636240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:23:50.636352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-04-07T14:23:50.636388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:23:50.636423Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-07T14:23:50.636456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-07T14:23:50.637358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:23:50.637411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-07T14:23:50.637427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-07T14:23:50.637444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-07T14:23:50.637460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-07T14:23:50.637504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-07T14:23:50.637537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:403:2372] 2025-04-07T14:23:50.641042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:23:50.641890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-07T14:23:50.641997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-07T14:23:50.642027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:538:2476] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-04-07T14:23:50.653811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:50.654024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-07T14:23:50.654201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-04-07T14:23:50.656715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:50.656857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-07T14:23:50.657118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-07T14:23:50.657156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-07T14:23:50.657562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-07T14:23:50.657647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-07T14:23:50.657679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:636:2563] TestWaitNotification: OK eventTxId 105 >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail >> KqpLocks::EmptyRange [GOOD] |98.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest |98.4%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 21462, MsgBus: 17561 2025-04-07T14:23:40.177802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576810047343326:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:40.177839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8c/r3tmp/tmpUaNGmd/pdisk_1.dat 2025-04-07T14:23:40.543423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21462, node 1 2025-04-07T14:23:40.574222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:40.574345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:40.576306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:40.700149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:40.700172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:40.700183Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:40.700248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17561 TClient is connected to server localhost:17561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:41.200110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:41.223231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:41.334427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:41.487212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:41.548455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:43.207924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576822932246718:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:43.208047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:43.446556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.477409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.508609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.535477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.562066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.592541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.655632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576822932247213:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:43.655709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:43.655905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576822932247218:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:43.659098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:43.666987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576822932247220:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 24745, MsgBus: 16739 2025-04-07T14:23:45.898406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576833442442833:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8c/r3tmp/tmpWdcaGv/pdisk_1.dat 2025-04-07T14:23:45.945972Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:23:46.025504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:46.049564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:46.049643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:46.051421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24745, node 2 2025-04-07T14:23:46.125170Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:46.125190Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:46.125196Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:46.125275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16739 TClient is connected to server localhost:16739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-07T14:23:46.508358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:46.524528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:46.602670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:46.780917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:46.854688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:49.079848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576850622313574:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.079946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.127776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.161345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.214205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.289678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.326192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.406483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.477741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576850622314073:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.477800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.477924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576850622314078:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.480821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:49.489613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576850622314080:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:23:50.882748Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576833442442833:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:50.882842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:51.176421Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWE3ZjMzMDMtNTFlMWQzMmItN2JlMDBhMWUtZjUzN2E0ODU=, ActorId: [2:7490576854917281690:2460], ActorState: ExecuteState, TraceId: 01jr8a5q7cerjsndhewabdhymc, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=33376;before_rows=0;after_rows=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=33376;after_size=70288;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=70288;after_size=107232;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=107232;after_size=144208;before_rows=3000;after_rows=4000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=144208;after_size=181232;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;componen ... 
iority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=84;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=85;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=86;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=87;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=88;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37424;portion_bytes=37424;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=89;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37424;portion_bytes=37424;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=90;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=91;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=92;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=93;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=94;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=95;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=96;before_size=0;after_size=0;before_rows=0;after_rows=0; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=97;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=98;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=99;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37392;portion_bytes=37392;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=100;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37392;portion_bytes=37392;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1624136;portion_bytes=1624136;portion_raw_bytes=2538150; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=101;before_size=0;after_size=1624136;before_rows=0;after_rows=42858; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1624136;portion_bytes=1624136;portion_raw_bytes=2538150; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1688792;portion_bytes=1688792;portion_raw_bytes=2571480; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=102;before_size=1624136;after_size=3312928;before_rows=42858;after_rows=85716; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=1688792;portion_bytes=1688792;portion_raw_bytes=2571480; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=560112;portion_bytes=560112;portion_raw_bytes=857040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=103;before_size=3312928;after_size=3873040;before_rows=85716;after_rows=100000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=560112;portion_bytes=560112;portion_raw_bytes=857040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=104;before_size=3873040;after_size=3910688;before_rows=100000;after_rows=101000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37656;portion_bytes=37656;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:69;event=portion_stats_updated;type=ADD;path_id=1;portion=105;before_size=3910688;after_size=3948344;before_rows=101000;after_rows=102000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:111;event=update_portion;blobs_size=37656;portion_bytes=37656;portion_raw_bytes=64000; >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery |98.5%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> KqpTx::EmptyTxOnCommit [GOOD] >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> MessageDelayStatsTest::BigTimeDiff [GOOD] >> MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> TRangeTreap::Simple [GOOD] >> TRangeTreap::Sequential >> TSequence::CreateTableWithDefaultFromSequence >> Graph::CreateGraphShard >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::OlapInteractive >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 27826, MsgBus: 23910 2025-04-07T14:23:43.639163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576824583292463:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:43.642762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f87/r3tmp/tmpCBhJor/pdisk_1.dat 2025-04-07T14:23:43.945927Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27826, node 1 2025-04-07T14:23:44.016136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:44.016220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:44.017161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:44.034362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:44.034388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:44.034407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:44.034525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23910 TClient is connected to server localhost:23910 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:44.516237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:44.535937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:44.547531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:23:44.698476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:23:44.840199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:44.910773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:46.617470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576837468195901:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:46.617607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:46.839556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:46.873906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:46.944059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:46.969295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.035526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.085699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:47.177909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576841763163708:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.177980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.178036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576841763163713:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:47.181119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:47.202572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576841763163715:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:48.637864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576824583292463:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:48.637922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8373, MsgBus: 5825 2025-04-07T14:23:49.423734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576849638674722:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:49.423812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f87/r3tmp/tmp2VyLTL/pdisk_1.dat 2025-04-07T14:23:49.527560Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8373, node 2 2025-04-07T14:23:49.580788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:49.580926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:49.594635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:49.665604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:49.665628Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:49.665637Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:49.665734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5825 TClient is connected to server localhost:5825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:50.063669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:50.110723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:50.192058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:50.352208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:50.429231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:52.558096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576862523578339:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.558179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.600073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.672208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.748459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.779412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.816427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.895942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.949725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576862523578843:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.949813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.949825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576862523578848:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.953800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:52.966417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576862523578850:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TSentinelUnstableTests::BSControllerCantChangeStatus >> KqpSinkLocks::TInvalidateOlap [GOOD] >> TxKeys::ComparePointKeys >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] >> DataShardReplication::SimpleApplyChanges >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> TTxDataShardLocalKMeansScan::BadRequest >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] >> MediatorTimeCast::ReadStepSubscribe >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::TInvalidateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 29643, MsgBus: 1955 2025-04-07T14:23:25.654215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576749353863450:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:25.654756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107d/r3tmp/tmpbNfc0f/pdisk_1.dat 2025-04-07T14:23:26.294125Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29643, node 1 2025-04-07T14:23:26.429195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:26.429525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:26.432918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:26.432939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:26.432945Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-07T14:23:26.433025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:23:26.435724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1955 TClient is connected to server localhost:1955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:27.222733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:29.561541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576766533733106:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:29.561650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576766533733118:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:29.561803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:29.566668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:29.581391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576766533733133:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:29.887617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.015563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.857517Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576749353863450:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:30.882222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:31.099177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:32.740721Z node 1 :GLOBAL ERROR: fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-07T14:23:32.740938Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:23:32.741089Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2025-04-07T14:23:32.741324Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576779418643426:2941], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7490576779418643126:2941]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[1:7490576779418643426:2941].{
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:23:32.741753Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576779418643412:2941], SessionActorId: [1:7490576779418643126:2941], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7490576779418643126:2941]. isRollback=0 2025-04-07T14:23:32.741855Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmIzMzY5M2MtNTYyMTY2Zi0zMzA5NDc0Mi1hMDI4ZGRhNg==, ActorId: [1:7490576779418643126:2941], ActorState: ExecuteState, TraceId: 01jr8a559y7bhkhhgt7s4zhpy0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7490576779418643413:2941] from: [1:7490576779418643412:2941] 2025-04-07T14:23:32.742057Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7490576779418643413:2941] TxId: 281474976710665. Ctx: { TraceId: 01jr8a559y7bhkhhgt7s4zhpy0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzMzY5M2MtNTYyMTY2Zi0zMzA5NDc0Mi1hMDI4ZGRhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2025-04-07T14:23:32.747470Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmIzMzY5M2MtNTYyMTY2Zi0zMzA5NDc0Mi1hMDI4ZGRhNg==, ActorId: [1:7490576779418643126:2941], ActorState: ExecuteState, TraceId: 01jr8a559y7bhkhhgt7s4zhpy0, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 1845, MsgBus: 14910 2025-04-07T14:23:39.065055Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576809587977057:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:39.065113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00107d/r3tmp/tmpeTHRrF/pdisk_1.dat 2025-04-07T14:23:39.198905Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:39.234133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:39.234214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:39.235592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1845, node 2 2025-04-07T14:23:39.347812Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:39.347839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:39.347846Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:39.347952Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14910 TClient is connected to server localhost:14910 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:39.853541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:39.861254Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:42.043654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576822472879554:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:42.043796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [ ... 735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.772354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7490576835357786169:2882];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.772536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7490576835357786856:2942];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.772645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7490576835357786139:2873];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.774160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[2:7490576822472881032:2479];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037912;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.774374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7490576835357786843:2940];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.774545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:7490576822472880853:2442];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037984;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.774700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576835357786168:2881];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.777819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7490576835357786677:2909];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7490576835357786713:2916];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7490576835357786868:2950];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7490576835357786884:2961];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7490576835357786156:2878];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778472Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038070;self_id=[2:7490576835357786876:2954];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7490576835357786167:2880];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490576835357786732:2924];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7490576835357786878:2956];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.778903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7490576835357786877:2955];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.779018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7490576835357786831:2937];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.779166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7490576835357786180:2886];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.779287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7490576835357786630:2897];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.780635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7490576835357786627:2896];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:7490576822472880897:2459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037974;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7490576822472881026:2475];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037950;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7490576822472880882:2450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7490576822472881075:2484];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781809Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038085;self_id=[2:7490576835357786886:2962];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.781953Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7490576835357786824:2935];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.782068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7490576835357786678:2910];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.782179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7490576822472881090:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.782369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7490576822472880759:2431];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.783541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:7490576822472880853:2442];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037984;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.787479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7490576835357786822:2934];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.787639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7490576822472880763:2433];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.787768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[2:7490576822472880635:2429];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037988;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.787969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7490576822472881028:2477];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7490576835357786843:2940];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[2:7490576822472880883:2451];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037928;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:7490576822472880897:2459];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037974;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789784Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037912;self_id=[2:7490576822472881032:2479];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037912;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7490576822472881026:2475];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037950;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7490576822472880882:2450];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.789982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7490576822472881075:2484];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:49.790040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7490576822472880759:2431];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:23:54.192137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:23:54.192170Z node 2 :IMPORT WARN: Table profiles were not loaded >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] >> KeyValueGRPCService::SimpleAcquireLock >> TRetryEventsQueueTest::SendDisconnectAfterPoisonPill >> TGRpcRateLimiterTest::CreateResource >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> KqpLocks::MixedTxFail [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail [GOOD] Test command 
err: Trying to start YDB, gRPC: 21710, MsgBus: 21875 2025-04-07T14:23:46.056333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576836451605880:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:46.056482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f82/r3tmp/tmppZOSzZ/pdisk_1.dat 2025-04-07T14:23:46.478175Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21710, node 1 2025-04-07T14:23:46.495472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:46.495595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:46.497784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:46.589460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:46.589492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:46.589506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:46.589607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21875 TClient is connected to server localhost:21875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:47.110090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:47.132413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:47.230546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:47.371832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:47.439904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:48.970580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576845041542191:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:48.970706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.251356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.291980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.318655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.345607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.375720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.465974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:49.583480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576849336509992:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.583558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.583768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576849336509997:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:49.588412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:49.629035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576849336509999:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:51.057291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576836451605880:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:51.064249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:51.263794Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODQ3MzllYTctMjJiN2QwZTgtYmMzY2RhNjAtZGQ5MzZhNjY=, ActorId: [1:7490576853631477607:2460], ActorState: ExecuteState, TraceId: 01jr8a5qdb7wvmk5dh42phmdkf, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 21739, MsgBus: 5168 2025-04-07T14:23:52.221852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576861494145226:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:52.221939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f82/r3tmp/tmpHJOZhR/pdisk_1.dat 2025-04-07T14:23:52.327286Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:52.334705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:52.334804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:52.337183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21739, node 2 2025-04-07T14:23:52.461143Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:52.461167Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:52.461173Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:52.461253Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5168 TClient is connected to server localhost:5168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:52.934635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:52.941393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:23:55.061307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576874379047713:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:55.061426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:55.061675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576874379047740:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... 2025-04-07T14:23:56.795275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:23:56.795389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:23:56.795409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:23:56.795959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:56.796004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:56.796094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:56.796114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:56.796279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:56.796299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:56.796392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:56.796417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:56.796467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:56.796487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:56.796517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:56.796536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:56.797415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:56.797453Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:56.797717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:23:56.797751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:23:56.797913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:23:56.797940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:23:56.798112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:23:56.798138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:23:56.798247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:23:56.798271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:23:56.798767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:56.798798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:56.798909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:56.798941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:56.799097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:56.799124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:56.799235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:56.799263Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:56.799314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:56.799332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:56.799375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:56.799397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:56.799635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:56.799686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:56.799871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:23:56.799901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:23:56.800028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-07T14:23:56.800053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-07T14:23:56.800220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-07T14:23:56.800281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-07T14:23:56.800389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-07T14:23:56.800415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-07T14:23:57.222867Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576861494145226:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:57.222960Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:57.702034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037936;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] Col2: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"2;1;"}}]}; 2025-04-07T14:23:57.713908Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzMxNzI4NTctZWQ0ODYxOTItYTZkMjkxMTktOTdjNmJiNzM=, ActorId: [2:7490576882968985139:2663], ActorState: ExecuteState, TraceId: 01jr8a5xpjdr679zjy2kf5pdj4, Create QueryResponse for error on request, msg: 2025-04-07T14:23:57.715864Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7490576874379048468:2389];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:111;event=abort;tx_id=281474976715665;problem=finished; 2025-04-07T14:23:57.716173Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037936;tx_state=complete;fline=events.h:99;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-04-07T14:23:57.716243Z node 2 :TX_COLUMNSHARD WARN: fline=manager.cpp:128;event=abort;tx_id=281474976715665;problem=finished; >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> KqpSinkTx::ExplicitTcl [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> DataShardReassign::AutoReassignOnYellowFlag >> TRetryEventsQueueTest::SendDisconnectAfterPoisonPill [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> Metering::BillingRecords [GOOD] >> Metering::MockedNetClassifierOnly >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues |98.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/dq/actors/common/ut/unittest >> TRetryEventsQueueTest::SendDisconnectAfterPoisonPill [GOOD] |98.5%| [TS] {RESULT} ydb/library/yql/dq/actors/common/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::ExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 12756, 
MsgBus: 5618 2025-04-07T14:23:35.971861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576790324591345:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:35.971925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105c/r3tmp/tmpH3VSfK/pdisk_1.dat 2025-04-07T14:23:36.279247Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12756, node 1 2025-04-07T14:23:36.287888Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:23:36.331075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:36.332842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:36.337845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:36.352899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:36.352920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:36.352926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:36.353008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5618 TClient is connected to server localhost:5618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:36.842469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:39.099652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576807504460944:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:39.099749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:39.100271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576807504460952:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:39.104303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:39.115553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576807504460958:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:39.536372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:39.682035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.741423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:41.467568Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576790324591345:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:41.527802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 20470, MsgBus: 2194 2025-04-07T14:23:48.125208Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576845342183011:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:48.125307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00105c/r3tmp/tmpCy8NTp/pdisk_1.dat 2025-04-07T14:23:48.229285Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20470, node 2 2025-04-07T14:23:48.257869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:48.257977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:48.261160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:48.279772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:48.279794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:48.279805Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:48.279898Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2194 TClient is connected to server localhost:2194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:48.644346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:51.697894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576858227085528:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:51.697950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576858227085519:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:51.698216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:51.701936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:51.714434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576858227085548:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:23:51.823284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:51.874461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.952409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:53.587490Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576845342183011:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:53.745617Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:54.395339Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjFkOTI5ZDYtMmNmNmZlY2UtMWE0YjA1NjItNTMxZDhmMjc=, ActorId: [2:7490576871111995476:2939], ActorState: ReadyState, TraceId: 01jr8a5tgjf4tt7d56xrn5g5z7, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTpch::Query01 >> KeyValueGRPCService::SimpleAcquireLock [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-04-07T14:23:56.892429Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:23:56.994656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:23:56.994734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:56.997812Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:23:56.999195Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2151] 2025-04-07T14:23:56.999525Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:57.014547Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:128:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:23:57.066620Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:57.067200Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:23:57.069414Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:23:57.069504Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:23:57.069551Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:23:57.070488Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Complete 2025-04-07T14:23:57.100165Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:23:57.102966Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:23:57.103159Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:191:2193] 2025-04-07T14:23:57.103217Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:23:57.103257Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:23:57.103295Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:23:57.103535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:23:57.103595Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:23:57.104824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:23:57.105105Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:23:57.105197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:23:57.105255Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:23:57.105440Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:23:57.105483Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:23:57.105519Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:23:57.105569Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:23:57.105613Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:23:57.183285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:207:2208], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:23:57.183374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:23:57.184171Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:205:2207], serverId# [1:207:2208], sessionId# [0:0:0] 2025-04-07T14:23:57.188393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:128:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-07T14:23:57.188479Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:23:57.188580Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-07T14:23:57.188860Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-07T14:23:57.188918Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-07T14:23:57.189001Z node 1 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 1 at tablet 9437184 2025-04-07T14:23:57.189056Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:23:57.189098Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-07T14:23:57.189136Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-07T14:23:57.189169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:23:57.189485Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-07T14:23:57.189539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-07T14:23:57.189581Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-07T14:23:57.189632Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:23:57.189692Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-07T14:23:57.189723Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-07T14:23:57.189758Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-07T14:23:57.189791Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-07T14:23:57.189818Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-07T14:23:57.217663Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-07T14:23:57.217758Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-07T14:23:57.217816Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-07T14:23:57.217867Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-07T14:23:57.217939Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-07T14:23:57.219486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2214], Recipient [1:128:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:23:57.219557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:23:57.219604Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:214:2214], sessionId# [0:0:0] 2025-04-07T14:23:57.219777Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:128:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-07T14:23:57.219808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:23:57.219961Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-07T14:23:57.220011Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:23:57.220047Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-07T14:23:57.220084Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-07T14:23:57.223932Z node 1 :TX_DATASHARD DEBUG: 
Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-07T14:23:57.224007Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:23:57.224297Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:128:2151], Recipient [1:128:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:23:57.224334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:23:57.224391Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:23:57.224432Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:23:57.224464Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:23:57.224507Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-07T14:23:57.224542Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-07T14:23:57.224585Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:23:57.224621Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-07T14:23:57.224654Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-07T14:23:57.224720Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-07T14:23:57.224903Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-07T14:23:57.224942Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:23:57.224975Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-07T14:23:57.224997Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-07T14:23:57.225017Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-07T14:23:57.225081Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-07T14:23:57.225107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-07T14:23:57.225141Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-07T14:23:57.225170Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-07T14:23:57.225228Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-07T14:23:57.225272Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-07T14:23:57.225305Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-07T14:23:57.225347Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:23:57.225366Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-07T14:23:57.225385Z node 1 :TX_DATASHARD TRACE: ... 
1] at 9437184 is Executed 2025-04-07T14:24:00.546980Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit DropCdcStream 2025-04-07T14:24:00.547000Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CreateIncrementalRestoreSrc 2025-04-07T14:24:00.547021Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-04-07T14:24:00.547041Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:24:00.547061Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-04-07T14:24:00.547081Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-04-07T14:24:00.547103Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-04-07T14:24:00.547319Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-04-07T14:24:00.547352Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-04-07T14:24:00.547388Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-04-07T14:24:00.547422Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-04-07T14:24:00.547455Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-07T14:24:00.547474Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-04-07T14:24:00.547497Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-04-07T14:24:00.547532Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:00.547564Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-07T14:24:00.547597Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:24:00.547630Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:24:00.549092Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:130:2153]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-07T14:24:00.549145Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:24:00.549184Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-07T14:24:00.549222Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:24:00.551173Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-04-07T14:24:00.551234Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-04-07T14:24:00.551291Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:24:00.552289Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:24:00.552335Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-04-07T14:24:00.552373Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:24:00.552426Z node 5 :TX_DATASHARD INFO: Change sender 
activated: at tablet: 9437184 2025-04-07T14:24:00.552458Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-04-07T14:24:00.552512Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:97:2132], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:24:00.552582Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-07T14:24:00.552683Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:24:00.553309Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 130 RawX2: 21474838633 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-04-07T14:24:00.553403Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:219:2219], Recipient [5:130:2153]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:24:00.553439Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:24:00.553525Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:120:2146], Recipient [5:130:2153]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-07T14:24:00.553558Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T14:24:00.553597Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-07T14:24:00.553653Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-07T14:24:00.554023Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:97:2132], Recipient [5:130:2153]: NActors::TEvents::TEvPoison 2025-04-07T14:24:00.554369Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-04-07T14:24:00.554421Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-04-07T14:24:00.564169Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:224:2221], Recipient [5:227:2222]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:00.567615Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:224:2221], Recipient [5:227:2222]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:00.567893Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:224:2221], Recipient [5:227:2222]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:00.574435Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:227:2222] 2025-04-07T14:24:00.574687Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:00.579600Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-07T14:24:00.616100Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:00.616284Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:00.618151Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:24:00.618227Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:24:00.618279Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:24:00.618567Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:00.618632Z node 5 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-04-07T14:24:00.618713Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 
1 state Ready TxInFly 0 2025-04-07T14:24:00.618854Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:00.619063Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:271:2259] 2025-04-07T14:24:00.619097Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:24:00.619137Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-07T14:24:00.619168Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:24:00.619392Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-07T14:24:00.619479Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-07T14:24:00.619639Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:227:2222], Recipient [5:227:2222]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:00.619687Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:00.619847Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:24:00.619914Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:24:00.620082Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:227:2222]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-07T14:24:00.620120Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-07T14:24:00.620156Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-07T14:24:00.620190Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:24:00.620324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 227 RawX2: 21474838702 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-07T14:24:00.620448Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:227:2222]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-07T14:24:00.620491Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-07T14:24:00.620531Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-07T14:24:00.620572Z node 5 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 9437184: waitStep# 0 readStep# 0 observedStep# 1000001 2025-04-07T14:24:00.620645Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:24:00.620705Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:00.620742Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:24:00.620778Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:24:00.620813Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:24:00.620846Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-07T14:24:00.620887Z node 5 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-07T14:24:00.620963Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:269:2257], Recipient [5:227:2222]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:24:00.620993Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:24:00.621083Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:120:2146], Recipient [5:227:2222]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-07T14:24:00.621114Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T14:24:00.621162Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-07T14:24:00.621228Z node 5 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 9437184 promoting UnprotectedReadEdge to v0/18446744073709551615 2025-04-07T14:24:00.621298Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-07T14:24:00.654044Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:269:2257], Recipient [5:227:2222]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:24:00.654114Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |98.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> TIndexProcesorTests::TestCreateIndexProcessor >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] >> TDataShardRSTest::TestCleanupInRS >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] [GOOD] 
>> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> DataShardCompaction::CompactBorrowed >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 20914, MsgBus: 27781 2025-04-07T14:23:27.194031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576754659617160:2161];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:27.196528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001077/r3tmp/tmpyMak7R/pdisk_1.dat 2025-04-07T14:23:27.641275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:27.662506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:27.662602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:27.673637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20914, node 1 2025-04-07T14:23:27.736825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:27.736846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:27.736853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:27.736937Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27781 TClient is connected to server localhost:27781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:23:28.342386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:28.361425Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:30.357435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576767544519542:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.357573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.358005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576767544519577:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:30.361628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:30.375228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576767544519579:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:30.692065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:30.797238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:30.797410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:30.797575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:30.797599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:30.797621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:30.797721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:30.797732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:30.797809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:30.797809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:30.797885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:30.797923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:30.797999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:30.798023Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:30.798082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:30.798152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:30.798164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:30.798252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:30.798257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:30.798342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:30.798342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:30.798432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:30.798433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:30.798519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576767544519786:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:30.806285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576767544519784:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:30.835257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576767544519792:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:30.835311Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7490576767544519792:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:30.835491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7490576767544519792:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:30.835578Z node ... 4:23:56.352561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490576861831239895:2826];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.352854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490576861831239895:2826];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576861831239861:2823];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576861831239861:2823];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576861831239855:2820];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[2:7490576848946334779:2433];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037921;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7490576844651367290:2374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7490576848946334716:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037957;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7490576848946334716:2412];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7490576861831239859:2821];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.353907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7490576861831239492:2789];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037998;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.354027Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038014;self_id=[2:7490576861831239859:2821];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.354048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7490576861831239492:2789];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037998;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.354196Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7490576848946334609:2395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.354349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7490576848946334609:2395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.354949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7490576848946334709:2405];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.355106Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7490576861831239832:2814];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.355148Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7490576861831239854:2819];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.355323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7490576861831239854:2819];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.355498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7490576848946334777:2431];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.355647Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7490576848946334777:2431];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.363945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490576861831239829:2811];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.364228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7490576861831239829:2811];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.366287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7490576861831240759:2887];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.369000Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038049;self_id=[2:7490576861831240672:2862];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.370705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7490576861831240695:2878];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.370973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576861831239828:2810];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.371391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[2:7490576848946334594:2381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037967;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.371614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7490576861831240677:2863];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.371802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7490576861831240759:2887];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.372963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7490576861831239893:2824];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.373213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7490576861831240672:2862];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.373387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7490576861831240695:2878];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.373547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576861831239828:2810];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.373674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[2:7490576848946334594:2381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037967;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.373806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7490576861831240677:2863];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.375424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7490576861831239893:2824];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.375887Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038012;self_id=[2:7490576861831240020:2839];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.376095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490576861831240823:2891];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.376275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7490576861831239902:2833];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.376410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7490576861831239902:2833];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.377261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7490576861831240020:2839];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:56.377430Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490576861831240823:2891];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:24:00.101099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:00.101141Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 >> TRangeTreap::Sequential [GOOD] >> TRangeTreap::Random >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding >> TRangeTreap::Random [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] Test command err: Trying to start YDB, gRPC: 16346, MsgBus: 9834 2025-04-07T14:23:38.510482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576803898832363:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:38.510578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc2/r3tmp/tmppnula4/pdisk_1.dat 2025-04-07T14:23:38.955388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:38.955474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:38.957453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:38.992363Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16346, node 1 2025-04-07T14:23:39.137476Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:39.137506Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-07T14:23:39.137515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:39.137575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9834 TClient is connected to server localhost:9834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:39.841695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:41.769442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576816783734671:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.769529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.769917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576816783734682:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.773533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:41.781755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576816783734685:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:42.099813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:42.225475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.170009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:43.816765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576803898832363:2255];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:43.872473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 25985, MsgBus: 7752 2025-04-07T14:23:51.533838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576860684949354:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:51.533917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000fc2/r3tmp/tmp2sBFrD/pdisk_1.dat 2025-04-07T14:23:51.658819Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:51.685580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:51.685687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:51.687011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25985, node 2 2025-04-07T14:23:51.781224Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:51.781248Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:51.781261Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:51.781365Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7752 TClient is connected to server localhost:7752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:52.267950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:52.274854Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:23:54.734916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576873569851857:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.735042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576873569851847:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.735292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.738272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:54.751496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576873569851870:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:23:54.853223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.888496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:55.949263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:56.809920Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576860684949354:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:56.869953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_query_cache.py::TestQueryCache::test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD] Test command err: NOTE: building treap of size 1000000 got height 52 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 31981753 comparisons (31.981753 per op) NOTE: building treap of size 9279 got height 32 and needed 10467 ops (9746 inserts 254 updates 467 deletes) and 202955 comparisons (19.38998758 per op) Checking point 1891 ... found 1489 ranges, needed 3934 comparisons (2.642041639 per range) Checking point 7778 ... found 2314 ranges, needed 8473 comparisons (3.661624892 per range) Checking point 5970 ... found 2424 ranges, needed 8856 comparisons (3.653465347 per range) Checking point 4458 ... found 2438 ranges, needed 8558 comparisons (3.510254307 per range) Checking point 1586 ... found 1301 ranges, needed 3320 comparisons (2.551883167 per range) Checking point 3004 ... found 2026 ranges, needed 6142 comparisons (3.031589339 per range) Checking point 7706 ... found 2304 ranges, needed 8494 comparisons (3.686631944 per range) Checking point 7868 ... found 2309 ranges, needed 8476 comparisons (3.670853183 per range) Checking point 8507 ... found 2353 ranges, needed 8550 comparisons (3.633659159 per range) Checking point 2469 ... 
found 1782 ranges, needed 5050 comparisons (2.833894501 per range) |98.5%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 29421, MsgBus: 18511 2025-04-07T14:23:36.933037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576794056572408:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:36.934174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001050/r3tmp/tmpjcdGTU/pdisk_1.dat 2025-04-07T14:23:37.394550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:37.412721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:37.412824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:37.418101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29421, node 1 2025-04-07T14:23:37.557181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:37.557200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:37.557208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:37.557292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18511 TClient is connected to server localhost:18511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:38.271856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:38.292555Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:40.419587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576811236442151:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:40.419678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576811236442146:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:40.420145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:40.424633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:40.437914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576811236442169:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:40.755396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:40.862940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-07T14:23:41.783475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:42.433601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576794056572408:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:42.484007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 1186, MsgBus: 7487 2025-04-07T14:23:48.992229Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576847810981441:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:48.992591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001050/r3tmp/tmpWc6AAc/pdisk_1.dat 2025-04-07T14:23:49.157094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:49.172118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:49.172192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:49.174218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1186, node 2 2025-04-07T14:23:49.227007Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:49.227028Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:49.227037Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:49.227141Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7487 TClient is connected to server localhost:7487 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:49.610749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:52.011596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576864990851240:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.011663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576864990851248:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.011720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:52.015198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:52.027335Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:23:52.027610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576864990851254:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:23:52.133405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:52.292121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7490576864990851493:2319];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:52.292121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7490576864990851519:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:52.292367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7490576864990851493:2319];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:52.292368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7490576864990851519:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:52.292711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7490576864990851493:2319];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:52.292713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7490576864990851519:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:52.2 ... 
3133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7490576877875758300:2929];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.044871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490576877875758006:2860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.045213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7490576877875758006:2860];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.046433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7490576877875757983:2851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.046724Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7490576877875757983:2851];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.047379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490576877875758284:2918];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.047642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7490576877875758284:2918];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.047892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7490576877875758181:2871];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.048145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7490576877875758181:2871];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.050351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7490576877875758251:2895];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.050705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7490576877875758251:2895];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.051292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576877875758249:2893];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.051610Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576877875758249:2893];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 
2025-04-07T14:23:59.053919Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7490576877875758393:2962];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.054218Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7490576877875758393:2962];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.054479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7490576877875758215:2879];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038086;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.054780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7490576877875758215:2879];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038086;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.057488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7490576877875758210:2877];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.057852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7490576877875758210:2877];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.059627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7490576877875758205:2876];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.059962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7490576877875758205:2876];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.060753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7490576877875758239:2888];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.061163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7490576877875758239:2888];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.061352Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7490576864990852287:2443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037935;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.064638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7490576877875758287:2921];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.066638Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038045;self_id=[2:7490576877875758261:2902];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.086613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7490576877875758261:2902];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.087080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576877875758291:2924];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.087470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576877875758291:2924];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.087628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7490576864990852226:2432];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.087775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7490576864990852226:2432];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.087977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7490576877875758317:2942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.088267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7490576877875758317:2942];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.088385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7490576864990852287:2443];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.088547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7490576877875758287:2921];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.089001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7490576877875758280:2915];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.089299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7490576877875758280:2915];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.477825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7490576864990852300:2449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.481516Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;self_id=[2:7490576864990852301:2450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037927;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.481809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[2:7490576864990852301:2450];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037927;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.482415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[2:7490576864990852421:2473];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037928;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.482552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[2:7490576864990852421:2473];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037928;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:23:59.482960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7490576864990852300:2449];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:24:04.144884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:04.144916Z node 2 :IMPORT WARN: Table profiles were not loaded >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource |98.5%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 64329, MsgBus: 21655 2025-04-07T14:23:32.584082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576778828685508:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:32.584309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001065/r3tmp/tmpds0dAG/pdisk_1.dat 2025-04-07T14:23:33.043459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:33.043587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:33.045356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64329, node 1 2025-04-07T14:23:33.098346Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:23:33.110686Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:23:33.116335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:33.162698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:33.162723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:33.162730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:33.162831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21655 TClient is connected to server localhost:21655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:33.694443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:33.719022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-07T14:23:33.858938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:23:33.989707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:34.062079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:35.582561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576791713588974:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.582689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:35.793244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.821600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.858399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.879262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.910097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:35.940799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:36.026878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576796008556772:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:36.026946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:36.027322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576796008556777:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:36.029971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:36.036789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576796008556779:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-07T14:23:37.580969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576778828685508:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:37.581026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:23:48.113280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:23:48.113323Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:49.527307Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576851843132616:2601], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OGFiM2E2YmQtZTE3YjZkNmMtNzg0YjliZmMtMzZiMTk0Yzc=. CustomerSuppliedId : . TraceId : 01jr8a5nj84n87643wftrq3qhj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744035817000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744035817300/18446744073709551615 (node# 1 state# Ready) } } 2025-04-07T14:23:49.527699Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576851843132616:2601], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OGFiM2E2YmQtZTE3YjZkNmMtNzg0YjliZmMtMzZiMTk0Yzc=. CustomerSuppliedId : . TraceId : 01jr8a5nj84n87643wftrq3qhj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744035817000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744035817300/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-07T14:23:49.528168Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7490576851843132617:2602], TxId: 281474976710682, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=OGFiM2E2YmQtZTE3YjZkNmMtNzg0YjliZmMtMzZiMTk0Yzc=. TraceId : 01jr8a5nj84n87643wftrq3qhj. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7490576851843132612:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:23:49.537357Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFiM2E2YmQtZTE3YjZkNmMtNzg0YjliZmMtMzZiMTk0Yzc=, ActorId: [1:7490576796008557133:2459], ActorState: ExecuteState, TraceId: 01jr8a5nj84n87643wftrq3qhj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5278, MsgBus: 21340 2025-04-07T14:23:50.153502Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576854343844376:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:50.153570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001065/r3tmp/tmprr2e1r/pdisk_1.dat 2025-04-07T14:23:50.235964Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5278, node 2 2025-04-07T14:23:50.289648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:50.289739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:50.305437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:50.421316Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:50.421342Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:50.421350Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:50.421468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21340 TClient is connected to server localhost:21340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:50.928080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:50.939863Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:23:50.956166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:23:51.031306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:51.287634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:51.373593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:54.032342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576871523715261:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.032418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.071389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.141298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.185697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.242151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.269457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.302714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-07T14:23:54.342955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576871523715761:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.343046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.343053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490576871523715766:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:54.346573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-07T14:23:54.362822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7490576871523715768:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-07T14:23:55.153805Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576854343844376:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:55.153862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:24:05.225632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:05.225669Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:06.100757Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576923063324186:2593], TxId: 281474976715680, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr8a65ph98vd51gmgqnvtw63. SessionId : ydb://session/3?node_id=2&id=OTRjZjY5NzgtNTYxY2M5ODItMjQ2Njk1Y2YtNzI1N2RkZjk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744035835451/18446744073709551615 shard 72075186224037888 with lowWatermark v1744035835773/18446744073709551615 (node# 2 state# Ready) } } 2025-04-07T14:24:06.100822Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576923063324186:2593], TxId: 281474976715680, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr8a65ph98vd51gmgqnvtw63. SessionId : ydb://session/3?node_id=2&id=OTRjZjY5NzgtNTYxY2M5ODItMjQ2Njk1Y2YtNzI1N2RkZjk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744035835451/18446744073709551615 shard 72075186224037888 with lowWatermark v1744035835773/18446744073709551615 (node# 2 state# Ready) } }. 2025-04-07T14:24:06.101604Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7490576923063324187:2594], TxId: 281474976715680, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTRjZjY5NzgtNTYxY2M5ODItMjQ2Njk1Y2YtNzI1N2RkZjk=. TraceId : 01jr8a65ph98vd51gmgqnvtw63. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7490576923063324182:2461], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-07T14:24:06.102752Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTRjZjY5NzgtNTYxY2M5ODItMjQ2Njk1Y2YtNzI1N2RkZjk=, ActorId: [2:7490576875818683378:2461], ActorState: ExecuteState, TraceId: 01jr8a65ph98vd51gmgqnvtw63, Create QueryResponse for error on request, msg: |98.5%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> DataShardStats::OneChannelStatsCorrect >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] >> MediatorTimeCast::GranularTimecast [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: 2025-04-07T14:24:00.468196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:00.468629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:00.468947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f55/r3tmp/tmp6hPrUR/pdisk_1.dat 2025-04-07T14:24:00.901254Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-04-07T14:24:00.902095Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-07T14:24:00.905676Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-04-07T14:24:00.943464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:00.997918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:00.998386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:01.010100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:01.127713Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-04-07T14:24:01.264340Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-04-07T14:24:01.540971Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-04-07T14:24:01.770146Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-04-07T14:24:01.995033Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-04-07T14:24:02.241233Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-04-07T14:24:02.287568Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-04-07T14:24:02.520450Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-04-07T14:24:02.768512Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-04-07T14:24:02.774186Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-04-07T14:24:02.805302Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-07T14:24:02.805846Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-04-07T14:24:02.818071Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-04-07T14:24:02.966795Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-04-07T14:24:03.118576Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-04-07T14:24:03.375036Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-04-07T14:24:03.611084Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-04-07T14:24:03.865666Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-04-07T14:24:04.119661Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-04-07T14:24:07.367477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:07.367862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:07.367939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f55/r3tmp/tmpa4UQAR/pdisk_1.dat 2025-04-07T14:24:07.623193Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-07T14:24:07.623874Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-04-07T14:24:07.623955Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-07T14:24:07.624009Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:615:2515] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-04-07T14:24:07.624283Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-07T14:24:07.624414Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-04-07T14:24:07.624512Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-04-07T14:24:07.624731Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-07T14:24:07.624798Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-04-07T14:24:07.624836Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:618:2517] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-04-07T14:24:07.624995Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-04-07T14:24:07.625139Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-07T14:24:07.625201Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-04-07T14:24:07.625242Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:619:2518] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-04-07T14:24:07.625401Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-04-07T14:24:07.648384Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:07.700523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:07.700649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:07.712862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:07.824022Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-04-07T14:24:07.824162Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-04-07T14:24:07.963668Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-04-07T14:24:07.963777Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-04-07T14:24:08.243686Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-04-07T14:24:08.243792Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-04-07T14:24:08.384501Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-04-07T14:24:08.528734Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing:: ... EvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.786813Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.797789Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.808444Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.820483Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.844387Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.866142Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.887594Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... 
restarting mediator 2025-04-07T14:24:08.899223Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-04-07T14:24:08.899456Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-07T14:24:08.899516Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-07T14:24:08.900448Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-07T14:24:08.900602Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-07T14:24:08.900646Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-07T14:24:08.901132Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-07T14:24:08.901217Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 10 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-07T14:24:08.901246Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-07T14:24:08.909267Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.909908Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.943685Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.955120Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-07T14:24:08.966007Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... 
tablet3 at 3500 |98.5%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: 2025-04-07T14:24:03.601150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:03.601672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:03.601996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d42/r3tmp/tmpaqsVCj/pdisk_1.dat 2025-04-07T14:24:04.137824Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-07T14:24:04.137921Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.146447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:04.147323Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-07T14:24:04.147430Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.148536Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.148683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.148818Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-07T14:24:04.155319Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-07T14:24:04.155424Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.156988Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-07T14:24:04.157112Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.157566Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.157649Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.157727Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-04-07T14:24:04.157849Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-07T14:24:04.157896Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.158075Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-07T14:24:04.158124Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.158354Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.158395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.158457Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-07T14:24:04.158544Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-07T14:24:04.158582Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.159587Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-07T14:24:04.159641Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.159885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.159932Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.159974Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-07T14:24:04.161933Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-07T14:24:04.162005Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.162087Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:24:04.162163Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-04-07T14:24:04.175232Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:7} Tx{12, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-07T14:24:04.175338Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:7} Tx{12, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.175730Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:7} Tx{12, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{6, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:04.175826Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:7} Tx{12, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.203247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:04.203903Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-04-07T14:24:04.204038Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.206848Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:24:04.206948Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.207176Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-04-07T14:24:04.207267Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.208790Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:24:04.208877Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.221559Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:7:0:0:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:04.221755Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} commited cookie 1 for step 7 2025-04-07T14:24:04.221890Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{13, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-04-07T14:24:04.221966Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{13, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.222474Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{13, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:24:04.222569Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{13, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:04.257962Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:24:04.258102Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-04-07T14:24:04.258166Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:04.258399Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:24:04.258467Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxA ... ndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2025-04-07T14:24:08.745016Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [1:893:2727], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-07T14:24:08.745104Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037888, for tableId 2 2025-04-07T14:24:08.860983Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{23, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:08.861065Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{23, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:08.861208Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{23, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:08.861253Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{23, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:08.861680Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:08.861739Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:08.861863Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-04-07T14:24:08.885766Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-04-07T14:24:08.885836Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:08.886002Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 
4 } -{ }, 0 gb} 2025-04-07T14:24:08.886076Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:08.896828Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:08.896943Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-04-07T14:24:09.124912Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:09.124978Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:09.125132Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:09.125185Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:09.125597Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.125683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.125763Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-04-07T14:24:09.397059Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:09.397141Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:09.397306Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:09.397358Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:09.397781Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.397858Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.397952Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-04-07T14:24:09.640196Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:09.640311Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{26, 
NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:09.640465Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:09.640546Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:09.641017Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.641078Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.641178Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-04-07T14:24:09.884842Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:09.884932Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:09.885061Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:09.885118Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:09.885519Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.885617Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:09.885715Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-04-07T14:24:09.898414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:09.898473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:10.044320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:24:10.044399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:24:10.044492Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:14} Tx{18, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-04-07T14:24:10.044565Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:14} Tx{18, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:10.044632Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-04-07T14:24:10.044717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 
planned 0 2025-04-07T14:24:10.044762Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:24:10.044803Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:10.044839Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:10.044938Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:14} Tx{18, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:24:10.045049Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:14} Tx{18, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:10.045252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:24:10.150302Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-07T14:24:10.150388Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:24:10.150548Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-07T14:24:10.150590Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:24:10.151022Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:10.151112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-07T14:24:10.151196Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 
2025-04-07T14:24:10.324410Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:14} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |98.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] >> Metering::MockedNetClassifierOnly [GOOD] >> Metering::MockedNetClassifierLabelTransformation >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> RangeOps::Intersection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; 
(Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |98.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> test_query_cache.py::TestQueryCache::test [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> 
KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> KqpSinkTx::OlapInteractive [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-04-07T14:23:56.869124Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-07T14:23:56.869182Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-07T14:23:56.869273Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-07T14:23:56.869301Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-07T14:23:56.869336Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: 
attempt# 0 2025-04-07T14:23:56.869405Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-07T14:23:56.871220Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-07T14:23:56.885987Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } 
PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 
1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: ... h: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34220026 2025-04-07T14:24:12.922670Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34220026 2025-04-07T14:24:12.922794Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34220026 2025-04-07T14:24:12.922946Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34220026 2025-04-07T14:24:12.923014Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T14:24:12.934862Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 
2025-04-07T14:24:12.934950Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T14:24:12.935065Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-04-07T14:24:12.935106Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-04-07T14:24:12.935143Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-04-07T14:24:12.935173Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-04-07T14:24:12.935196Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-04-07T14:24:12.935275Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-04-07T14:24:12.935303Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-04-07T14:24:12.935336Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-04-07T14:24:12.935642Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.936406Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.936627Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 34260026 2025-04-07T14:24:12.936788Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.936908Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.937058Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.937238Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.937375Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: 
Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:24:12.937443Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T14:24:12.937836Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 5:22, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:24:12.937898Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 3:13, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:24:12.937931Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 5:23, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:24:12.937979Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-07T14:24:12.938197Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 126 2025-04-07T14:24:12.938222Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-07T14:24:12.948549Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-07T14:24:12.948614Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-07T14:24:12.948885Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 127 2025-04-07T14:24:12.948964Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 3:13 2025-04-07T14:24:12.949017Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 5:22 2025-04-07T14:24:12.949038Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 5:23 >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues |98.5%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 63478, MsgBus: 27379 2025-04-07T14:23:38.709378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576801878361539:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:38.714673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f8f/r3tmp/tmp68WSt6/pdisk_1.dat 2025-04-07T14:23:39.040225Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63478, node 1 2025-04-07T14:23:39.094801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:39.094896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:39.097002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:39.151605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:39.151624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:39.151633Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:39.151743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27379 TClient is connected to server localhost:27379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:39.734391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:39.756923Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:41.771353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576814763264045:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.771374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576814763264057:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.771500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:41.774947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:41.783522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576814763264059:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:42.133785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:42.227865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:42.228127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:42.228422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:42.228544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:42.228630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:42.228913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:42.229101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:42.229196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:42.229376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:42.229383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:42.229590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:42.229615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:42.229735Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:42.229818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:42.229838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:42.229954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:42.229965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:42.230058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:42.230061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:42.230175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:42.230188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:42.230286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7490576819058231585:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:42.230288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:42.230401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7490576819058231598:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:42.263250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576819058231586:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:42.263319Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7490576819058231586:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:42.263536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7490576819058231586:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:42.263627Z node ... 1885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7490576912944481634:2859];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.982072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576912944481780:2897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.982599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7490576912944481780:2897];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.982774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7490576912944481757:2889];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.982904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7490576912944481634:2859];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.986035Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490576912944481932:2942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.986298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7490576908649514194:2800];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.989115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7490576912944481932:2942];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.989332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7490576908649514194:2800];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.992350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7490576912944481610:2842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.992542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7490576912944481610:2842];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.992757Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038031;self_id=[2:7490576912944481651:2876];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.993693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7490576912944481651:2876];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.994477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7490576912944481641:2866];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.995195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7490576912944481933:2943];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.995735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7490576912944481641:2866];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:07.995933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7490576912944481933:2943];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.001141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576908649514206:2803];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.002229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7490576908649514206:2803];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.003848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7490576912944481532:2824];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.004191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576912944481524:2820];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.004899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7490576912944481532:2824];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.006494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7490576912944481524:2820];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.007308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7490576912944481554:2835];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.007532Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038057;self_id=[2:7490576912944481554:2835];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.008177Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576912944481875:2925];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.008938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7490576912944481875:2925];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.015062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7490576912944481657:2880];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.016236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7490576912944481657:2880];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.020115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7490576912944481801:2904];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.021174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7490576912944481801:2904];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.021917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7490576912944481629:2854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.023030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7490576912944481629:2854];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.026171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576912944481850:2921];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.027294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7490576895764608378:2403];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.027641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7490576912944481850:2921];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.027804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7490576895764608378:2403];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.033230Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038008;self_id=[2:7490576912944481876:2926];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.033462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7490576912944481908:2928];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.035616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7490576912944481876:2926];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.035818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7490576912944481908:2928];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.037539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7490576895764608365:2393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037937;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:08.041950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7490576895764608365:2393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-07T14:24:10.933149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:10.933180Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> Metering::MockedNetClassifierLabelTransformation [GOOD] >> SHA256Test::SHA256Test [GOOD] >> Graph::LocalBordersOnGet [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] >> 
test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] [GOOD]
>> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets]
>> GraphShard::NormalizeAndDownsample1 [GOOD]
>> GraphShard::NormalizeAndDownsample2 [GOOD]
>> GraphShard::NormalizeAndDownsample3 [GOOD]
>> GraphShard::NormalizeAndDownsample4 [GOOD]
>> GraphShard::NormalizeAndDownsample5 [GOOD]
>> GraphShard::NormalizeAndDownsample6 [GOOD]
>> GraphShard::CheckHistogramToPercentileConversions [GOOD]
>> GraphShard::CreateGraphShard
------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139]
2025-04-07T14:23:56.278786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-07T14:23:56.278876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-07T14:23:56.278906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-07T14:23:56.278929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-07T14:23:56.279629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-07T14:23:56.279658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-07T14:23:56.279714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-07T14:23:56.281347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-07T14:23:56.347412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-07T14:23:56.347486Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-07T14:23:56.358327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-07T14:23:56.358447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-07T14:23:56.358617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-07T14:23:56.370747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-07T14:23:56.370939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-07T14:23:56.374735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-07T14:23:56.375215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-07T14:23:56.384524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-07T14:23:56.402641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-07T14:23:56.402744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:56.402922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:23:56.402980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:56.403104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:23:56.403308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.413476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:23:56.548258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:23:56.549660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.550567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:23:56.553253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:23:56.553348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.561792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:56.562566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:23:56.562956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.563020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:23:56.563149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:23:56.563209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:23:56.565528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.565589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:23:56.565629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:23:56.568795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.568864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.568915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:56.568974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:23:56.573146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:56.575156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:23:56.576378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:23:56.577999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:23:56.578150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:23:56.578208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:56.578572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:23:56.578630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:23:56.578831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:56.578935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:23:56.581319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:23:56.581382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:23:56.581566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:23:56.581610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:23:56.581959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:23:56.582009Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:23:56.582123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:23:56.582214Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:56.582271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:23:56.582315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:23:56.582353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:23:56.582396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:23:56.582473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:23:56.582513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:23:56.582546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:23:56.584645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:56.584781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:23:56.584868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:23:56.584927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:23:56.584963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:23:56.585094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
cs 2025-04-07T14:24:17.764689Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-04-07T14:24:17.764712Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.764738Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.764762Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.764807Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-04-07T14:24:17.764821Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.764837Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.764856Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.764906Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-04-07T14:24:17.764926Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.764953Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.764982Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765036Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-04-07T14:24:17.765056Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765079Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765105Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765176Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-04-07T14:24:17.765195Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765218Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765245Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765320Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-04-07T14:24:17.765341Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765363Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765389Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765469Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-04-07T14:24:17.765491Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765518Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765545Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765623Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-04-07T14:24:17.765645Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765670Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765696Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765746Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-04-07T14:24:17.765767Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765796Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765822Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.765887Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-04-07T14:24:17.765905Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.765928Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.765951Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766000Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 
2025-04-07T14:24:17.766020Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766044Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766068Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766136Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-04-07T14:24:17.766154Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766173Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766206Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766278Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-04-07T14:24:17.766301Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766325Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766357Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766423Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-04-07T14:24:17.766442Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766467Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766495Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766579Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-04-07T14:24:17.766601Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766625Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766654Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766736Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-04-07T14:24:17.766757Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766782Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766814Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.766879Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-04-07T14:24:17.766902Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.766927Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.766956Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.767047Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-04-07T14:24:17.767071Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.767098Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.767129Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.767210Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-04-07T14:24:17.768297Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-07T14:24:17.768342Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-07T14:24:17.768388Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-07T14:24:17.768488Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:561:2488] 2025-04-07T14:24:17.768555Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-04-07T14:24:17.768606Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-04-07T14:24:17.780876Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.780958Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.780982Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781000Z node 6 :GRAPH DEBUG: SHARD 
TTxStoreMetrics::Complete 2025-04-07T14:24:17.781016Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781037Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781060Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781084Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781104Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781120Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781135Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781150Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781166Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781182Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781197Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781213Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781230Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781245Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781260Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781276Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781291Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781326Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781349Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781371Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781393Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781414Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781440Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781462Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781482Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781502Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781525Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781547Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781568Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781593Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781614Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781635Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781655Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781678Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781699Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781720Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781742Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781764Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781788Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781811Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781832Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781854Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781875Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781903Z node 6 :GRAPH 
DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781927Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781949Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781971Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.781995Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782017Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782038Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782058Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782076Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782097Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782116Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782135Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782158Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-07T14:24:17.782192Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-04-07T14:24:17.782242Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-04-07T14:24:17.782399Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-04-07T14:24:17.782441Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:562:2489] |98.6%| [TS] {RESULT} ydb/core/graph/ut/unittest >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-04-07T14:23:55.228609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576875900347646:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:55.228680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed1/r3tmp/tmpu68rRV/pdisk_1.dat 2025-04-07T14:23:55.598929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:55.681333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:55.681456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:55.692476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:55.705078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:55.705107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:55.705129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:55.705522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:24:00.228765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576875900347646:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:00.228843Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:24:01.163349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490576902852199986:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:01.163423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed1/r3tmp/tmpUFEwFi/pdisk_1.dat 2025-04-07T14:24:01.269072Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:01.290648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:01.290736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:01.292324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:01.301261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:01.301280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:01.301286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:01.301398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:24:06.167644Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7490576902852199986:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:06.167729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:24:12.018314Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490576951495501637:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:12.018400Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ed1/r3tmp/tmpCR4lZm/pdisk_1.dat 2025-04-07T14:24:12.109749Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:12.126545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:12.126649Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:12.128163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:12.140979Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:12.140999Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:12.141005Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:12.141096Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:24:17.017941Z node 3 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490576951495501637:2053];send_to=[0:7307199536658146131:7762515];
2025-04-07T14:24:17.018015Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
|98.6%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding
>> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD]
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD]
>> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] [GOOD]
>> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets]
>> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD]
>> KeyValueGRPCService::SimpleCopyUnexistedKey
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery
>> GraphShard::CreateGraphShard [GOOD]
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD]
|98.6%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries
>> TQuoterServiceTest::StaticMultipleAndResources [GOOD]
>> TQuoterServiceTest::StaticDeadlines
>> KqpTpch::Query03 [GOOD]
>> KqpTpch::Query04
>> DataShardBackgroundCompaction::ShouldCompact
------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139]
2025-04-07T14:24:19.856863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-07T14:24:19.857007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-07T14:24:19.857054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-07T14:24:19.857093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-07T14:24:19.860779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-07T14:24:19.860896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-07T14:24:19.861033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-07T14:24:19.862374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-07T14:24:19.932745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:24:19.932811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:19.941882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:19.941994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-07T14:24:19.942120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-07T14:24:19.955291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-07T14:24:19.955446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-07T14:24:19.958069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-07T14:24:19.958736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-07T14:24:19.966084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:24:19.982934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:24:19.983034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:24:19.983176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:24:19.983234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:24:19.983384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:24:19.983591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.001637Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-07T14:24:20.133746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-07T14:24:20.134827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.135740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-07T14:24:20.140815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-07T14:24:20.140940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.153624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-07T14:24:20.160883Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-07T14:24:20.161280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.161388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-07T14:24:20.161474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-07T14:24:20.161532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-07T14:24:20.164168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.164233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-07T14:24:20.164274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-07T14:24:20.166329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.166383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.166425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:24:20.166494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-07T14:24:20.171103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:24:20.173569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-07T14:24:20.174750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-07T14:24:20.176471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:24:20.176638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:24:20.176726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:24:20.177032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-07T14:24:20.177085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-07T14:24:20.177258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-07T14:24:20.177333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-07T14:24:20.181110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:24:20.181163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-07T14:24:20.181430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:24:20.181486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-07T14:24:20.181739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.181788Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-07T14:24:20.181881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-07T14:24:20.181914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:24:20.181963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-07T14:24:20.182004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-07T14:24:20.182039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-07T14:24:20.182068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-07T14:24:20.182157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-07T14:24:20.182214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-07T14:24:20.182248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-07T14:24:20.184133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:24:20.184264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-07T14:24:20.184302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-07T14:24:20.184336Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-07T14:24:20.184396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-07T14:24:20.184501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notif ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-07T14:24:20.438042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TenantSchemeShard: 72075186233409547 Status: StatusSuccess 2025-04-07T14:24:20.438092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#102:0 HandleReply TEvInitTenantSchemeShardResult operationId: 102:0 at schemeshard: 72057594046678944 2025-04-07T14:24:20.438150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#102:0 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186233409547 shardIdx: 72057594046678944:2 at schemeshard: 72057594046678944 2025-04-07T14:24:20.441791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.441862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-07T14:24:20.442147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-07T14:24:20.442253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-07T14:24:20.442290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-07T14:24:20.442322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-07T14:24:20.442358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-07T14:24:20.443218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.445276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.445399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.445458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.445497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-07T14:24:20.445545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-07T14:24:20.445684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:24:20.448367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-07T14:24:20.448480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-07T14:24:20.448853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-07T14:24:20.448983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 119 RawX2: 4294969441 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-07T14:24:20.449027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T14:24:20.449290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-07T14:24:20.449343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-07T14:24:20.449442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T14:24:20.449531Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:373:2344], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186233409550, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-07T14:24:20.453694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-07T14:24:20.453756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-07T14:24:20.453904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-07T14:24:20.453951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-07T14:24:20.454241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.454286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-07T14:24:20.454323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-07T14:24:20.454914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-07T14:24:20.455006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 
72057594046678944, cookie: 102 2025-04-07T14:24:20.455042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-07T14:24:20.455081Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-07T14:24:20.455129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-07T14:24:20.455208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-07T14:24:20.458593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-07T14:24:20.458655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-07T14:24:20.458757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-07T14:24:20.458801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:24:20.458845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-07T14:24:20.458891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-07T14:24:20.458957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-07T14:24:20.458995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-07T14:24:20.459185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-07T14:24:20.459774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-07T14:24:20.461438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-07T14:24:20.461484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-07T14:24:20.461834Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-07T14:24:20.461929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-07T14:24:20.461964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:549:2479] TestWaitNotification: OK eventTxId 102 2025-04-07T14:24:20.463207Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-07T14:24:20.463443Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 245us result status StatusSuccess 2025-04-07T14:24:20.465263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 SchemeShard: 72075186233409547 Hive: 72075186233409546 GraphShard: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.6%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: 2025-04-07T14:23:58.511267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:23:58.511674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:23:58.511925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009a6/r3tmp/tmpnLRnRy/pdisk_1.dat 2025-04-07T14:23:58.979338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.029586Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:59.087260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:59.087471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:59.098768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:59.241499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.678955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:734:2608], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:59.679056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:59.679137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:59.686021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:59.883295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:748:2616], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:24:00.764637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8a5znw7vqbj6mmsnnc9c90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc1MDc0MGItYTViODA4ZDEtZDIzNzM1MWQtNTgwMWYzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:00.843711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr8a5znw7vqbj6mmsnnc9c90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc1MDc0MGItYTViODA4ZDEtZDIzNzM1MWQtNTgwMWYzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.219136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8a60vdfbhkear98typ5crj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzN2RiZS03NDYzNTk2OC03Yzk3ZWY3Yi01MGY1ZTczYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.285835Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr8a60vdfbhkear98typ5crj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzN2RiZS03NDYzNTk2OC03Yzk3ZWY3Yi01MGY1ZTczYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.299011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr8a60vdfbhkear98typ5crj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzN2RiZS03NDYzNTk2OC03Yzk3ZWY3Yi01MGY1ZTczYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.307430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8a60vdfbhkear98typ5crj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzN2RiZS03NDYzNTk2OC03Yzk3ZWY3Yi01MGY1ZTczYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.428170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr8a619cetm00hpxz286x1w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjUxNDUyZjQtMTRiMjVkNzUtMWVkZmYwODctOTkxZjM4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.455446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr8a619cetm00hpxz286x1w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjUxNDUyZjQtMTRiMjVkNzUtMWVkZmYwODctOTkxZjM4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:01.631980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr8a61dz5ebep5xtx83vvkqy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThhMDdjNjUtNzNkODM4ZjAtNDk5MTlmMDYtN2ZmMDdhZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-07T14:24:04.924100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:04.924472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:04.924545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009a6/r3tmp/tmpOZ05xa/pdisk_1.dat 2025-04-07T14:24:05.179609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:05.205240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:05.259367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:05.259487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:05.270888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:05.391637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:05.777025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:783:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:05.777141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:05.777230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:05.781925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:05.999497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:797:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:24:06.623907Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8a65mf5j1tq5v21t5awjhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZmODZmNjEtMjNhNzkyODQtMWFlMmE5MGYtNWJhMDUxNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:06.655988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr8a65mf5j1tq5v21t5awjhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZmODZmNjEtMjNhNzkyODQtMWFlMmE5MGYtNWJhMDUxNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:06.673143Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8a65mf5j1tq5v21t5awjhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZmODZmNjEtMjNhNzkyODQtMWFlMmE5MGYtNWJhMDUxNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:06.682458Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr8a65mf5j1tq5v21t5awjhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZmODZmNjEtMjNhNzkyODQtMWFlMmE5MGYtNWJhMDUxNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:07.009756Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr8a66hm3x1tfpxxz4cpd7qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmJmY2VjYi1lOGI3MTlhMS1hNzgyZTMwNy03ODk2Y2VkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:07.049537Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8a66hm3x1tfpxxz4cpd7qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmJmY2VjYi1lOGI3MTlhMS1hNzgyZTMwNy03ODk2Y2VkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:07.057165Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr8a66hm3x1tfpxxz4cpd7qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmJmY2VjYi1lOGI3MTlhMS1hNzgyZTMwNy03ODk2Y2VkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:07.059783 ... { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-07T14:24:07.788054Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr8a67fydnnyvysypy02pvf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMxYWM3YTctM2UzN2YzMDgtZTA5M2JjOGYtYmEyNjg2YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-07T14:24:11.408148Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:11.408604Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:11.408774Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009a6/r3tmp/tmpnWECyU/pdisk_1.dat 2025-04-07T14:24:11.724749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:11.758591Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:11.805978Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:11.806133Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:11.817671Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:11.936549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:12.269983Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:734:2608], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:12.270105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:744:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:12.270184Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:12.275760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:12.486414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:748:2616], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:24:12.654535Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8a6bzcc3sdd059cqn9d9hz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWI4MDUxYjAtYWIxYTcxN2QtNDg3Mjc5MmEtNzllYzY4NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:12.686383Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr8a6bzcc3sdd059cqn9d9hz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWI4MDUxYjAtYWIxYTcxN2QtNDg3Mjc5MmEtNzllYzY4NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:12.837337Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr8a6cd4fp8qpeyjps63sb7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzVhN2UzZmMtM2UxNmIwYTItZDcyYTZiZmYtY2UxM2I0YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-04-07T14:24:13.046987Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr8a6che89wspg4yar8x9t84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNlNGQ5YTMtYTU1YjVlOTYtOTBhNjg0OTQtNTdlZjM5ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:13.072927Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr8a6che89wspg4yar8x9t84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNlNGQ5YTMtYTU1YjVlOTYtOTBhNjg0OTQtNTdlZjM5ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:13.368514Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8a6cs71mwpth4dbk8nh5cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGEwMTM1YjktMWZkYjJhZjItMjFlZTAwMmMtMTFhNTJlNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-04-07T14:24:13.546438Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr8a6d21cj5m81mzw28nt55s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTc3M2QxMmItYTY1MjVmNTctM2JkNzdmMmItYTFjN2E2MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:13.565340Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr8a6d21cj5m81mzw28nt55s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTc3M2QxMmItYTY1MjVmNTctM2JkNzdmMmItYTFjN2E2MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:13.749110Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr8a6d8jf9dvn3fdf4x42far, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjkxZWNhYWItN2RkYWUzNjItN2U5OTU2NjctODYyZjVhNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-04-07T14:24:18.402361Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:24:18.402493Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:18.402816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009a6/r3tmp/tmptLfKAy/pdisk_1.dat 2025-04-07T14:24:18.702856Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:18.733041Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:18.793144Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:18.793261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:18.804968Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:18.924449Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:19.277206Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:734:2608], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:19.277311Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:744:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:19.277382Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:19.283021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:19.485387Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:748:2616], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:24:19.577537Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:832:2668], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-04-07T14:24:19.579454Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWMzZDc1NmMtNDMyMTFjYzktZDYyZDAwYWMtYTUxOTBlZDM=, ActorId: [4:731:2605], ActorState: ExecuteState, TraceId: 01jr8a6jtbde71fmjbz2szsr2m, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-07T14:24:19.615305Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:854:2684], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-04-07T14:24:19.617233Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjg5OGFlNjUtZDdiOWViNGItMTk1ODkzODQtNTEzOWZlMzM=, ActorId: [4:846:2676], ActorState: ExecuteState, TraceId: 01jr8a6k3z5gjwc4p844ba7hq6, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |98.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> test_dynumber.py::test_dynumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 1409, MsgBus: 27234 2025-04-07T14:23:41.528156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576816500564851:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:41.528889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f89/r3tmp/tmpVkufUU/pdisk_1.dat 2025-04-07T14:23:41.848161Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1409, node 1 2025-04-07T14:23:41.932382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:41.932550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:41.949000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:42.013085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:42.013107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:42.013113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:42.013200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27234 TClient is connected to server localhost:27234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:23:42.483513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:23:42.509619Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:44.328890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576829385467367:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:44.329062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:44.329390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576829385467389:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:23:44.333310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:23:44.343142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576829385467391:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:23:44.651414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-07T14:23:44.754701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:44.754886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:44.755128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:44.755245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:44.755336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:44.755428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:44.755543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:44.755660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:44.755779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:44.755913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:44.756006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:44.756104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7490576829385467601:2319];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:44.760708Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:44.760762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:44.760916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:44.761004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:44.761098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:44.761177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:44.761276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:44.761364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:44.761451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:44.761544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:44.761630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:44.761706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7490576829385467594:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:44.791041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576829385467600:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:44.791096Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7490576829385467600:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:44.791268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7490576829385467600:2318];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:44.791350Z node 1 ... _id=[2:7490576900826133490:2427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.023893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;self_id=[2:7490576900826133135:2413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037954;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.023926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7490576900826133020:2379];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;self_id=[2:7490576900826133135:2413];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037954;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7490576905121100850:2442];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7490576900826133011:2374];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037983;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7490576905121100850:2442];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7490576900826133011:2374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037983;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7490576900826133469:2423];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7490576900826133060:2382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037981;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7490576900826132207:2317];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.024963Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037992;self_id=[2:7490576900826133024:2380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037992;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[2:7490576900826133301:2422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037948;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7490576905121101163:2470];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7490576900826133007:2370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7490576900826133060:2382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7490576900826133005:2368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037994;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[2:7490576905121100914:2446];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037926;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7490576900826133005:2368];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037994;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7490576900826133025:2381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.025977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[2:7490576900826133010:2373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7490576900826132210:2318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[2:7490576900826133074:2395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037973;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[2:7490576900826133137:2415];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037953;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026496Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[2:7490576900826132313:2325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7490576900826132219:2321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.026777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[2:7490576900826133471:2424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037942;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7490576900826133251:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7490576900826133251:2417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[2:7490576900826133024:2380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037992;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7490576900826133469:2423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[2:7490576900826133301:2422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037948;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7490576900826133078:2399];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027651Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:7490576900826133015:2376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037976;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.027862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7490576905121101153:2468];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.028012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7490576905121100826:2440];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.028226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7490576900826133480:2426];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.028442Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;self_id=[2:7490576905121101086:2457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.028628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7490576905121101095:2464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037915;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-07T14:24:14.029149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7490576905121101095:2464];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037915;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.029389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7490576900826133128:2407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.029569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7490576900826133480:2426];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.029604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7490576900826133496:2430];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2025-04-07T14:24:14.029704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7490576905121101086:2457];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> DataShardReplication::ReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery |98.6%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] >> KqpTpch::Query04 [GOOD] >> test_dynumber.py::test_dynumber [GOOD] >> KqpTpch::Query05 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-07T14:23:57.661248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576885417760657:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:57.661331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f4a/r3tmp/tmpFdgYjk/pdisk_1.dat 2025-04-07T14:23:58.045148Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:58.065288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:58.065377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:58.077916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:23:58.082872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:23:58.121339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:58.164047Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490576889712728512:2271] 2025-04-07T14:23:58.164339Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:23:58.178199Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:23:58.178313Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:23:58.180336Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:23:58.181313Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:23:58.181373Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:23:58.182172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:23:58.214339Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:23:58.215353Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:23:58.215432Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490576889712728528:2272] 2025-04-07T14:23:58.215471Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:23:58.215481Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:23:58.215502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.216180Z node 1 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 72075186224037888, clientId# [1:7490576889712728509:2289], serverId# [1:7490576889712728527:2297], sessionId# [0:0:0] 2025-04-07T14:23:58.216298Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:23:58.216392Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:23:58.216415Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:23:58.216434Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:23:58.216455Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:23:58.216470Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:23:58.216486Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:23:58.217900Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:23:58.218018Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T14:23:58.219735Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:23:58.219837Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:23:58.219914Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:23:58.223175Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490576889712728542:2305], serverId# [1:7490576889712728543:2306], sessionId# [0:0:0] 2025-04-07T14:23:58.228943Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744035838265 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490576885417761118 RawX2: 4294969514 } } Step: 1744035838265 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:23:58.228982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.229215Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:23:58.229305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:23:58.229328Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:23:58.229385Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744035838265:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-07T14:23:58.229616Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035838265:281474976710657 keys extracted: 0 2025-04-07T14:23:58.229756Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:23:58.229838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:23:58.229878Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:23:58.233145Z node 1 :TX_DATASHARD INFO: 
Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:23:58.233649Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:23:58.234866Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744035838264 2025-04-07T14:23:58.234879Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.234902Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744035838272 2025-04-07T14:23:58.234962Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035838265 txid# 281474976710657} 2025-04-07T14:23:58.234998Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035838265} 2025-04-07T14:23:58.235042Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:23:58.235070Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:23:58.235088Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:23:58.235113Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:23:58.235142Z node 1 :TX_DATASHARD DEBUG: Complete [1744035838265 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490576885417761056:2188], exec latency: 3 ms, propose latency: 5 ms 2025-04-07T14:23:58.235164Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T14:23:58.235192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.239104Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T14:23:58.239157Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:23:58.246007Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490576889712728581:2334], serverId# [1:7490576889712728582:2335], sessionId# [0:0:0] 2025-04-07T14:23:58.247047Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:23:58.247167Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-07T14:23:58.248161Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:23:58.249271Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1744035838293 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 7490576885417761118 RawX2: 4294969514 } } Step: 1744035838293 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:23:58.249292Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.249386Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:23:58.249399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 
1 2025-04-07T14:23:58.249421Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744035838293:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-07T14:23:58.249566Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035838293:281474976710658 keys extracted: 0 2025-04-07T14:23:58.249789Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:23:58.250163Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035838293 txid# 281474976710658} 2025-04-07T14:23:58.250189Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035838293} 2025-04-07T14:23:58.250220Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:23:58.250284Z node 1 :TX_DATASHARD DEBUG: Complete [1744035838293 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490576889712728575:2329], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:23:58.250308Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:23:58.254167Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490576889712728592:2345], serverId# [1:7490576889712728593:2346], sessionId# [0:0:0] 2025-04-07T14:23:58.254982Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:23:58.255061Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-04-07T14:23:58.256307Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 720751 ... 
10 keys extracted: 0 2025-04-07T14:24:22.005705Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:22.005790Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037903 2025-04-07T14:24:22.005841Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037903 tableId# [OwnerId: 72057594046644480, LocalPathId: 21] schema version# 1 2025-04-07T14:24:22.006233Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037903 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:22.006689Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:22.008173Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-07T14:24:22.008251Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037903 time 1744035862050 2025-04-07T14:24:22.008276Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-04-07T14:24:22.008312Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037903 coordinator 72057594046316545 last step 0 next step 1744035862051 2025-04-07T14:24:22.008352Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037903 step# 1744035862051 txid# 281474976715710} 2025-04-07T14:24:22.008373Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037903 step# 1744035862051} 2025-04-07T14:24:22.008399Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-04-07T14:24:22.008433Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-04-07T14:24:22.008449Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037903 2025-04-07T14:24:22.008463Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037903 2025-04-07T14:24:22.008504Z node 5 :TX_DATASHARD DEBUG: Complete [1744035862051 : 281474976715710] from 72075186224037903 at tablet 72075186224037903 send result to client [5:7490576969428685705:2143], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:24:22.008533Z node 5 :TX_DATASHARD INFO: 72075186224037903 Sending notify to schemeshard 72057594046644480 txId 281474976715710 state Ready TxInFly 0 2025-04-07T14:24:22.008570Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-04-07T14:24:22.008636Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:22.011815Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715710 datashard 72075186224037903 state Ready 2025-04-07T14:24:22.011856Z node 5 :TX_DATASHARD DEBUG: 72075186224037903 Got TEvSchemaChangedResult from SS at 72075186224037903 2025-04-07T14:24:22.020338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-07T14:24:22.024201Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-07T14:24:22.024226Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-07T14:24:22.031883Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037904 actor [5:7490576990903524649:2373] 2025-04-07T14:24:22.032107Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:22.045825Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:22.045902Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:22.047443Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037904 2025-04-07T14:24:22.047493Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037904 2025-04-07T14:24:22.047529Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037904 2025-04-07T14:24:22.047881Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:22.047909Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037904 2025-04-07T14:24:22.047976Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:22.048011Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037904, actorId: [5:7490576990903524664:2374] 2025-04-07T14:24:22.048024Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-04-07T14:24:22.048034Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037904, state: WaitScheme 2025-04-07T14:24:22.048048Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-07T14:24:22.048134Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037904 2025-04-07T14:24:22.048178Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037904 2025-04-07T14:24:22.048201Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-07T14:24:22.048218Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:22.048237Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037904 TxInFly 0 2025-04-07T14:24:22.048252Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-07T14:24:22.081030Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7490576990903524645:3988], serverId# [5:7490576990903524667:3999], sessionId# [0:0:0] 2025-04-07T14:24:22.081178Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037904 2025-04-07T14:24:22.081438Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037904 txId 281474976715711 ssId 72057594046644480 seqNo 2:31 2025-04-07T14:24:22.081525Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715711 at tablet 72075186224037904 2025-04-07T14:24:22.081993Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-04-07T14:24:22.084840Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037904 2025-04-07T14:24:22.084935Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme 
2025-04-07T14:24:22.090936Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7490576990903524672:4004], serverId# [5:7490576990903524674:4006], sessionId# [0:0:0] 2025-04-07T14:24:22.091244Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715711 at step 1744035862135 at tablet 72075186224037904 { Transactions { TxId: 281474976715711 AckTo { RawX1: 7490576969428685849 RawX2: 21474838703 } } Step: 1744035862135 MediatorID: 72057594046382081 TabletID: 72075186224037904 } 2025-04-07T14:24:22.091264Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-07T14:24:22.091372Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-04-07T14:24:22.091432Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-07T14:24:22.091450Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:22.091474Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1744035862135:281474976715711] in PlanQueue unit at 72075186224037904 2025-04-07T14:24:22.091809Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037904 loaded tx from db 1744035862135:281474976715711 keys extracted: 0 2025-04-07T14:24:22.091949Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:22.092057Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-07T14:24:22.092099Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037904 tableId# [OwnerId: 72057594046644480, LocalPathId: 22] schema version# 1 2025-04-07T14:24:22.092559Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037904 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:22.092775Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:22.092877Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-07T14:24:22.092970Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:22.097066Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037904 time 1744035862134 2025-04-07T14:24:22.097101Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-07T14:24:22.097153Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037904 step# 1744035862135 txid# 281474976715711} 2025-04-07T14:24:22.097179Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037904 step# 1744035862135} 2025-04-07T14:24:22.097223Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-07T14:24:22.097277Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-07T14:24:22.097295Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-04-07T14:24:22.097320Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037904 2025-04-07T14:24:22.097381Z node 5 
:TX_DATASHARD DEBUG: Complete [1744035862135 : 281474976715711] from 72075186224037904 at tablet 72075186224037904 send result to client [5:7490576969428685705:2143], exec latency: 0 ms, propose latency: 5 ms 2025-04-07T14:24:22.097409Z node 5 :TX_DATASHARD INFO: 72075186224037904 Sending notify to schemeshard 72057594046644480 txId 281474976715711 state Ready TxInFly 0 2025-04-07T14:24:22.097465Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-07T14:24:22.097537Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037904 coordinator 72057594046316545 last step 0 next step 1744035862142 2025-04-07T14:24:22.099471Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715711 datashard 72075186224037904 state Ready 2025-04-07T14:24:22.099505Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 Got TEvSchemaChangedResult from SS at 72075186224037904 2025-04-07T14:24:22.142996Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7490576969428685549:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:22.143098Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] |98.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_local_kmeans/unittest |98.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> StatisticsScan::RunScanOnShard >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> TColumnShardTestSchema::ColdTiers [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> 
TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036388.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036388.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036388.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124036388.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036388.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036388.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035188.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124036388.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124036388.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035188.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124035188.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124035188.000000s;Name=;Codec=}; 2025-04-07T14:23:09.258802Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:23:09.375265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:23:09.396428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:23:09.396513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:23:09.396829Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:23:09.404199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:09.404441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:09.404657Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:09.404781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:09.404876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:09.404978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:09.405070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:09.405186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:09.405307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:09.405399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:09.405485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:09.405569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:09.442515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:23:09.442581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T14:23:09.445560Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:23:09.445899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:23:09.445955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:23:09.446131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:09.446289Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:09.446369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:09.446404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:23:09.446507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:23:09.446573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:09.446619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:09.446649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:23:09.446831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:09.446905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:09.446950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:09.446979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:23:09.447065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:23:09.447112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:09.447148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:09.447171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:23:09.447238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:09.447271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:09.447297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-07T14:23:09.447336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:09.447365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:09.447390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:23:09.447572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-07T14:23:09.447659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-07T14:23:09.447727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T14:23:09.447818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-07T14:23:09.447989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:09.448041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:09.448071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:23:09.448431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:23:09.448485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switch ... 
e 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.800824Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:796:2796] finished for tablet 9437184 2025-04-07T14:24:25.800885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:796:2796] send ScanData to [1:795:2795] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:25.801209Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:796:2796] and sent to [1:795:2795] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.008},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.228},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.231}],"full":{"a":1744035865569408,"name":"_full_task","f":1744035865569408,"d_finished":0,"c":0,"l":1744035865800917,"d":231509},"events":[{"name":"bootstrap","f":1744035865569567,"d_finished":8802,"c":1,"l":1744035865578369,"d":8802},{"a":1744035865800603,"name":"ack","f":1744035865798021,"d_finished":1951,"c":3,"l":1744035865800531,"d":2265},{"a":1744035865800594,"name":"processing","f":1744035865579376,"d_finished":154009,"c":24,"l":1744035865800533,"d":154332},{"name":"ProduceResults","f":1744035865571411,"d_finished":5250,"c":29,"l":1744035865800811,"d":5250},{"a":1744035865800813,"name":"Finish","f":1744035865800813,"d_finished":0,"c":0,"l":1744035865800917,"d":104},{"name":"task_result","f":1744035865579394,"d_finished":151749,"c":21,"l":1744035865797908,"d":151749}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:796:2796]->[1:795:2795] 2025-04-07T14:24:25.801299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:25.569035Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2025-04-07T14:24:25.801330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:25.801463Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.067087s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.044912s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.027144s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T14:24:25.801522Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:796:2796];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:25.802719Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T14:24:25.802852Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-07T14:24:25.802936Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T14:24:25.803044Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T14:24:25.803090Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T14:24:25.803425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:803:2803];trace_detailed=; 2025-04-07T14:24:25.803711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T14:24:25.803865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:25.803994Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.804104Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.804234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:24:25.804294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.804365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.804399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:803:2803] finished for tablet 9437184 2025-04-07T14:24:25.804453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:803:2803] send ScanData to [1:802:2802] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:25.804756Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:803:2803] and sent to [1:802:2802] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744035865803373,"name":"_full_task","f":1744035865803373,"d_finished":0,"c":0,"l":1744035865804485,"d":1112},"events":[{"name":"bootstrap","f":1744035865803486,"d_finished":645,"c":1,"l":1744035865804131,"d":645},{"a":1744035865804221,"name":"ack","f":1744035865804221,"d_finished":0,"c":0,"l":1744035865804485,"d":264},{"a":1744035865804212,"name":"processing","f":1744035865804212,"d_finished":0,"c":0,"l":1744035865804485,"d":273},{"name":"ProduceResults","f":1744035865803942,"d_finished":323,"c":2,"l":1744035865804385,"d":323},{"a":1744035865804387,"name":"Finish","f":1744035865804387,"d_finished":0,"c":0,"l":1744035865804485,"d":98}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:803:2803]->[1:802:2802] 2025-04-07T14:24:25.804827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:25.803132Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T14:24:25.804867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:25.804905Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T14:24:25.804969Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:803:2803];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 >> test.py::test[solomon-BadDownsamplingAggregation-] >> QueryActorTest::SimpleQuery >> SdkCredProvider::PingFromProviderSyncDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|98.6%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036389.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036389.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036389.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124036389.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036389.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036389.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035189.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124036389.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124036389.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035189.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124035189.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124035189.000000s;Name=;Codec=}; 2025-04-07T14:23:09.644363Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:23:09.776777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:23:09.796015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:23:09.796089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:23:09.796316Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:23:09.812245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:09.812487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:09.817155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:09.817357Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:09.817450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:09.817547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:09.817639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:09.817752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:09.817872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:09.817968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:09.818052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:09.818162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:09.859533Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:23:09.859601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T14:23:09.865590Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:23:09.865773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:23:09.865819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:23:09.865983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:09.866164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:09.866254Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:09.866293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:23:09.866393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:23:09.866468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:09.866513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:09.866574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:23:09.866742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:09.866828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:09.866875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:09.866903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:23:09.866981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:23:09.867025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:09.867061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:09.867086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:23:09.867174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:09.867220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:09.867247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:23:09.867289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-07T14:23:09.867326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:09.867351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:23:09.867511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-07T14:23:09.867595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T14:23:09.867672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-07T14:23:09.867751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-07T14:23:09.867950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:09.868017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:09.868053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Ex ... rator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.961367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] finished for tablet 9437184 2025-04-07T14:24:25.961441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:939:2939] send ScanData to [1:938:2938] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:25.961872Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:939:2939] and sent to [1:938:2938] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.015},{"events":["f_processing","f_task_result"],"t":0.018},{"events":["f_ack","l_task_result"],"t":0.328},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.332}],"full":{"a":1744035865628888,"name":"_full_task","f":1744035865628888,"d_finished":0,"c":0,"l":1744035865961484,"d":332596},"events":[{"name":"bootstrap","f":1744035865629084,"d_finished":14957,"c":1,"l":1744035865644041,"d":14957},{"a":1744035865961165,"name":"ack","f":1744035865957491,"d_finished":2713,"c":3,"l":1744035865961067,"d":3032},{"a":1744035865961154,"name":"processing","f":1744035865646981,"d_finished":206927,"c":24,"l":1744035865961069,"d":207257},{"name":"ProduceResults","f":1744035865631153,"d_finished":10115,"c":29,"l":1744035865961353,"d":10115},{"a":1744035865961355,"name":"Finish","f":1744035865961355,"d_finished":0,"c":0,"l":1744035865961484,"d":129},{"name":"task_result","f":1744035865647007,"d_finished":203791,"c":21,"l":1744035865957313,"d":203791}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:939:2939]->[1:938:2938] 2025-04-07T14:24:25.961956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:25.628431Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T14:24:25.961996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:25.962162Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.082631s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.075781s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.013898s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.016006s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T14:24:25.962241Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:939:2939];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:25.963939Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-07T14:24:25.964155Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 
18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-07T14:24:25.964275Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T14:24:25.964456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T14:24:25.964531Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T14:24:25.965074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:946:2946];trace_detailed=; 2025-04-07T14:24:25.965451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T14:24:25.965662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:25.965829Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.965962Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.966199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:24:25.966301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.966425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:25.966470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2025-04-07T14:24:25.966554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:25.966965Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744035865965008,"name":"_full_task","f":1744035865965008,"d_finished":0,"c":0,"l":1744035865966603,"d":1595},"events":[{"name":"bootstrap","f":1744035865965163,"d_finished":832,"c":1,"l":1744035865965995,"d":832},{"a":1744035865966175,"name":"ack","f":1744035865966175,"d_finished":0,"c":0,"l":1744035865966603,"d":428},{"a":1744035865966159,"name":"processing","f":1744035865966159,"d_finished":0,"c":0,"l":1744035865966603,"d":444},{"name":"ProduceResults","f":1744035865965750,"d_finished":471,"c":2,"l":1744035865966452,"d":471},{"a":1744035865966454,"name":"Finish","f":1744035865966454,"d_finished":0,"c":0,"l":1744035865966603,"d":149}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2025-04-07T14:24:25.967073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:25.964587Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T14:24:25.967123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:25.967173Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T14:24:25.967275Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/dynumber/py3test >> test_dynumber.py::test_dynumber [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|98.6%| [TM] {RESULT} ydb/tests/functional/dynumber/py3test >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> test_dispatch.py::TestMapping::test_idle [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.6%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> Coordinator::ReadStepSubscribe >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> TTxDataShardReshuffleKMeansScan::BadRequest >> TDqPqRdReadActorTests::SessionError [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection |98.6%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TMemoryController::Counters >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] >> StatisticsScan::RunScanOnShard [GOOD] >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath >> KafkaProtocol::ProduceScenario [GOOD] >> KafkaProtocol::FetchScenario >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] >> TDqPqRdReadActorTests::CoordinatorChanged >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-04-07T14:24:28.430584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:28.431068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:28.431381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cfd/r3tmp/tmpJLi7Jq/pdisk_1.dat 2025-04-07T14:24:28.994039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.053359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:29.113475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:29.113710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:29.125399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:29.300023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.706592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:29.706723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:29.706799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:29.713631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:29.929468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:24:31.103073Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8a6x079427wv4nwc73ygce, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiNzY5NDYtZTU4ZjQ2MjgtYTM1ODVkM2ItMzM0MDE1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |98.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> TestFilterSet::FilterGroup >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] |98.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> TDqPqRdReadActorTests::Backpressure >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianRackRatio >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelTests::Smoke >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> SequenceProxy::Basics >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> 
QuoterWithKesusTest::HandlesNonExistentResource >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> KafkaProtocol::FetchScenario [GOOD] >> KafkaProtocol::BalanceScenario ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 |98.7%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] >> TestFilterSet::FilterGroup [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> QueryActorTest::Rollback [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> QueryActorTest::Commit >> TestFilterSet::DuplicationValidation >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutToken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036422.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036422.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144036422.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036422.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036422.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124036422.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144036422.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036422.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124035222.000000s;Name=;Codec=}; 2025-04-07T14:23:42.790562Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:23:42.869819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 
2025-04-07T14:23:42.889261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:23:42.889346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:23:42.889610Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:23:42.897707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:42.897971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:42.898197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:42.898301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:42.898413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:42.898517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:42.898621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:42.898720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:42.898837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:42.898961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:42.899061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:42.899169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:42.919399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:23:42.919499Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T14:23:42.922354Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:23:42.922519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:23:42.922571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:23:42.922773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:42.922945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:42.923042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:42.923090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:23:42.923182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:23:42.923257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:42.923341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:42.923385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:23:42.923593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:42.923688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:42.923741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:42.923801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:23:42.923902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:23:42.923966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:42.924009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:42.924040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:23:42.924133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:42.924174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:42.924204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:23:42.924254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:42.924292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:42.924318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:23:42.924494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-07T14:23:42.924574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-07T14:23:42.924658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-07T14:23:42.924770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-07T14:23:42.924929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:42.924997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:42.925042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:23:42.925224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... 
.377933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:911:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:36.377966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:911:2916] finished for tablet 9437184 2025-04-07T14:24:36.378037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:911:2916] send ScanData to [1:910:2915] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:36.378447Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:911:2916] and sent to [1:910:2915] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.009},{"events":["f_processing","f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":0.299},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.303}],"full":{"a":1744035876074277,"name":"_full_task","f":1744035876074277,"d_finished":0,"c":0,"l":1744035876378074,"d":303797},"events":[{"name":"bootstrap","f":1744035876074469,"d_finished":9329,"c":1,"l":1744035876083798,"d":9329},{"a":1744035876377765,"name":"ack","f":1744035876373640,"d_finished":3225,"c":3,"l":1744035876377676,"d":3534},{"a":1744035876377754,"name":"processing","f":1744035876089073,"d_finished":200318,"c":24,"l":1744035876377678,"d":200638},{"name":"ProduceResults","f":1744035876076513,"d_finished":11781,"c":29,"l":1744035876377952,"d":11781},{"a":1744035876377954,"name":"Finish","f":1744035876377954,"d_finished":0,"c":0,"l":1744035876378074,"d":120},{"name":"task_result","f":1744035876089101,"d_finished":196579,"c":21,"l":1744035876373472,"d":196579}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:911:2916]->[1:910:2915] 2025-04-07T14:24:36.378524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:911:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:36.073881Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2025-04-07T14:24:36.378563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:911:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:36.378700Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:911:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile={branch:simple;limit:0;duration:0.224693s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.270398s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.029622s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2025-04-07T14:24:36.378773Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:911:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:36.380363Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-07T14:24:36.380555Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-07T14:24:36.381878Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-07T14:24:36.382078Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2025-04-07T14:24:36.382150Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2025-04-07T14:24:36.382659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:146;event=TTxScan started;actor_id=[1:927:2932];trace_detailed=; 2025-04-07T14:24:36.383050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-07T14:24:36.383246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-07T14:24:36.383401Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:36.383538Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:36.383778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-07T14:24:36.383878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:36.383994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-07T14:24:36.384038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:927:2932] finished for tablet 9437184 2025-04-07T14:24:36.384125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:927:2932] send ScanData to [1:926:2931] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2025-04-07T14:24:36.384531Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:377 :Scanner finished [1:927:2932] and sent to [1:926:2931] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744035876382588,"name":"_full_task","f":1744035876382588,"d_finished":0,"c":0,"l":1744035876384189,"d":1601},"events":[{"name":"bootstrap","f":1744035876382790,"d_finished":785,"c":1,"l":1744035876383575,"d":785},{"a":1744035876383758,"name":"ack","f":1744035876383758,"d_finished":0,"c":0,"l":1744035876384189,"d":431},{"a":1744035876383745,"name":"processing","f":1744035876383745,"d_finished":0,"c":0,"l":1744035876384189,"d":444},{"name":"ProduceResults","f":1744035876383330,"d_finished":458,"c":2,"l":1744035876384022,"d":458},{"a":1744035876384025,"name":"Finish","f":1744035876384025,"d_finished":0,"c":0,"l":1744035876384189,"d":164}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:927:2932]->[1:926:2931] 2025-04-07T14:24:36.384633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2025-04-07T14:24:36.382206Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-07T14:24:36.384695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:170;event=scan_aborted;reason=unexpected on destructor; 2025-04-07T14:24:36.384744Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:77;profile=; 2025-04-07T14:24:36.384837Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:927:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:78;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> TSentinelTests::PDiskUnknownState [GOOD] >> TSentinelTests::PDiskErrorState >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery 
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] >> SequenceProxy::DropRecreate [GOOD] >> TestFilterSet::DuplicationValidation [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2025-04-07T14:15:37.385124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:15:37.385443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:15:37.385583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/00200f/r3tmp/tmpG34U2g/pdisk_1.dat 2025-04-07T14:15:37.717080Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21521, node 1 2025-04-07T14:15:37.928920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:15:37.928977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:15:37.929006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:15:37.929595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:15:37.931706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:15:38.023291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:38.023455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:38.037713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12575 2025-04-07T14:15:38.630469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:15:41.510321Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-07T14:15:41.544258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:41.544387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:41.582461Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:15:41.586641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:41.755962Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:15:41.797959Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2025-04-07T14:15:41.798031Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2025-04-07T14:15:41.820298Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2025-04-07T14:15:41.820446Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2025-04-07T14:15:41.820649Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2025-04-07T14:15:41.820748Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2025-04-07T14:15:41.820806Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2025-04-07T14:15:41.820864Z node 
2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2025-04-07T14:15:41.820965Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2025-04-07T14:15:41.821029Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2025-04-07T14:15:41.821391Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2025-04-07T14:15:42.021844Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:42.021962Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1759:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2025-04-07T14:15:42.027460Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1769:2561] 2025-04-07T14:15:42.033719Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1804:2577] 2025-04-07T14:15:42.034420Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1804:2577], schemeshard id = 72075186224037889 2025-04-07T14:15:42.035128Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2025-04-07T14:15:42.050508Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-07T14:15:42.050557Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-07T14:15:42.050605Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-07T14:15:42.051864Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:15:42.051943Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:15:42.062113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2025-04-07T14:15:42.072468Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2025-04-07T14:15:42.072627Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-07T14:15:42.084899Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-07T14:15:42.100655Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:15:42.140095Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2025-04-07T14:15:42.385198Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-07T14:15:42.540389Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-07T14:15:43.316080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.316205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.333058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2025-04-07T14:15:43.624372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2426:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.624500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.625477Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2431:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:43.625607Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-07T14:15:43.625670Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2433:3070] 2025-04-07T14:15:43.625712Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2433:3070] 2025-04-07T14:15:43.626078Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2434:2945] 2025-04-07T14:15:43.626279Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2433:3070], server id = [2:2434:2945], tablet id = 72075186224037897, status = OK 2025-04-07T14:15:43.626438Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2434:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-07T14:15:43.626493Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-07T14:15:43.626621Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:15:43.626676Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2431:3068], StatRequests.size() = 1 2025-04-07T14:15:43.638774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2438:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.638854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.639096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2443:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:15:43.643276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-07T14:15:43.860525Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2025-04-07T14:15:43.860599Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-07T14:15:43.934545Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2433:3070], schemeshard count = 1 2025-04-07T14:15:44.194806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-07T14:15:44.313079Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2584:3168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:15:44.313212Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-07T14:15:44.313246Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2584:3168], StatRequests.size() = 1 2025-04-07T14:15:44.364793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr89pv7y4zct06gy3q133jcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMyZjlmNWMtZGZiYmUwYzItZjk5NDcwNy1lZDE5YjQxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for TEvPeriodicTableStats 2025-04-07T14:15:46.562637Z node 2 :S ... 7T14:23:16.351007Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:17.909992Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:17.910195Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:17.910514Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:19.631748Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:19.631844Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:22.522663Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:22.604375Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:22.604436Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:25.641641Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:25.642029Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:25.642298Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:25.696538Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:25.696614Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:28.569407Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:28.569473Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:30.043956Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:31.415742Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:31.415825Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:23:31.611167Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:23:31.611253Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:23:31.611291Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:23:31.611328Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:23:33.093363Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:33.093568Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:33.093887Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:34.512727Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:34.512803Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:37.154173Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:37.165128Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:37.165200Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:40.142210Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:40.142553Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:40.142814Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:40.153738Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:40.153797Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:43.200289Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:43.200342Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:44.690881Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:46.250535Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:46.250598Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:47.965630Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:47.965836Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:47.966205Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:49.701774Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:49.701831Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:52.957524Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:23:52.970968Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:52.971021Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:23:56.440432Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:23:56.440801Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:23:56.441044Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:23:56.451945Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:56.452001Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:23:59.717209Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:23:59.717280Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:01.297053Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:24:02.909004Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:02.909065Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:04.684992Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:24:04.685364Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:24:04.685632Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:24:06.374535Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:06.374598Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:09.723421Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:24:09.734355Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:09.734424Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:13.036446Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:24:13.036839Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:24:13.037110Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:24:13.048126Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:13.048190Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:16.537494Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:16.537555Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:18.256053Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:24:19.941514Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:19.941572Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2025-04-07T14:24:20.213335Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-07T14:24:20.213417Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:24:20.213454Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-07T14:24:20.213487Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-07T14:24:21.957469Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:24:21.957659Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:24:21.957991Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:24:23.685339Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:23.685386Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:26.781743Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:24:26.817456Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:26.817525Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:29.703406Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:24:29.703803Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:24:29.704057Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-07T14:24:29.760573Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:29.760634Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:32.508004Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:32.508066Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:34.033402Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2025-04-07T14:24:35.502744Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2025-04-07T14:24:35.502810Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2025-04-07T14:24:35.547265Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2025-04-07T14:24:35.547354Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224037889 2025-04-07T14:24:35.547600Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-07T14:24:35.562283Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2025-04-07T14:24:37.142030Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-07T14:24:37.142235Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done) 2025-04-07T14:24:37.142588Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13588:7505]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-07T14:24:37.143156Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-07T14:24:37.145934Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-07T14:24:37.145983Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13588:7505], StatRequests.size() = 1 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> TestFilterSet::CompilationValidation >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: 2025-04-07T14:23:59.997862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:23:59.998217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:23:59.998519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000dcc/r3tmp/tmpazBFMV/pdisk_1.dat 2025-04-07T14:24:00.487308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:00.536395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:00.592642Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:24:00.594636Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:24:00.594885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:00.595055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:00.606692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:00.728643Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:24:00.728749Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:24:00.729503Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:24:00.855646Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_STRONG } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:24:00.856252Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:24:00.856399Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:24:00.856627Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:24:00.856853Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:24:00.856982Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T14:24:00.857306Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:24:00.859510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:00.860650Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:24:00.860807Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:24:00.902719Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:00.903940Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:00.904424Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:00.904748Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:00.946941Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:00.947900Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:00.948020Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:00.949659Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:00.949754Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:00.949801Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:00.950760Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:00.994907Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:00.996172Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:00.996374Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:00.996427Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:00.996462Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:00.996514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:00.997033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:00.997091Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:00.998117Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:00.998253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:00.999579Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:00.999629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:00.999675Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:24:00.999791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:00.999837Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:00.999968Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:24:01.000010Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:01.000042Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:01.000078Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:01.000127Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:01.000278Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:24:01.000381Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:01.000495Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:01.000850Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:24:01.000918Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:01.001068Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:01.001204Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:01.001256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:24:01.001291Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:24:01.001324Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:01.001610Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:24:01.001647Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:24:01.001679Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:01.001710Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:01.001776Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:24:01.001811Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:01.001841Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to 
execution unit WaitForPlan 2025-04-07T14:24:01.001874Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:01.001900Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:24:01.003379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:24:01.003482Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:01.017524Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:01.017605Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:01.017636Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:01.017688Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at table ... verConnected 2025-04-07T14:24:38.463340Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:38.463384Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:838:2671], serverId# [7:839:2672], sessionId# [0:0:0] 2025-04-07T14:24:38.463634Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [7:837:2670], Recipient [7:632:2537]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-04-07T14:24:38.463838Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-07T14:24:38.463973Z node 7 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-04-07T14:24:38.476154Z node 7 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-07T14:24:38.477124Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [7:24:2071], Recipient [7:632:2537]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-04-07T14:24:38.477182Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-07T14:24:38.477237Z node 7 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-07T14:24:38.477300Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:38.685015Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr8a75jd7nv2mdkbhvj0gd6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmEyZDQ5MDUtMzc2OTNiNWMtYzgzMTkzYmYtM2IxZWQwZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:24:38.686470Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:855:2591], Recipient [7:632:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 855 RawX2: 30064773663 } TxBody: " \0018\001j7\010\001\032\'\n#\t\214\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 1500 TxId: 18446744073709551615 } 2025-04-07T14:24:38.686550Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:38.687038Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:632:2537], Recipient [7:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:24:38.687083Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:24:38.687182Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:38.687433Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715660, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T14:24:38.687572Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:24:38.687641Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:24:38.687717Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:24:38.687771Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:24:38.687820Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:24:38.687873Z node 7 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-07T14:24:38.687925Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:24:38.687951Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:24:38.687974Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:24:38.687999Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:24:38.688063Z node 7 :TX_DATASHARD TRACE: Operation [0:281474976715661] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193446 2025-04-07T14:24:38.688180Z node 7 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-07T14:24:38.688281Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:24:38.688339Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:24:38.688367Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:24:38.688393Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715661] at 
72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:38.688419Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:38.688460Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715661 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:24:38.688538Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is DelayComplete 2025-04-07T14:24:38.688574Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:38.688622Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:24:38.688687Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:24:38.688739Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-07T14:24:38.688762Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:24:38.688790Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715661] at 72075186224037888 has finished 2025-04-07T14:24:38.688867Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:38.688916Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:38.688978Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:38.689634Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:863:2591], Recipient [7:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:24:38.689715Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-07T14:24:38.689795Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-07T14:24:38.689899Z node 7 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:24:38.689943Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-07T14:24:38.689987Z node 7 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:24:38.690033Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:24:38.690080Z node 7 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-07T14:24:38.690145Z node 7 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:24:38.690172Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:24:38.690196Z node 7 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-07T14:24:38.690221Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on 
unit ExecuteRead 2025-04-07T14:24:38.690361Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:24:38.690605Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-07T14:24:38.690668Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:863:2591], 0} after executionsCount# 1 2025-04-07T14:24:38.690722Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:863:2591], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:24:38.690812Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:863:2591], 0} finished in read 2025-04-07T14:24:38.690892Z node 7 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:24:38.690920Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-07T14:24:38.690945Z node 7 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:24:38.690971Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:24:38.691011Z node 7 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-07T14:24:38.691037Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:24:38.691064Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-07T14:24:38.691109Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-07T14:24:38.691220Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-07T14:24:38.691941Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:863:2591], Recipient [7:632:2537]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:24:38.691997Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-07T14:24:38.694486Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:632:2537]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 7 Status: STATUS_NOT_FOUND { items { uint32_value: 1 } items { uint32_value: 11 } } >> MediatorTest::BasicTimecastUpdates |98.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-04-07T14:24:36.388591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:24:36.388679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:36.482587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:24:37.241237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-07T14:24:37.460527Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:24:37.461056Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmprkdBzE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:24:37.461786Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmprkdBzE/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmprkdBzE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10598532855094336115 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:24:38.482938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:24:38.483009Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:38.535688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:24:39.049637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-07T14:24:39.273558Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:24:39.274027Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmpSV3cKJ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:24:39.274220Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmpSV3cKJ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/000998/r3tmp/tmpSV3cKJ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4519047716363275114 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:24:39.406994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-04-07T14:24:39.648016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 |98.7%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs |98.7%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> TestFilterSet::CompilationValidation [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 >> test_timeout.py::TestTimeout::test_timeout >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> TestFormatHandler::ManyJsonClients >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> TColumnShardTestSchema::RebootColdTiers [GOOD] >> TTxDataShardSampleKScan::RunScan >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TColumnShardTestSchema::RebootOneColdTier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036402.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036402.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036402.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124036402.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144036402.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144036402.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035202.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124036402.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124036402.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035202.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124035202.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124035202.000000s;Name=;Codec=}; 2025-04-07T14:23:23.295666Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:23:23.392424Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:23:23.422301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 2025-04-07T14:23:23.422386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:23:23.422630Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:23:23.429431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:23.429658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:23.429881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:23.429979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:23.430049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:23.430141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:23.430230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:23.430343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:23.430461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:23.430556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:23.430642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:23.430725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:23.450072Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:23:23.450144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2025-04-07T14:23:23.452249Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:23:23.452393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:23:23.452440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:23:23.452635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:23.452807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:23.452883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:23.452920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:23:23.453017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:23:23.453077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:23.453112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:23.453138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:23:23.453287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:23.453347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:23.453378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:23.453406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:23:23.453476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:23:23.453521Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:23.453555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:23.453581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:23:23.453647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:23.453678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:23.453701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:23:23.453742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:23.453772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:23.453799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:23:23.453968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-07T14:23:23.454048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-07T14:23:23.454119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-07T14:23:23.454182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-07T14:23:23.454324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:23.454372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:23.454401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:23:23.454578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:23:23.454616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switch ... 
CUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=76; 2025-04-07T14:24:43.368837Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:24:43.368891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=102; 2025-04-07T14:24:43.369031Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=86; 2025-04-07T14:24:43.369204Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=133; 2025-04-07T14:24:43.369423Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=169; 2025-04-07T14:24:43.369972Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=503; 2025-04-07T14:24:43.370043Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=24; 2025-04-07T14:24:43.370097Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-07T14:24:43.370135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:24:43.370195Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-07T14:24:43.370243Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-07T14:24:43.370311Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-04-07T14:24:43.370346Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-07T14:24:43.370413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=39; 2025-04-07T14:24:43.370451Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-07T14:24:43.370518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 
2025-04-07T14:24:43.370561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13721; 2025-04-07T14:24:43.370695Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=12;blobs=18;rows=320000;bytes=18990624;raw_bytes=32169236; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:24:43.370794Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:24:43.370853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,tier1,tier0; 2025-04-07T14:24:43.370889Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T14:24:43.370987Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T14:24:43.371092Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:24:43.371126Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:24:43.371192Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:24:43.371231Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:24:43.371297Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:43.371368Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=3; 2025-04-07T14:24:43.371424Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T14:24:43.371475Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:24:43.371527Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:43.371568Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:43.371609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:24:43.371721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T14:24:43.372301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:43.372393Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1411:3280];tablet_id=9437184;parent=[1:1372:3248];fline=manager.h:99;event=ask_data;request=request_id=102;1={portions_count=12};; 2025-04-07T14:24:43.373153Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:24:43.374012Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:24:43.374047Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:24:43.374071Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:24:43.374115Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:24:43.374173Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:43.374240Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=3; 2025-04-07T14:24:43.374300Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T14:24:43.374353Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:24:43.374415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:43.374463Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:43.374507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:24:43.374593Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 
2025-04-07T14:24:43.375070Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2025-04-07T14:24:43.375529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2025-04-07T14:24:43.376527Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:24:43.376573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1372:3248];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> MediatorTest::BasicTimecastUpdates [GOOD] >> LongTxService::BasicTransactions >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] [GOOD] >> MediatorTest::MultipleTablets >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144036419.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144036419.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124036419.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124036419.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124035219.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124035219.000000s;Name=;Codec=}; 2025-04-07T14:23:41.551646Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-07T14:23:41.650705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:93;event=initialize_shard;step=OnActivateExecutor; 2025-04-07T14:23:41.672345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:99;event=tiering_new_event; 
2025-04-07T14:23:41.672440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:109;event=initialize_shard;step=initialize_tiring_finished; 2025-04-07T14:23:41.672813Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-07T14:23:41.681547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-07T14:23:41.681834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-07T14:23:41.682078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-07T14:23:41.682196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-07T14:23:41.682307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-07T14:23:41.682423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-07T14:23:41.682533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-07T14:23:41.682647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-07T14:23:41.682764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-07T14:23:41.682918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-07T14:23:41.683019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-07T14:23:41.683136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-07T14:23:41.707592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-07T14:23:41.707681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 
2025-04-07T14:23:41.709971Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-07T14:23:41.710126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:130;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-07T14:23:41.710178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-07T14:23:41.710375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:41.710505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-07T14:23:41.710584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-07T14:23:41.710627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-07T14:23:41.710703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-07T14:23:41.710788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-07T14:23:41.710833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-07T14:23:41.710869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-07T14:23:41.711040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-07T14:23:41.711102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-07T14:23:41.711144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-07T14:23:41.711174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-07T14:23:41.711258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-07T14:23:41.711309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-07T14:23:41.711350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-07T14:23:41.711381Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-07T14:23:41.711454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-07T14:23:41.711491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-07T14:23:41.711520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-07T14:23:41.711570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-07T14:23:41.711615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-07T14:23:41.711643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-07T14:23:41.711889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=91; 2025-04-07T14:23:41.711978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-07T14:23:41.712063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-07T14:23:41.712151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-07T14:23:41.712308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-07T14:23:41.712363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-07T14:23:41.712399Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-07T14:23:41.712601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-07T14:23:41.712649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-07T14:23:41.716860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-07T14:23:41.717126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
Time=846; 2025-04-07T14:24:44.286859Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=53; 2025-04-07T14:24:44.286940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-04-07T14:24:44.286981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2025-04-07T14:24:44.287082Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-04-07T14:24:44.287197Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2025-04-07T14:24:44.287376Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=142; 2025-04-07T14:24:44.287796Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=382; 2025-04-07T14:24:44.287868Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=29; 2025-04-07T14:24:44.287925Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=23; 2025-04-07T14:24:44.287961Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-04-07T14:24:44.287996Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-04-07T14:24:44.288039Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-07T14:24:44.288115Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-07T14:24:44.288152Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-07T14:24:44.288244Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2025-04-07T14:24:44.288290Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-07T14:24:44.288347Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-04-07T14:24:44.288379Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11462; 2025-04-07T14:24:44.288505Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2025-04-07T14:24:44.288597Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2025-04-07T14:24:44.288640Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1555;event=activate_tiering;path_id=1;tiers=$$DELETE,cold; 2025-04-07T14:24:44.288727Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard_impl.cpp:1579;event=OnTieringModified;path_id=1; 2025-04-07T14:24:44.288820Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=column_engine_logs.cpp:549;event=OnTieringModified;path_id=1; 2025-04-07T14:24:44.288932Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SignalTabletActive; 2025-04-07T14:24:44.288965Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2025-04-07T14:24:44.289029Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443315200;kff=0.3; 2025-04-07T14:24:44.289065Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:24:44.289115Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:44.289193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T14:24:44.289266Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T14:24:44.289313Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:24:44.289383Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:44.289418Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:44.289454Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 
2025-04-07T14:24:44.289518Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:24:44.289906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:44.290628Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1186:3082];tablet_id=9437184;parent=[1:1149:3052];fline=manager.h:99;event=ask_data;request=request_id=73;1={portions_count=6};; 2025-04-07T14:24:44.290870Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:226;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-07T14:24:44.291958Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:215;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-07T14:24:44.291990Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-07T14:24:44.292013Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-07T14:24:44.292052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2025-04-07T14:24:44.292125Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2025-04-07T14:24:44.292196Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:322;event=StartCleanup;portions_count=2; 2025-04-07T14:24:44.292248Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:364;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2025-04-07T14:24:44.292287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:397;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-07T14:24:44.292335Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:44.292365Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2025-04-07T14:24:44.292402Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:411;event=StartTtl;external=0; 2025-04-07T14:24:44.292469Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2025-04-07T14:24:44.292847Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2025-04-07T14:24:44.293240Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1416;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2025-04-07T14:24:44.293743Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1450;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2025-04-07T14:24:44.293792Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1149:3052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1454;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> CoordinatorTests::Route >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> TestFormatHandler::ManyJsonClients [GOOD] >> LeaderElectionTests::Test1 [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> LeaderElectionTests::TestLocalMode >> LeaderElectionTests::TestLocalMode [GOOD] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> test_commit.py::TestCommit::test_commit >> TopicSessionTests::TwoSessionsWithoutOffsets >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> TestFormatHandler::ManyRawClients >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: 2025-04-07T14:24:25.328769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:25.329215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:25.329510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f66/r3tmp/tmpCeUJbu/pdisk_1.dat 2025-04-07T14:24:25.870235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:25.952376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:26.003294Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:24:26.005399Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:24:26.005612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:26.005788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:26.017385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:26.146186Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:24:26.146281Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:24:26.147365Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:24:26.292042Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:24:26.292643Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:24:26.292855Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:24:26.293251Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:24:26.293792Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:24:26.293955Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:24:26.294228Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:24:26.297362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:26.298314Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:24:26.298395Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:24:26.344766Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:26.345718Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:26.346844Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:26.347145Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:26.409463Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:26.410289Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:26.410402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:26.412272Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:26.412365Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:26.412429Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:26.413232Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:26.435218Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:26.436863Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:26.437097Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:26.437143Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:26.437177Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:26.437212Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:26.437652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:26.437713Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:26.438715Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:26.438861Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:26.440105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:26.440158Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:26.440206Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:24:26.440282Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:26.440327Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:26.440509Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:24:26.440562Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:26.440594Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:26.440632Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:26.440702Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:26.440916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:24:26.440968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:26.441064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:26.441513Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:24:26.441572Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:26.441687Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:26.441775Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:26.441814Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:24:26.441849Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:24:26.441886Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:26.442187Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:24:26.442230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:24:26.442263Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:26.442294Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:26.442369Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:24:26.442404Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:26.442434Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:24:26.442485Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:26.442510Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:24:26.444048Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:24:26.444146Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:26.457547Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:26.457692Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:26.457745Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:26.457831Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... dencies 2025-04-07T14:24:46.727754Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:24:46.727787Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:24:46.727860Z node 5 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-04-07T14:24:46.727943Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-07T14:24:46.727983Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:24:46.728013Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:24:46.728039Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:24:46.728089Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-07T14:24:46.728160Z node 5 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191870 2025-04-07T14:24:46.728465Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:24:46.728540Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:46.728576Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:24:46.728618Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:46.728830Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:46.728944Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:24:46.728986Z node 5 :TX_DATASHARD TRACE: 
Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:46.729039Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:24:46.729190Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:24:46.729340Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-07T14:24:46.729371Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:24:46.729417Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-04-07T14:24:46.740411Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:46.740517Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:46.740593Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:24:46.740780Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:46.743155Z node 5 :TX_PROXY DEBUG: actor# [5:52:2099] Handle TEvNavigate describe path /Root/table-1 2025-04-07T14:24:46.743292Z node 5 :TX_PROXY DEBUG: Actor# [5:820:2657] HANDLE EvNavigateScheme /Root/table-1 2025-04-07T14:24:46.743844Z node 5 :TX_PROXY DEBUG: Actor# [5:820:2657] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-07T14:24:46.743981Z node 5 :TX_PROXY DEBUG: Actor# [5:820:2657] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-04-07T14:24:46.745203Z node 5 :TX_PROXY DEBUG: Actor# [5:820:2657] Handle TEvDescribeSchemeResult Forward to# [5:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-07T14:24:46.746349Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:824:2661], Recipient [5:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.746407Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.746454Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:823:2660], serverId# [5:824:2661], sessionId# [0:0:0] 2025-04-07T14:24:46.746574Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:822:2659], Recipient [5:632:2537]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-07T14:24:46.747498Z node 5 
:TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:827:2664], Recipient [5:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.747551Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.747598Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:826:2663], serverId# [5:827:2664], sessionId# [0:0:0] 2025-04-07T14:24:46.747756Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:825:2662], Recipient [5:632:2537]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-07T14:24:46.747904Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:825:2662], partsCount# 0, memtableSize# 704, memtableWaste# 3904, memtableRows# 3 2025-04-07T14:24:46.835536Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.525525Z 2025-04-07T14:24:46.835659Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-07T14:24:46.835721Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:825:2662]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:24:46.836516Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:623:2531], Recipient [5:632:2537]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:24:46.837582Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:834:2670], Recipient [5:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.837656Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:46.837708Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:833:2669], serverId# [5:834:2670], sessionId# [0:0:0] 2025-04-07T14:24:46.837900Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:832:2668], Recipient [5:632:2537]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-07T14:24:46.838004Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:832:2668] is not needed >> BSConfigGRPCService::ReplaceStorageConfig >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection |98.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest >> MediatorTest::MultipleTablets [GOOD] >> test_commit.py::TestCommit::test_commit [GOOD] >> GenericProviderLookupActor::Lookup >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> test.py::test[solomon-Basic-default.txt] >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> 
GenericProviderLookupActor::LookupWithErrors [GOOD] >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters >> MediatorTest::TabletAckBeforePlanComplete |98.7%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-04-07T14:24:02.635970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576908107255482:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:02.636047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d13/r3tmp/tmpYWAOnK/pdisk_1.dat 2025-04-07T14:24:03.096062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:03.096227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:03.097888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:03.123940Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11947, node 1 2025-04-07T14:24:03.241304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:03.241329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:03.241335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:03.241433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:03.647286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-07T14:24:03.693865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 TClient is connected to server localhost:12411 waiting... 
2025-04-07T14:24:05.695952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-07T14:24:05.697317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744035843718 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744035843739 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:06.184783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:06.197567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 reason: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:12411 waiting... 2025-04-07T14:24:06.421098Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-04-07T14:24:06.451660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2025-04-07T14:24:06.457570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1744035846221, "myFolder"); 2025-04-07T14:24:06.730514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576925287125707:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:06.730589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490576925287125719:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:06.730642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:06.735284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-04-07T14:24:06.746425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490576925287125721:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2025-04-07T14:24:07.636061Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7490576908107255482:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:07.636144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:24:07.680789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr8a66j552rzbqh8jz9etvfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJiMTYyYjEtMzM3ZTQyZTQtY2UyMDFmYmItZjBmNDljM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.019496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jr8a67h61w3yykjbbyhyzy9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJmOTViNTUtMmQzMzk4YjktZDZhMWIzMDQtOWYxYjQ0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.177553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr8a67vr79rwm40anj5eddvt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEzZWRmNjQtMTJkY2ZiNGEtMzE2ZDY3NDYtZjZkNmExNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.350569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr8a67zs21sv301wq7x506z3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYxNzE0YmYtZTcwMjUxMGQtMTVlZDJmMTUtYjgyN2I3NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:06.221000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:06.221000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:08.357844Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2025-04-07T14:24:08.467017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr8a688j62pr8dx1xzjve685, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg3MGJhNzAtZTMyYWIwZjgtZjZhMjliMDgtZDQzY2VjZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.472996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr8a688qfy9mrhdm7vfshfe9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliNTRhMi0xMTFmMzRlYy1mYjJjNWRhNy00M2U1NWUwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.579980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr8a68c2fa47qncvyan391jv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWEzMDUzMTAtYTFjYTBkOGEtZWQ2OTM1ZjAtNjA5YWE4YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:24:08.585633Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr8a68c85a01ay9p3fz9by1v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNjNDU0MjktNDg5MTA0Y2YtZmIxNTZjYjItOTM2NWEwMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.690682Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr8a68ff5dtnp0nd18h431ds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFhMGJhZTctY2JjYmIwNTktOGU5Y2VhOGItMzI4NzUzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:08.697398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr8a68fr6x4amdzf47dmdq21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAwNDVlYWEtODVkZjkzZjctZDg1YmVlYzktOTUyNzc1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , Pool ... atabase not set, use /Root 2025-04-07T14:24:15.753523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710720. Ctx: { TraceId: 01jr8a6f60b820s33wtm2eqg5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdlNjJlNmUtNjU5NGY2OWQtNDhjNmI2MzAtYjQ0NTA3YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-07T14:24:12.595000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-07T14:24:12.595000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:11.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:15.759188Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:11.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:15.759262Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 
===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:15.759298Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:11.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:15.759332Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-07T14:24:12.595000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-07T14:24:15.759361Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2025-04-07T14:24:15.867861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710721. Ctx: { TraceId: 01jr8a6ffsfhecxyt8q72zz7fv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRmM2M5OTEtZTQ5ZTk4M2YtZTgwNGFmMmItZjcyNDAxM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:15.886282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710722. Ctx: { TraceId: 01jr8a6fgaef8jksxsmj49dhss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRmM2M5OTEtZTQ5ZTk4M2YtZTgwNGFmMmItZjcyNDAxM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:15.893600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710723. Ctx: { TraceId: 01jr8a6fgk40p7x8efjrxjqwh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmYWM3ZGUtNWVjYjBiZGQtZWQ2ODU4MzYtYzJhY2JjYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.038297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710724. Ctx: { TraceId: 01jr8a6fn446zxm1z94wyky2m1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NkNmI5MTUtNzc4ZjFiMmEtY2E1OWQ5OWYtNzgwZmRjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.050035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710725. Ctx: { TraceId: 01jr8a6fnbcyh3h8rqp53wb0kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NkNmI5MTUtNzc4ZjFiMmEtY2E1OWQ5OWYtNzgwZmRjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.057730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710726. Ctx: { TraceId: 01jr8a6fnr8538tc11g3wrt7h7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYzZjBjMTEtNGZlMzFlMDgtYjQwMjQ0ZmYtNjA0NGU3OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.175084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710727. Ctx: { TraceId: 01jr8a6fsa56zbxjj5f16psjj7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhMzUyYjgtNTU0YWFlNmItYjkyY2Y1NmQtNzEwZmFiZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:24:16.188911Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710728. Ctx: { TraceId: 01jr8a6fd4atcfjyyd0k18kcx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJiMTYyYjEtMzM3ZTQyZTQtY2UyMDFmYmItZjBmNDljM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.189049Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710729. Ctx: { TraceId: 01jr8a6fsq5rfmgpy05fa5pdas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhMzUyYjgtNTU0YWFlNmItYjkyY2Y1NmQtNzEwZmFiZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:16.195978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710730. Ctx: { TraceId: 01jr8a6ft27r3g5g68qvmbj9zk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhkMTZhNDgtYTBkMjkxODgtNjYzMjIwOTQtOTYwYmQ0M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:12411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744035843718 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 5 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1744035846791 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:16.526783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710731:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:16.533163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710731, at schemeshard: 72057594046644480 reason: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:12411 waiting... 2025-04-07T14:24:16.895481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710733, at schemeshard: 72057594046644480 2025-04-07T14:24:16.949416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710735:1, at schemeshard: 72057594046644480 2025-04-07T14:24:16.951001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710734:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-04-07T14:24:18.125061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-07T14:24:18.125095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:44.071039Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710736. 
Ctx: { TraceId: 01jr8a7aba0fn03bsx8vhh33dk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJiMTYyYjEtMzM3ZTQyZTQtY2UyMDFmYmItZjBmNDljM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:45.289059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710737. Ctx: { TraceId: 01jr8a7bnq9e52gnaxpchcf9as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJiMTYyYjEtMzM3ZTQyZTQtY2UyMDFmYmItZjBmNDljM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:46.530211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710738. Ctx: { TraceId: 01jr8a7d7223y73f21x2zgz62t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFkNDJkNjUtZDE1YjM0MjktNDdjNjYyZmUtNmE1ZWI3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:46.586030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710739. Ctx: { TraceId: 01jr8a7dfrekgww2x28jyanak4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFkNDJkNjUtZDE1YjM0MjktNDdjNjYyZmUtNmE1ZWI3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:46.665899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710740. Ctx: { TraceId: 01jr8a7dj584zmafh77bx1p358, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFkNDJkNjUtZDE1YjM0MjktNDdjNjYyZmUtNmE1ZWI3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:46.674280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710741. Ctx: { TraceId: 01jr8a7djh7dtcp26z8tbfnjny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFkNDJkNjUtZDE1YjM0MjktNDdjNjYyZmUtNmE1ZWI3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:46.844413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710742. Ctx: { TraceId: 01jr8a7djp5vydgbn524krqnyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDU4ZDE3ZWMtNDE3YTk5MDctM2U5ODYwZWEtMzFkNDg3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |98.7%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> LongTxService::LockSubscribe [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-04-07 14:24:49.081 INFO ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:152: New generic proivider lookup source actor(ActorId=[1:3:2050]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-04-07 14:24:49.132 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:289: ActorId=[1:3:2050] Got LookupRequest for 3 keys Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. 
GRpcStatusCode: 0 2025-04-07 14:24:49.153 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:320: ActorId=[1:3:2050] Got TListSplitsStreamIterator 2025-04-07 14:24:49.153 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:197: ActorId=[1:3:2050] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. GRpcStatusCode: 0 2025-04-07 14:24:49.153 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:230: ActorId=[1:3:2050] Got ReadSplitsStreamIterator from Connector 2025-04-07 14:24:49.153 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:342: ActorId=[1:3:2050] Got DataChunk 2025-04-07 14:24:49.174 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:353: ActorId=[1:3:2050] Got EOF 2025-04-07 14:24:49.174 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB476F9AAC0) [generic] yql_generic_lookup_actor.cpp:403: Sending lookup results for 3 keys 2025-04-07 14:24:49.243 INFO ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:152: New generic proivider lookup source actor(ActorId=[2:7490577108651739911:2050]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-04-07 14:24:49.243 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:289: ActorId=[2:7490577108651739911:2050] Got LookupRequest for 3 keys Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { co ... left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 4 Mocked Error 2025-04-07 14:24:49.272 WARN ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:426: ActorId=[2:7490577108651739911:2050] Got retrievable GRPC Error from Connector: gRpcStatusCode: 4, Msg: Mocked Error, Details: , InternalError: 0, retry scheduled in 0.015623s Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2025-04-07 14:24:49.289 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:320: ActorId=[2:7490577108651739911:2050] Got TListSplitsStreamIterator 2025-04-07 14:24:49.290 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:197: ActorId=[2:7490577108651739911:2050] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2025-04-07 14:24:49.290 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:230: ActorId=[2:7490577108651739911:2050] Got ReadSplitsStreamIterator from Connector 2025-04-07 14:24:49.290 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:342: ActorId=[2:7490577108651739911:2050] Got DataChunk 2025-04-07 14:24:49.291 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:353: ActorId=[2:7490577108651739911:2050] Got EOF 2025-04-07 14:24:49.291 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=791818, tid=0x00007FB4727F6640) [generic] yql_generic_lookup_actor.cpp:403: Sending lookup results for 3 keys |98.7%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-04-07T14:24:45.997525Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:24:45.997946Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZacq0c/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:24:45.998453Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZacq0c/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZacq0c/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7429461175296917552 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:24:46.055452Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:421:2313] 2025-04-07T14:24:46.056228Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.074380Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:422:2097] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.074513Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:92:2048] 2025-04-07T14:24:46.074673Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:143:2086] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.074918Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:422:2097] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.075043Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:143:2086] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.075684Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 without side-effects 2025-04-07T14:24:46.076010Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:422:2097] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.076148Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:143:2086] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.076451Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:422:2097] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.076599Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:143:2086] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=1 2025-04-07T14:24:46.076823Z node 1 
:PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-07T14:24:46.077217Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-07T14:24:46.077500Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:92:2048] 2025-04-07T14:24:46.077797Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2071] ServerId# [1:346:2266] TabletId# 72057594037932033 PipeClientId# [2:75:2071] 2025-04-07T14:24:46.081975Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:422:2097] LongTxId# ydb://long-tx/000000001e28jy08exj1tf0tkn?node_id=3 2025-04-07T14:24:46.082271Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:460:2099] 2025-04-07T14:24:46.771086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-07T14:24:46.771150Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:46.865600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-07T14:24:47.487352Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:24:47.487866Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmphi8WAm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:24:47.488201Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmphi8WAm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmphi8WAm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10289034924998459322 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:24:47.782673Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:500:2378] for database /dc-1 2025-04-07T14:24:47.782759Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.793311Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.793543Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:551:2416] Sending navigate request for /dc-1 2025-04-07T14:24:47.796146Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:551:2416] Received navigate response status Ok 2025-04-07T14:24:47.796205Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:551:2416] Sending acquire step to coordinator 72057594046316545 2025-04-07T14:24:47.799221Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:551:2416] Received read step 1000 2025-04-07T14:24:47.799352Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-04-07T14:24:47.799879Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:500:2378] 2025-04-07T14:24:47.799932Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.810943Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.811150Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:568:2427] Sending navigate request for /dc-1 2025-04-07T14:24:47.811364Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:568:2427] Received navigate response status Ok 2025-04-07T14:24:47.811414Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:568:2427] Sending acquire step to coordinator 72057594046316545 2025-04-07T14:24:47.811617Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:568:2427] Received read step 1500 2025-04-07T14:24:47.811733Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 
3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-04-07T14:24:47.811787Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-04-07T14:24:47.811987Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:500:2378] 2025-04-07T14:24:47.812033Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.824460Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-07T14:24:47.824778Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:570:2429] Sending navigate request for /dc-1 2025-04-07T14:24:47.825080Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:570:2429] Received navigate response status Ok 2025-04-07T14:24:47.825134Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:570:2429] Sending acquire step to coordinator 72057594046316545 2025-04-07T14:24:47.825313Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:570:2429] Received read step 1500 2025-04-07T14:24:47.825393Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-04-07T14:24:47.825458Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001e85sf6hr82p0ftnd75?node_id=3&snapshot=1500%3Amax 2025-04-07T14:24:48.792445Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-07T14:24:48.793190Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZagVFX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-07T14:24:48.793422Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZagVFX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/u6pr/000f51/r3tmp/tmpZagVFX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14988978903035376651 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2025-04-07T14:24:48.846189Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-04-07T14:24:48.846332Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:422:2314] for LockId# 987 LockNode# 5 2025-04-07T14:24:48.862860Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:423:2097] for LockId# 987 LockNode# 5 2025-04-07T14:24:48.863881Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:92:2048] 2025-04-07T14:24:48.864086Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:143:2086] for LockId# 987 LockNode# 5 2025-04-07T14:24:48.865511Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:142:2133] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-07T14:24:48.865744Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:422:2314] for LockId# 123 LockNode# 5 2025-04-07T14:24:48.865886Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:423:2097] for LockId# 123 LockNode# 5 2025-04-07T14:24:48.866042Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:143:2086] for LockId# 123 LockNode# 5 2025-04-07T14:24:48.866238Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:142:2133] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-04-07T14:24:48.866380Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-04-07T14:24:48.866508Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:142:2133] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-07T14:24:48.866683Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:423:2097] for LockId# 234 LockNode# 5 2025-04-07T14:24:48.866881Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 
2025-04-07T14:24:48.867251Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-07T14:24:48.867493Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:92:2048] 2025-04-07T14:24:48.867661Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:77:2072] ServerId# [5:347:2267] TabletId# 72057594037932033 PipeClientId# [6:77:2072] 2025-04-07T14:24:49.062795Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:452:2048] 2025-04-07T14:24:49.063050Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.063113Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.063181Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.063675Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:452:2048] 2025-04-07T14:24:49.063929Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:453:2098] ServerId# [5:458:2333] TabletId# 72057594037932033 PipeClientId# [6:453:2098] 2025-04-07T14:24:49.064085Z node 6 :TX_PROXY WARN: actor# [6:141:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-07T14:24:49.277056Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:482:2048] 2025-04-07T14:24:49.277285Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.277340Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.277925Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:482:2048] 2025-04-07T14:24:49.278384Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:483:2099] ServerId# [5:487:2353] TabletId# 72057594037932033 PipeClientId# [6:483:2099] 2025-04-07T14:24:49.515419Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:504:2048] 2025-04-07T14:24:49.515692Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.515749Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-07T14:24:49.515902Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:504:2048] 2025-04-07T14:24:49.516360Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:505:2101] ServerId# [5:509:2368] TabletId# 72057594037932033 PipeClientId# [6:505:2101] |98.7%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> TTestYqlToMiniKQLCompile::CheckResolve >> TCreateAndDropViewTest::CheckCreatedView >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> TestFormatHandler::ManyRawClients [GOOD] >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> TestFormatHandler::ClientValidation >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> QueryActorTest::StreamQuery [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] [GOOD] |98.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.7%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> MediatorTest::TabletAckWhenDead >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] |98.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_commit.py::TestCommit::test_commit [GOOD] |98.7%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sample_k/unittest >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] Test command err: 2025-04-07T14:24:44.129763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577084951217734:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:44.129865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e2f/r3tmp/tmprJGuf5/pdisk_1.dat 2025-04-07T14:24:44.594301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:44.599991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:44.600066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:44.616620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-04-07T14:24:44.684019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:44.728811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:44.757688Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490577084951218294:2271] 2025-04-07T14:24:44.757908Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:44.774518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:44.774575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:44.776537Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:44.776597Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:44.776630Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:44.777639Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:44.815447Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:44.816861Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:44.816938Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490577084951218310:2272] 2025-04-07T14:24:44.816947Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:44.816957Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:44.816969Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:44.817956Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:44.818046Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:44.818072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:44.818087Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:44.818112Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:44.818124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:44.821009Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577084951218291:2290], serverId# [1:7490577084951218309:2298], sessionId# [0:0:0] 2025-04-07T14:24:44.821124Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:44.823529Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:44.823655Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T14:24:44.824699Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-07T14:24:44.825573Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:44.826413Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:44.829592Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577084951218324:2306], serverId# [1:7490577084951218326:2308], sessionId# [0:0:0] 2025-04-07T14:24:44.833715Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744035884871 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490577084951218196 RawX2: 4294969514 } } Step: 1744035884871 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:44.833738Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:44.833863Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:44.833899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:44.833908Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:44.833952Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744035884871:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:44.834138Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035884871:281474976710657 keys extracted: 0 2025-04-07T14:24:44.834230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:44.834288Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:44.834309Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:24:44.840521Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:44.841040Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:44.843888Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:24:44.843913Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:44.844079Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035884871 txid# 281474976710657} 2025-04-07T14:24:44.844106Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035884871} 2025-04-07T14:24:44.844146Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:44.844196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:44.844218Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:44.844244Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:24:44.844314Z node 1 :TX_DATASHARD DEBUG: Complete [1744035884871 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 
send result to client [1:7490577084951218138:2188], exec latency: 6 ms, propose latency: 10 ms 2025-04-07T14:24:44.844351Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T14:24:44.844380Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:44.844628Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744035884878 2025-04-07T14:24:44.848796Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T14:24:44.848869Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:24:47.122132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490577097836120304:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:47.122259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:47.127408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490577097836120316:2288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:47.160328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:47.165567Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:47.169508Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:47.170772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490577097836120318:2289], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-07T14:24:48.611514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jr8a7e082x0kzx7p0tcgfp9w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBjOGQ5ZWUtNTViZWM3NzctNjhmNDEzNzgtMzE0MGQzODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:24:48.648822Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577102131087699:2402], serverId# [1:7490577102131087700:2403], sessionId# [0:0:0] 2025-04-07T14:24:48.652818Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:48.681445Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:48.681551Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:48.696914Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:48.697046Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710661 at tablet 72075186224037888 2025-04-07T14:24:48.697774Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:48.698959Z node 1 :TX_DATASHARD DEBUG: Planned trans ... connected -> Connecting 2025-04-07T14:24:49.648806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:49.661744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:49.666769Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-04-07T14:24:49.669586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:49.675108Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7490577109191546279:2271] 2025-04-07T14:24:49.675327Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:49.683955Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:49.684021Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:49.686877Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:49.686918Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:49.686943Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:49.687328Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:49.687357Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:49.687428Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:49.687491Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:7490577109191546294:2272] 2025-04-07T14:24:49.687514Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:49.687541Z node 2 :TX_DATASHARD INFO: Cannot activate 
change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:49.687552Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:49.687653Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:49.687716Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:49.687739Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:49.687755Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:49.687773Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:49.687781Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:49.728349Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546277:2289], serverId# [2:7490577109191546297:2298], sessionId# [0:0:0] 2025-04-07T14:24:49.728473Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:49.728677Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:49.728744Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:49.729719Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:49.730661Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:49.730717Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:49.732625Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546310:2305], serverId# [2:7490577109191546311:2306], sessionId# [0:0:0] 2025-04-07T14:24:49.732882Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1744035889778 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 7490577109191546193 RawX2: 8589936815 } } Step: 1744035889778 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:49.732897Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:49.732982Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:49.733002Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:49.733020Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744035889778:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:49.733245Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035889778:281474976715657 keys extracted: 0 2025-04-07T14:24:49.733389Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:49.733489Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:49.733532Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 
2] schema version# 1 2025-04-07T14:24:49.733888Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:49.734179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:49.735106Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:49.735140Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744035889777 2025-04-07T14:24:49.735148Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:49.735169Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744035889778 2025-04-07T14:24:49.735211Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035889778 txid# 281474976715657} 2025-04-07T14:24:49.735248Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035889778} 2025-04-07T14:24:49.735275Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:49.735310Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:49.735328Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:49.735345Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:24:49.735377Z node 2 :TX_DATASHARD DEBUG: Complete [1744035889778 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7490577109191546036:2137], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:24:49.735395Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:24:49.735418Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:49.736093Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:24:49.736141Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:24:49.741830Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546349:2334], serverId# [2:7490577109191546350:2335], sessionId# [0:0:0] 2025-04-07T14:24:49.741946Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:49.742035Z node 2 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715658 at tablet 72075186224037888 2025-04-07T14:24:49.743115Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:49.744334Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1744035889792 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 7490577109191546193 RawX2: 8589936815 } } Step: 1744035889792 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:49.744375Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-04-07T14:24:49.744563Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:49.744588Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:49.744604Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744035889792:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:49.744765Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035889792:281474976715658 keys extracted: 0 2025-04-07T14:24:49.746207Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:49.746570Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035889792 txid# 281474976715658} 2025-04-07T14:24:49.746595Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035889792} 2025-04-07T14:24:49.746625Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:49.746658Z node 2 :TX_DATASHARD DEBUG: Complete [1744035889792 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7490577109191546344:2330], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:24:49.746684Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:49.748312Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546360:2345], serverId# [2:7490577109191546361:2346], sessionId# [0:0:0] 2025-04-07T14:24:49.749424Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546365:2350], serverId# [2:7490577109191546366:2351], sessionId# [0:0:0] 2025-04-07T14:24:49.750517Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546370:2355], serverId# [2:7490577109191546371:2356], sessionId# [0:0:0] 2025-04-07T14:24:49.751525Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546375:2360], serverId# [2:7490577109191546376:2361], sessionId# [0:0:0] 2025-04-07T14:24:49.752860Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546380:2365], serverId# [2:7490577109191546381:2366], sessionId# [0:0:0] 2025-04-07T14:24:49.754120Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7490577109191546385:2370], serverId# [2:7490577109191546386:2371], sessionId# [0:0:0] |98.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_sample_k/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-07T14:24:30.601250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577024855945040:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:30.601300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c9e/r3tmp/tmpJDBf2R/pdisk_1.dat 2025-04-07T14:24:31.078109Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:31.078241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:31.085714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:31.130362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:31.159638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:31.203922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:31.242114Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7490577029150912762:2271] 2025-04-07T14:24:31.242354Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:31.272138Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:31.272201Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:31.273992Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:31.276758Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:31.276833Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:31.277816Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:31.340621Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:31.341854Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:31.341918Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7490577029150912778:2272] 2025-04-07T14:24:31.341957Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:31.341967Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:31.341975Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.342747Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577029150912759:2290], serverId# [1:7490577029150912777:2298], sessionId# [0:0:0] 2025-04-07T14:24:31.342835Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:31.342920Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:31.342947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:31.342968Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:31.342989Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:31.343017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:31.343034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:31.343949Z node 1 :TX_DATASHARD DEBUG: 
Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:31.344055Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-07T14:24:31.345482Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:31.345561Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:31.346181Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:31.348752Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577029150912792:2306], serverId# [1:7490577029150912793:2307], sessionId# [0:0:0] 2025-04-07T14:24:31.358943Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744035871396 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7490577029150912684 RawX2: 4294969526 } } Step: 1744035871396 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:31.358982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.359129Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:31.359188Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:31.359208Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:31.359292Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744035871396:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:31.359555Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035871396:281474976710657 keys extracted: 0 2025-04-07T14:24:31.359698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:31.359892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:31.359925Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:24:31.363199Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:31.363639Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:31.364764Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035871396 txid# 281474976710657} 2025-04-07T14:24:31.364792Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035871396} 2025-04-07T14:24:31.364827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:31.364863Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:24:31.364874Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.365240Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:31.365269Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:31.365300Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:24:31.365341Z node 1 :TX_DATASHARD DEBUG: Complete [1744035871396 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7490577029150912610:2190], exec latency: 3 ms, propose latency: 5 ms 2025-04-07T14:24:31.365373Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-07T14:24:31.365422Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.365488Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744035871403 2025-04-07T14:24:31.368678Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-07T14:24:31.368720Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:24:31.376396Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577029150912831:2335], serverId# [1:7490577029150912832:2336], sessionId# [0:0:0] 2025-04-07T14:24:31.377420Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:31.377575Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-07T14:24:31.378898Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:31.380819Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1744035871424 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 7490577029150912684 RawX2: 4294969526 } } Step: 1744035871424 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:31.380842Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.380936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:31.380950Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:31.380971Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744035871424:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:31.381117Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744035871424:281474976710658 keys extracted: 0 2025-04-07T14:24:31.381379Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:31.382200Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1744035871424 txid# 281474976710658} 2025-04-07T14:24:31.382242Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744035871424} 2025-04-07T14:24:31.382295Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:31.382326Z node 1 :TX_DATASHARD DEBUG: Complete [1744035871424 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send 
result to client [1:7490577029150912826:2331], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:24:31.382342Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:31.385449Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7490577029150912842:2346], serverId# [1:7490577029150912843:2347], sessionId# [0:0:0] 2025-04-07T14:24:31.386241Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:31.386316Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-04-07T14:24:31.387558Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 ... 644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-04-07T14:24:51.871842Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-04-07T14:24:51.871862Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:51.871883Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1744035891913:281474976710688] in PlanQueue unit at 72075186224037895 2025-04-07T14:24:51.871936Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:51.872032Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037895 loaded tx from db 1744035891913:281474976710688 keys extracted: 0 2025-04-07T14:24:51.872161Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:51.872243Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-04-07T14:24:51.872298Z node 5 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037895 2025-04-07T14:24:51.872621Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:51.873860Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037895 step# 1744035891913 txid# 281474976710688} 2025-04-07T14:24:51.873894Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037895 step# 1744035891913} 2025-04-07T14:24:51.873926Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-07T14:24:51.874021Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-07T14:24:51.874071Z node 5 :TX_DATASHARD DEBUG: Complete [1744035891913 : 281474976710688] from 72075186224037895 at tablet 72075186224037895 send result to client [5:7490577099331641920:2149], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:24:51.874111Z node 5 :TX_DATASHARD INFO: 72075186224037895 Sending notify to schemeshard 72057594046644480 txId 281474976710688 state PreOffline TxInFly 0 2025-04-07T14:24:51.874151Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-07T14:24:51.874699Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710688 datashard 72075186224037895 state PreOffline 2025-04-07T14:24:51.874730Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-07T14:24:51.876041Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] 
OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-07T14:24:51.876108Z node 5 :TX_DATASHARD INFO: 72075186224037895 Initiating switch from PreOffline to Offline state 2025-04-07T14:24:51.877342Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-07T14:24:51.877796Z node 5 :TX_DATASHARD INFO: 72075186224037895 Reporting state Offline to schemeshard 72057594046644480 2025-04-07T14:24:51.879159Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:51.879172Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-04-07T14:24:51.882294Z node 5 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037895 state Offline 2025-04-07T14:24:51.885981Z node 5 :TX_DATASHARD INFO: OnTabletStop: 72075186224037895 reason = ReasonStop 2025-04-07T14:24:51.886213Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037896 actor [5:7490577116511512585:2334] 2025-04-07T14:24:51.886397Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:51.886412Z node 5 :TX_DATASHARD INFO: OnTabletDead: 72075186224037895 2025-04-07T14:24:51.886442Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037895 2025-04-07T14:24:51.886990Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-04-07T14:24:51.896175Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:51.896233Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:51.897509Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037896 2025-04-07T14:24:51.897554Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037896 2025-04-07T14:24:51.897583Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037896 2025-04-07T14:24:51.897796Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:51.897818Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037896 2025-04-07T14:24:51.897875Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:51.897903Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037896, actorId: [5:7490577116511512610:2335] 2025-04-07T14:24:51.897914Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-07T14:24:51.897924Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037896, state: WaitScheme 2025-04-07T14:24:51.897934Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-07T14:24:51.897993Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037896 2025-04-07T14:24:51.898028Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037896 2025-04-07T14:24:51.898044Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-07T14:24:51.898055Z node 5 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:51.898069Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037896 TxInFly 0 2025-04-07T14:24:51.898081Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-07T14:24:51.898501Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7490577116511512589:3230], serverId# [5:7490577116511512594:3234], sessionId# [0:0:0] 2025-04-07T14:24:51.898582Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037896 2025-04-07T14:24:51.898764Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037896 txId 281474976710689 ssId 72057594046644480 seqNo 2:16 2025-04-07T14:24:51.898826Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710689 at tablet 72075186224037896 2025-04-07T14:24:51.899445Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-07T14:24:51.900225Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037896 2025-04-07T14:24:51.900294Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:51.901872Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7490577116511512616:3250], serverId# [5:7490577116511512617:3251], sessionId# [0:0:0] 2025-04-07T14:24:51.902098Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710689 at step 1744035891948 at tablet 72075186224037896 { Transactions { TxId: 281474976710689 AckTo { RawX1: 7490577099331642060 RawX2: 21474838706 } } Step: 1744035891948 MediatorID: 72057594046382081 TabletID: 72075186224037896 } 2025-04-07T14:24:51.902118Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-07T14:24:51.902190Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-07T14:24:51.902211Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:51.902230Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1744035891948:281474976710689] in PlanQueue unit at 72075186224037896 2025-04-07T14:24:51.902459Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037896 loaded tx from db 1744035891948:281474976710689 keys extracted: 0 2025-04-07T14:24:51.902566Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:51.902644Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-07T14:24:51.902687Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037896 tableId# [OwnerId: 72057594046644480, LocalPathId: 14] schema version# 1 2025-04-07T14:24:51.903048Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037896 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:51.903351Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:51.904546Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-07T14:24:51.904595Z node 5 
:TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037896 time 1744035891920 2025-04-07T14:24:51.904605Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-07T14:24:51.904626Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:51.905164Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037896 step# 1744035891948 txid# 281474976710689} 2025-04-07T14:24:51.905196Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037896 step# 1744035891948} 2025-04-07T14:24:51.905222Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-07T14:24:51.905248Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-07T14:24:51.905269Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-07T14:24:51.905283Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037896 2025-04-07T14:24:51.905324Z node 5 :TX_DATASHARD DEBUG: Complete [1744035891948 : 281474976710689] from 72075186224037896 at tablet 72075186224037896 send result to client [5:7490577099331641920:2149], exec latency: 0 ms, propose latency: 2 ms 2025-04-07T14:24:51.905351Z node 5 :TX_DATASHARD INFO: 72075186224037896 Sending notify to schemeshard 72057594046644480 txId 281474976710689 state Ready TxInFly 0 2025-04-07T14:24:51.905396Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-07T14:24:51.905455Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037896 coordinator 72057594046316545 last step 0 next step 1744035891948 2025-04-07T14:24:51.906861Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710689 datashard 72075186224037896 state Ready 2025-04-07T14:24:51.906917Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 Got TEvSchemaChangedResult from SS at 72075186224037896 |98.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> BSConfigGRPCService::ReplaceStorageConfig [GOOD] >> BSConfigGRPCService::FetchStorageConfig >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-04-07T14:24:27.466809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577014966728880:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:27.466855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e98/r3tmp/tmpl2o7LN/pdisk_1.dat 2025-04-07T14:24:28.000279Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:28.015445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-07T14:24:28.015569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:28.031376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3379 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T14:24:28.422643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:28.473382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:28.614623Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-07T14:24:30.643739Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:30.653054Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:30.668258Z node 1 :KQP_PROXY DEBUG: Request has 18445000037838.883394s seconds to be completed 2025-04-07T14:24:30.680796Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Mjc2OWExNWQtNjEzMzk3MjgtNmJkMTI0ZS05YjhjODkyOA==, workerId: [1:7490577027851631450:2288], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T14:24:30.680867Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:30.681005Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T14:24:30.681140Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T14:24:30.681170Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T14:24:30.684765Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:30.684854Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:30.684962Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:30.684999Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:30.685083Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:30.692982Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:30.693018Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:30.694048Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-04-07T14:24:30.706545Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Mjc2OWExNWQtNjEzMzk3MjgtNmJkMTI0ZS05YjhjODkyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7490577027851631450:2288] 2025-04-07T14:24:30.706599Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7490577027851631452:2352] 2025-04-07T14:24:30.709281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490577027851631453:2290], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:30.709404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:30.709748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490577027851631465:2293], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:30.715836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:24:30.732962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7490577027851631467:2294], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-07T14:24:31.907280Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7490577027851631451:2289], selfId: [1:7490577014966729113:2256], source: [1:7490577027851631450:2288] 2025-04-07T14:24:31.907653Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Mjc2OWExNWQtNjEzMzk3MjgtNmJkMTI0ZS05YjhjODkyOA==, TxId: 2025-04-07T14:24:31.908286Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=Mjc2OWExNWQtNjEzMzk3MjgtNmJkMTI0ZS05YjhjODkyOA==, TxId: 2025-04-07T14:24:31.916018Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=Mjc2OWExNWQtNjEzMzk3MjgtNmJkMTI0ZS05YjhjODkyOA==, workerId: [1:7490577027851631450:2288], local sessions count: 0 2025-04-07T14:24:31.917033Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:32.837080Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490577034573640386:2267];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e98/r3tmp/tmpIqPrTo/pdisk_1.dat 2025-04-07T14:24:32.858380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:24:32.946140Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:32.978055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:32.978133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:32.980138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25365 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T14:24:33.192966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:24:33.199754Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:24:33.202909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:33.248577Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-07T14:24:35.885856Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:35.886662Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:35.887013Z node 2 :KQP_PROXY DEBUG: Request has 18445000037833.664622s seconds to be completed 2025-04-07T14:24:35.889139Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YWY3MGFkMjMtZGMxMmRhODItNjQ1NWUxYjctZjg4NTRkMGM=, workerId: [2:7490577047458542695:2288], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T14:24:35.889178Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:35.889280Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T14:24:35.889332Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T14:24:35.889368Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-07T14:24:35.889399Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:35.889460Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:35.889603Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:35.889650Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24 ... 9;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e98/r3tmp/tmpB0FzB5/pdisk_1.dat 2025-04-07T14:24:42.391569Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:42.510145Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:42.510239Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:42.511990Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61707 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-07T14:24:42.647812Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:42.653702Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:24:42.667747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:42.745139Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-07T14:24:45.462222Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:45.463065Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:45.464966Z node 4 :KQP_PROXY DEBUG: Request has 18445000037824.086687s seconds to be completed 2025-04-07T14:24:45.467362Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=OTVjNjc3ZjEtNmRjYjVmNGMtMjc3Mjg4NC1kY2JhMGVmYw==, workerId: [4:7490577089272845325:2287], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-07T14:24:45.467402Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:45.467586Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T14:24:45.467632Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-07T14:24:45.467687Z node 4 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-07T14:24:45.467703Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-07T14:24:45.467744Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-07T14:24:45.467800Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:45.467876Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:45.467898Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:45.467920Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:45.467942Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:45.468641Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-07T14:24:45.473134Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-07T14:24:45.476410Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr8a7cd1ednx5wngv4tqypwt", Created new session, sessionId: ydb://session/3?node_id=4&id=N2RjNWQwMzQtZmU1MDg5LTNkNzNkNjcwLWU2OGY2OTlh, workerId: [4:7490577089272845352:2289], database: /dc-1, longSession: 0, local sessions count: 2 2025-04-07T14:24:45.476633Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr8a7cd1ednx5wngv4tqypwt, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=N2RjNWQwMzQtZmU1MDg5LTNkNzNkNjcwLWU2OGY2OTlh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7490577089272845352:2289] 2025-04-07T14:24:45.476689Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7490577089272845353:2352] 2025-04-07T14:24:45.483595Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490577089272845354:2290], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:45.483703Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:45.484517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7490577089272845366:2293], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:45.489279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-07T14:24:45.503239Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-07T14:24:45.503474Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7490577089272845368:2294], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-07T14:24:46.828713Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:47.193226Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7490577076387942833:2097];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:47.193299Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:24:50.364214Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-07T14:24:50.367875Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-04-07T14:24:50.367977Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTVjNjc3ZjEtNmRjYjVmNGMtMjc3Mjg4NC1kY2JhMGVmYw==, TxId: 2025-04-07T14:24:50.368807Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-07T14:24:50.527428Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-07T14:24:50.713619Z node 4 :KQP_PROXY DEBUG: Request has 18445000037818.838026s seconds to be completed 2025-04-07T14:24:50.715726Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=NzhlYjI5ZWItMTE5NDI0MDktZWEzMjhiYTctOGI5MjJlMzc=, workerId: [4:7490577110747681963:2313], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-07T14:24:50.715943Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-07T14:24:50.716040Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=OTVjNjc3ZjEtNmRjYjVmNGMtMjc3Mjg4NC1kY2JhMGVmYw==, workerId: [4:7490577089272845325:2287], local sessions count: 2 2025-04-07T14:24:50.716515Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-07T14:24:50.716557Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-07T14:24:50.716643Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-07T14:24:50.718500Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr8a7hgwc07d70018pd8vs2e", Created new session, sessionId: ydb://session/3?node_id=4&id=ZGIzYmM0YjYtYzI0NzcxNi0zYWVkZWRkNC05ZWY0MTVhOQ==, workerId: [4:7490577110747681966:2314], database: /dc-1, longSession: 0, local sessions count: 3 2025-04-07T14:24:50.718731Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr8a7hgwc07d70018pd8vs2e, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZGIzYmM0YjYtYzI0NzcxNi0zYWVkZWRkNC05ZWY0MTVhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7490577110747681966:2314] 2025-04-07T14:24:50.718778Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7490577110747681967:2421] 2025-04-07T14:24:50.814138Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744035890796, txId: 281474976715663] shutting down 2025-04-07T14:24:50.814246Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-07T14:24:50.817772Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr8a7hgwc07d70018pd8vs2e", Forwarded response to sender actor, requestId: 5, sender: [4:7490577110747681964:2419], selfId: [4:7490577076387943006:2256], source: [4:7490577110747681966:2314] 2025-04-07T14:24:50.818406Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZGIzYmM0YjYtYzI0NzcxNi0zYWVkZWRkNC05ZWY0MTVhOQ==, workerId: [4:7490577110747681966:2314], local sessions count: 2 2025-04-07T14:24:50.821504Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-07T14:24:50.822185Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-07T14:24:50.822274Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NzhlYjI5ZWItMTE5NDI0MDktZWEzMjhiYTctOGI5MjJlMzc=, TxId: 2025-04-07T14:24:50.822950Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NzhlYjI5ZWItMTE5NDI0MDktZWEzMjhiYTctOGI5MjJlMzc=, workerId: [4:7490577110747681963:2313], local sessions count: 1 |98.8%| [TS] {RESULT} ydb/library/query_actor/ut/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BrokenJsonResponse-] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-04-07T14:24:07.343739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:07.344103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:07.344357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f45/r3tmp/tmpnfIDt9/pdisk_1.dat 2025-04-07T14:24:07.886809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:07.937228Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:07.999721Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:24:08.005499Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:24:08.005827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:08.006006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:08.017601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:08.140242Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:24:08.140324Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:24:08.141550Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:24:08.248717Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:24:08.249442Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:24:08.249588Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:24:08.249929Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:24:08.250158Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:24:08.250356Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-04-07T14:24:08.250744Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:24:08.253869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:08.254982Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:24:08.255063Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:24:08.305148Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:08.306438Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:08.307686Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:08.308108Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:08.353218Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:08.354109Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:08.354228Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:08.356166Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:08.356248Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:08.356309Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:08.357318Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:08.394308Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:08.395525Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:08.395755Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:08.395841Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:08.395884Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:08.395923Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:08.396349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:08.396404Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:08.397208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:08.397325Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:08.397788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:08.397829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:08.397898Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:24:08.397963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:08.398007Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:08.398179Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:24:08.398240Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:08.398280Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:08.398312Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:08.398359Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:08.398545Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:24:08.398592Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:08.398723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:08.399034Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:24:08.399084Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:08.399203Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:08.399264Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:08.399307Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:24:08.399342Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:24:08.399376Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:08.399637Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:24:08.399671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:24:08.399734Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:08.399768Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:08.399822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:24:08.399856Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:08.399893Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:24:08.399925Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:08.399949Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:24:08.401554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:24:08.401607Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:08.413097Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:08.413207Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:08.413236Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:08.413282Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 4-07T14:24:53.287812Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-07T14:24:53.287867Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-04-07T14:24:53.288078Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:893:2726], Recipient [2:893:2726]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.288125Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.288194Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-07T14:24:53.288233Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:53.288272Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-04-07T14:24:53.288304Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-04-07T14:24:53.288339Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-04-07T14:24:53.288383Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-07T14:24:53.288417Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-04-07T14:24:53.288450Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-04-07T14:24:53.288483Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-07T14:24:53.288742Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-04-07T14:24:53.288785Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-04-07T14:24:53.288815Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-04-07T14:24:53.288849Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-04-07T14:24:53.288888Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-07T14:24:53.288915Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-04-07T14:24:53.288945Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-04-07T14:24:53.288979Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:53.289009Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-04-07T14:24:53.289041Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-07T14:24:53.289072Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-07T14:24:53.305414Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T14:24:53.305488Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-07T14:24:53.305527Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-07T14:24:53.305596Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1433:3239], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:24:53.305664Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-07T14:24:53.305861Z node 2 :TX_PROXY DEBUG: Actor# [2:1433:3239] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-07T14:24:53.305913Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-04-07T14:24:53.306063Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1433:3239], Recipient [2:734:2609]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-07T14:24:53.306104Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-07T14:24:53.306204Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:734:2609], Recipient [2:734:2609]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.306237Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.306315Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:24:53.306370Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:53.306413Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-04-07T14:24:53.306449Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-04-07T14:24:53.306486Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-04-07T14:24:53.306528Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-04-07T14:24:53.306564Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-07T14:24:53.306597Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-04-07T14:24:53.306628Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:24:53.306861Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-04-07T14:24:53.306891Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:53.306921Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-07T14:24:53.306955Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:24:53.306988Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:24:53.307530Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1465:3268], Recipient [2:734:2609]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-07T14:24:53.307584Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-07T14:24:53.307920Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-07T14:24:53.308023Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-07T14:24:53.373598Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T14:24:53.373675Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-04-07T14:24:53.373996Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:734:2609], Recipient [2:734:2609]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.374044Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:53.374123Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:24:53.374162Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:53.374204Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-04-07T14:24:53.374234Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-07T14:24:53.374272Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-04-07T14:24:53.374317Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-07T14:24:53.374353Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-04-07T14:24:53.374389Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-04-07T14:24:53.374419Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:24:53.374624Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-04-07T14:24:53.374659Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-04-07T14:24:53.374691Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:24:53.374718Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:24:53.374751Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-07T14:24:53.374774Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:24:53.374799Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-04-07T14:24:53.374829Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:53.374864Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-07T14:24:53.374895Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-07T14:24:53.374927Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-07T14:24:53.385899Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:24:53.385969Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:24:53.386020Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-07T14:24:53.386077Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1433:3239], exec latency: 1 ms, propose latency: 1 ms 2025-04-07T14:24:53.386128Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:24:53.386311Z node 2 :TX_PROXY DEBUG: Actor# [2:1433:3239] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-04-07T14:24:53.386430Z node 2 :TX_PROXY INFO: Actor# [2:1433:3239] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest >> TestFormatHandler::ClientValidation [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] >> TestFormatHandler::ClientError >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus >> TabletService_ChangeSchema::Basics |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> BSConfigGRPCService::FetchStorageConfig [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::PDiskFaultyState >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/bsconfig/ut/unittest >> BSConfigGRPCService::FetchStorageConfig [GOOD] Test command err: 2025-04-07T14:24:49.167225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577110396334727:2055];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:49.167974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000aae/r3tmp/tmpdrmkTw/pdisk_1.dat 2025-04-07T14:24:49.611630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:49.611745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:49.618054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:49.642410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24622, node 1 2025-04-07T14:24:49.681425Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-07T14:24:49.681820Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-07T14:24:49.681894Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-07T14:24:49.682155Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-07T14:24:49.683357Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-07T14:24:49.683369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:49.683385Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-07T14:24:49.683423Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-07T14:24:49.683510Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-07T14:24:49.683546Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-07T14:24:49.683560Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-07T14:24:49.690627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:49.690664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:49.690710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-07T14:24:49.690737Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:24:49.690840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-07T14:24:49.691135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-07T14:24:49.691595Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:24:49.691619Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-07T14:24:49.694184Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:24:49.694352Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:24:49.694400Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:24:49.694417Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:24:49.839029Z node 1 :GRPC_SERVER DEBUG: [0x51a000027c80] created request Name# BlobStorageConfig 2025-04-07T14:24:49.839405Z node 1 :GRPC_SERVER DEBUG: [0x51a000028280] created request Name# HiveCreateTablet 2025-04-07T14:24:49.839659Z node 1 :GRPC_SERVER DEBUG: [0x51a000028880] created request Name# LocalEnumerateTablets 2025-04-07T14:24:49.839939Z node 1 :GRPC_SERVER DEBUG: [0x51b000019680] created request Name# KeyValue 2025-04-07T14:24:49.840191Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# TabletStateRequest 2025-04-07T14:24:49.841139Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# LocalMKQL 2025-04-07T14:24:49.841425Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# LocalSchemeTx 2025-04-07T14:24:49.842244Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# TabletKillRequest 2025-04-07T14:24:49.842493Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# SchemeOperationStatus 2025-04-07T14:24:49.843369Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# ChooseProxy 2025-04-07T14:24:49.843725Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# ResolveNode 2025-04-07T14:24:49.843932Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# FillNode 2025-04-07T14:24:49.844153Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# DrainNode 2025-04-07T14:24:49.844401Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# InterconnectDebug 2025-04-07T14:24:49.844680Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# TestShardControl 2025-04-07T14:24:49.849262Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# RegisterNode 2025-04-07T14:24:49.851204Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# CmsRequest 2025-04-07T14:24:49.851606Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# SqsRequest 2025-04-07T14:24:49.852641Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# ConsoleRequest 2025-04-07T14:24:49.852941Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# SchemeInitRoot 2025-04-07T14:24:49.853894Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# PersQueueRequest 2025-04-07T14:24:49.854249Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0080] created request Name# Request 2025-04-07T14:24:49.855150Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0680] created request Name# SchemeOperation 2025-04-07T14:24:49.855496Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0c80] created request 
Name# SchemeDescribe 2025-04-07T14:24:49.949518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:49.949552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:49.949561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:49.949655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:50.473926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:24:50.474264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.477751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:24:50.480165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:50.480311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.485600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:24:50.486698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:24:50.486971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.487040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:24:50.487121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:24:50.487142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-07T14:24:50.490197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.490258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:24:50.490275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:24:50.491956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.491979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:50.492002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:24:50.492036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:24:50.500347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:24:50.504822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:24:50.504848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:24:50.508742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:24:50.508871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie ... 
SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:24:54.669380Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:24:54.671435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:54.671459Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:54.671472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:24:54.671496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:24:54.671572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:24:54.673611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:24:54.673633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:24:54.673649Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:24:54.675244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:24:54.675338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:24:54.679297Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035894720, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:24:54.679388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490577130615601646 RawX2: 12884904253 } } Step: 1744035894720 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:24:54.679408Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:24:54.679709Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:24:54.679735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:24:54.679859Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:24:54.679922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:24:54.681696Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
2025-04-07T14:24:54.681746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:24:54.681877Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:54.681896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7490577130615601657:2373], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:24:54.681929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:54.681953Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:24:54.682024Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:24:54.682039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:24:54.682058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:24:54.682074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:24:54.682086Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:24:54.682098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:24:54.682135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:24:54.682148Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:24:54.682156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:24:54.683226Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:24:54.683285Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:24:54.683298Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:24:54.683323Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:24:54.683717Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:24:54.685262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:24:54.685281Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:24:54.685294Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:24:54.685311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:24:54.685399Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:24:54.685413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7490577130615601939:2283] 2025-04-07T14:24:54.687861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2025-04-07T14:24:54.743278Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchStorageConfigRequest, traceId# 01jr8a7neq3kncntv4r2xj5vkm, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46892, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef Fetched yaml config: host_configs: - host_config_id: 3 drive: - path: /home/runner/.ya/build/build_root/u6pr/000aae/r3tmp/tmpo9w5sR/pdisk_1.dat type: HDD expected_slot_count: 9 hosts: - host: ::1 port: 12001 host_config_id: 3 2025-04-07T14:24:54.750064Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d8080] received request Name# Request ok# false data# peer# current inflight# 1 2025-04-07T14:24:54.750308Z node 3 :GRPC_SERVER DEBUG: [0x51a000010e80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750311Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d8680] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750481Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d8c80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750570Z node 3 :GRPC_SERVER DEBUG: [0x51a000010880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750653Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d7a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750795Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d7480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750838Z node 3 :GRPC_SERVER DEBUG: [0x51a000010280] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750942Z node 3 :GRPC_SERVER DEBUG: [0x51a00000fc80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.750974Z node 3 :GRPC_SERVER DEBUG: [0x51a00000f680] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751107Z node 3 :GRPC_SERVER DEBUG: [0x51a000013880] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751112Z node 3 :GRPC_SERVER DEBUG: [0x51a000013280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751252Z node 3 :GRPC_SERVER DEBUG: [0x51a000012c80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751314Z node 3 :GRPC_SERVER DEBUG: [0x51b000221280] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751382Z node 3 :GRPC_SERVER DEBUG: [0x51a00000ea80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751460Z node 3 :GRPC_SERVER DEBUG: [0x51a00000e480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751509Z node 3 :GRPC_SERVER DEBUG: [0x51a00000de80] received request Name# CmsRequest ok# false data# peer# 
current inflight# 0 2025-04-07T14:24:54.751592Z node 3 :GRPC_SERVER DEBUG: [0x51a00000d880] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751702Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d6e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751740Z node 3 :GRPC_SERVER DEBUG: [0x51a000012080] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751858Z node 3 :GRPC_SERVER DEBUG: [0x51a000011a80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.751882Z node 3 :GRPC_SERVER DEBUG: [0x51a000011480] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.752028Z node 3 :GRPC_SERVER DEBUG: [0x51a00000f080] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T14:24:54.752048Z node 3 :GRPC_SERVER DEBUG: [0x51a000012680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |98.8%| [TM] {RESULT} ydb/services/bsconfig/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> TestFormatHandler::ClientError [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TestFormatHandler::ClientErrorWithEmptyFilter >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> MediatorTest::TabletAckWhenDead [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> TopicSessionTests::TwoSessionWithoutPredicate >> MediatorTest::PlanStepAckToReconnectedMediator >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrun >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> test.py::test_local [FAIL] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> TestJsonParser::Simple1 >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> TestJsonParser::Simple2 [GOOD] >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> TestJsonParser::Simple3 >> TestJsonParser::Simple3 [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> TestJsonParser::Simple4 |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> 
TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> MediatorTest::WatcherReconnect >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> TestJsonParser::Simple4 [GOOD] >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration >> TestJsonParser::LargeStrings >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TestJsonParser::LargeStrings [GOOD] >> TestJsonParser::ManyValues >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> TestJsonParser::ManyValues [GOOD] >> TestJsonParser::MissingFields >> TestJsonParser::MissingFields [GOOD] >> TestJsonParser::NestedTypes >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> test.py::test[solomon-BrokenJsonResponse-] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> TestJsonParser::NestedTypes [GOOD] >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TestJsonParser::SimpleBooleans >> TestJsonParser::SimpleBooleans [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> TestJsonParser::ManyBatches >> TestJsonParser::ManyBatches [GOOD] >> TestJsonParser::LittleBatches >> TestJsonParser::LittleBatches [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TestJsonParser::MissingFieldsValidation |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [FAIL] |98.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TestJsonParser::MissingFieldsValidation [GOOD] >> MediatorTest::WatcherReconnect [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenario >> TestJsonParser::TypeKindsValidation >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> TestJsonParser::TypeKindsValidation [GOOD] >> MediatorTest::MultipleSteps >> TestJsonParser::NumbersValidation >> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD] >> KeyValueGRPCService::SimpleWriteReadRange >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> TestJsonParser::NumbersValidation [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TestJsonParser::StringsValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2025-04-07T14:25:00.386684Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:106:2138]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:25:00.415750Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:106:2138]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:25:00.425851Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:106:2138] 2025-04-07T14:25:00.428227Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:25:00.460865Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:100:2134], Recipient [1:106:2138]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:25:00.564035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:25:00.564783Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:25:00.566973Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-07T14:25:00.567064Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-07T14:25:00.567138Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-07T14:25:00.567470Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:25:00.597400Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-07T14:25:00.600263Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:25:00.600384Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:129:2155] 2025-04-07T14:25:00.600408Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-07T14:25:00.600444Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-07T14:25:00.600473Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-07T14:25:00.600906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:106:2138], Recipient [1:106:2138]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:25:00.601528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:25:00.602519Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-07T14:25:00.602625Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-07T14:25:00.602884Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-07T14:25:00.602942Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:00.602983Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-07T14:25:00.603014Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-07T14:25:00.603044Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-07T14:25:00.603072Z node 1 :TX_DATASHARD INFO: No tx to execute 
at 9437184 TxInFly 0 2025-04-07T14:25:00.603110Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-07T14:25:00.605880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:97:2132], Recipient [1:106:2138]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 4294969428 } 2025-04-07T14:25:00.605946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-07T14:25:05.085265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:05.085836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:05.085915Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000f1b/r3tmp/tmpgwENq0/pdisk_1.dat 2025-04-07T14:25:05.945981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:06.069608Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:06.142024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:06.142340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:06.155841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:06.345637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:25:06.426869Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:631:2537] 2025-04-07T14:25:06.427133Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:25:06.526289Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:25:06.526421Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:25:06.528077Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:25:06.528187Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:25:06.528256Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:25:06.528587Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:25:06.530684Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:25:06.530876Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:25:06.530973Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:648:2546] 2025-04-07T14:25:06.531007Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:25:06.531042Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:25:06.531072Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:06.531898Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:25:06.531996Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:25:06.532064Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:630:2536], serverId# [2:639:2541], sessionId# 
[0:0:0] 2025-04-07T14:25:06.532162Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:06.532200Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:06.532236Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:25:06.532283Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:06.532737Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:25:06.535350Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:25:06.535499Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:25:06.537330Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:25:06.548910Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:25:06.549053Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:25:06.751136Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:667:2559], serverId# [2:669:2561], sessionId# [0:0:0] 2025-04-07T14:25:06.771461Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 517 RawX2: 8589937047 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:25:06.771557Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:06.772032Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:06.772102Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:25:06.772155Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:25:06.772402Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:25:06.772561Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:06.772738Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:06.772804Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:25:06.786316Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:25:06.791060Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:06.803228Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:25:06.803317Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:06.805341Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:25:06.805411Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:25:06.805472Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:06.806736Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:06.806777Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:25:06.806856Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:25:06.806918Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:25:06.806969Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:25:06.807083Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 720751862240 ... 541], sessionId# [0:0:0] 2025-04-07T14:25:11.329046Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:11.329083Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:11.329126Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:25:11.329159Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:11.329429Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:25:11.329581Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:25:11.329652Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:25:11.331176Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:25:11.344838Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:25:11.344945Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:25:11.539671Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:667:2559], serverId# [3:669:2561], sessionId# [0:0:0] 2025-04-07T14:25:11.540254Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 534 RawX2: 12884904352 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:25:11.540301Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:11.541999Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:11.542066Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:25:11.542112Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:25:11.542336Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 
1000:281474976715657 keys extracted: 0 2025-04-07T14:25:11.542469Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:11.542649Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:11.542705Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:25:11.543085Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:25:11.543445Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:11.545178Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:25:11.545230Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:11.546110Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:25:11.546174Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:25:11.546230Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:11.547243Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:11.547287Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:25:11.547324Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:25:11.547398Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:393:2385], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:25:11.547450Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:25:11.547513Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:11.548022Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:25:11.550034Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:25:11.550092Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:25:11.550847Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:25:11.555975Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-07T14:25:11.556080Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-07T14:25:11.606673Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:706:2588] 2025-04-07T14:25:11.606957Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:25:11.613457Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:25:11.614541Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:25:11.616551Z node 3 :TX_DATASHARD DEBUG: 
LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:25:11.616638Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:25:11.616852Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:25:11.617174Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:25:11.617284Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-07T14:25:11.617384Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:25:11.617482Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:25:11.617552Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-04-07T14:25:11.617688Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:722:2597] 2025-04-07T14:25:11.617729Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:25:11.617774Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:25:11.617810Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:11.618232Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-07T14:25:11.618423Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-07T14:25:11.619031Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:25:11.619110Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:25:11.619967Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-04-07T14:25:11.620025Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:11.620451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 706 RawX2: 12884904476 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-07T14:25:11.620547Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:25:11.620759Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:25:11.620810Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:11.620854Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:25:11.621475Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:25:11.622132Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:25:11.667571Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-07T14:25:11.667891Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:25:11.667942Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037888: waitStep# 1000 readStep# 1000 observedStep# 1000 2025-04-07T14:25:11.668012Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:25:11.668275Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-04-07T14:25:11.669455Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:726:2601], serverId# [3:728:2602], sessionId# [0:0:0] 2025-04-07T14:25:11.694524Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888 |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> TestJsonParser::StringsValidation [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TestJsonParser::NestedJsonValidation >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TestJsonParser::BoolsValidation [GOOD] >> 
TestJsonParser::JsonStructureValidation >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> TestJsonParser::JsonStructureValidation [GOOD] >> TestPurecalcFilter::Simple1 >> MediatorTest::MultipleSteps [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> MediatorTest::WatchesBeforeFirstStep >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> KafkaProtocol::CreateTopicsScenario [GOOD] >> KafkaProtocol::CreatePartitionsScenario >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestExplain::test_explain_data_query |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> KeyValueGRPCService::SimpleWriteReadRange [GOOD] >> KeyValueGRPCService::SimpleWriteListRange >> TestPurecalcFilter::Simple1 [GOOD] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> MediatorTest::RebootTargetTablets >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> TestPurecalcFilter::Simple2 >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> KafkaProtocol::CreatePartitionsScenario [GOOD] >> KafkaProtocol::AlterConfigsScenario >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2025-04-07T14:23:58.722452Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576887490390687:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:58.722555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e9e/r3tmp/tmpAoYoZP/pdisk_1.dat 2025-04-07T14:23:59.161460Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:23:59.196916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:59.197030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:59.209122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20995, node 1 2025-04-07T14:23:59.418946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:59.418970Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:59.418977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:59.419059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:23:59.883826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.894837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:23:59.896232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.899911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:23:59.900140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:23:59.900201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:23:59.905180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:59.906730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:23:59.906760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:23:59.908382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.915550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035839959, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:23:59.915603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:23:59.915954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:23:59.920165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:23:59.920655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:23:59.920928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:23:59.921304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:23:59.921408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:23:59.921494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:23:59.924746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:23:59.924843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:23:59.924880Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:23:59.925060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-07T14:24:00.048090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/CoordinationNode, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:24:00.049766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-07T14:24:00.050241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:00.050270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:24:00.053452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/CoordinationNode 
2025-04-07T14:24:00.053675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:00.053896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:00.053972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateKesus, at tablet# 72057594046644480 2025-04-07T14:24:00.054425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-07T14:24:00.054865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:24:00.054898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:24:00.054912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:24:00.055187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:24:00.055206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:24:00.055214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:24:00.063415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:24:00.063532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2025-04-07T14:24:00.131567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2025-04-07T14:24:00.134718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:24:00.138658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035840183, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:24:00.138754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T14:24:00.138867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-07T14:24:00.140743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:00.141011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:00.141067Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2025-04-07T14:24:00.141122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:24:00.141186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:24:00.141317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, 
publications: 2, subscribers: 1 2025-04-07T14:24:00.143455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:24:00.143500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:24:00.143513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:24:00.143689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:24:00.143890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:24:00.143918Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14 ... ome/runner/.ya/build/build_root/u6pr/000e9e/r3tmp/tmpbWl2Ud/pdisk_1.dat 2025-04-07T14:25:15.139886Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:15.294397Z node 37 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:15.375702Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:15.375851Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:15.383377Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1960, node 37 2025-04-07T14:25:15.589509Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:25:15.589540Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:25:15.589553Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:25:15.589718Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:25:15.848259Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:25:15.848749Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:25:15.848787Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:25:15.851063Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:25:15.851411Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:25:15.851429Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-07T14:25:15.857508Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:25:15.857549Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-07T14:25:15.858060Z node 37 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:25:15.862972Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:25:15.870559Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035915916, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:25:15.870606Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-07T14:25:15.870961Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-07T14:25:15.874218Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:25:15.874471Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:25:15.874561Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-07T14:25:15.874739Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-07T14:25:15.874809Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-07T14:25:15.874867Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-07T14:25:15.875700Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-07T14:25:15.875757Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-07T14:25:15.875780Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2025-04-07T14:25:15.875889Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-07T14:25:15.996231Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/CoordinationNode, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:25:15.996399Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-04-07T14:25:15.996863Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:25:15.996892Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-07T14:25:16.001742Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/CoordinationNode 2025-04-07T14:25:16.001995Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:25:16.002288Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:25:16.002371Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet# 72057594046644480 2025-04-07T14:25:16.004553Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:25:16.004599Z node 37 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:25:16.004621Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-07T14:25:16.007005Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:25:16.007050Z node 37 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:25:16.007071Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2025-04-07T14:25:16.007749Z node 37 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-07T14:25:16.017532Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-07T14:25:16.017654Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-07T14:25:16.058353Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2025-04-07T14:25:16.060310Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:25:16.067992Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035916112, transactions count in step: 1, at 
schemeshard: 72057594046644480 2025-04-07T14:25:16.068047Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2025-04-07T14:25:16.068186Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-07T14:25:16.070306Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:25:16.070637Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:25:16.070709Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-07T14:25:16.070843Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-07T14:25:16.070899Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-07T14:25:16.071050Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-07T14:25:16.077666Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:25:16.077754Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:25:16.077780Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:25:16.078112Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-07T14:25:16.078135Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-07T14:25:16.078147Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2025-04-07T14:25:16.078192Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 |98.8%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> MediatorTest::RebootTargetTablets [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> MediatorTest::ResendSubset >> 
TopicSessionTests::TwoSessionsWithOffsets >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |98.9%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> TestPurecalcFilter::Simple2 [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_build_index/unittest >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] Test command err: 2025-04-07T14:24:57.765592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:57.766052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:57.766373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009c4/r3tmp/tmp7Ha8t5/pdisk_1.dat 2025-04-07T14:24:58.338314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:58.404898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:58.464147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:58.464349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:58.478153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:58.659012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:58.735724Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:58.735987Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:58.800017Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:58.800163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:58.801930Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:58.804451Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:58.804553Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:58.807199Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:58.838310Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:58.840201Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:58.840408Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:58.840447Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:58.840482Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:58.840519Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:58.842082Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:58.842208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:58.842650Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# 
[0:0:0] 2025-04-07T14:24:58.842725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:58.842766Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:58.842809Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:58.842847Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:58.843093Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:58.848831Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:58.849000Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:58.851000Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:58.862977Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:58.863172Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:59.053522Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:24:59.058507Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:59.058602Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:59.059444Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:59.059493Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:59.059575Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-07T14:24:59.059821Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-07T14:24:59.059986Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:24:59.060579Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:59.060778Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-07T14:24:59.063782Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-07T14:24:59.064298Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:59.066082Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-07T14:24:59.066130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:59.066777Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2025-04-07T14:24:59.066835Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-07T14:24:59.066891Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:59.067963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:59.068000Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:59.068041Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-07T14:24:59.068105Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:24:59.068169Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-07T14:24:59.068247Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:59.072335Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:59.074651Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-07T14:24:59.074859Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-07T14:24:59.074911Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-07T14:24:59.118206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:701:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:59.118333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:59.118407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:59.125623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-07T14:24:59.132844Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:59.351344Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:59.354646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:715:2593], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-07T14:25:00.760609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr8a7sqb6gja2x69t5cbx3w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY2MGEzZDgtYzAxYzFjOTYtOTM2Mjg2YWQtYWE0NTdlMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:25:00.789840Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2025-04-07T14:25:00.791245Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:25:00.825408Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:25:00.825549Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:25:00.835637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:25:00.844301Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:25:0 ... und ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-07T14:25:25.641435Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.641480Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:24} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{14, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:25:25.641535Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:24} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.641637Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:24} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-07T14:25:25.641677Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:24} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:25.641710Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:25:25.641747Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.641775Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-04-07T14:25:25.642093Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.642155Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{14, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:25:25.642195Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{36, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.642553Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037890:1:26} starting Scan{8 on 1001, 
TReadTableScan} 2025-04-07T14:25:25.642936Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} commited cookie 1 for step 23 2025-04-07T14:25:25.643000Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 63000 txid# 281474976715666} 2025-04-07T14:25:25.643067Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 63000} 2025-04-07T14:25:25.643131Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:25:25.643415Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-04-07T14:25:25.643669Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} commited cookie 8 for step 24 2025-04-07T14:25:25.644358Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:25:25.644491Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-04-07T14:25:25.644556Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-04-07T14:25:25.707614Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-07T14:25:25.707695Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-04-07T14:25:25.708235Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-07T14:25:25.708293Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:25.708340Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-07T14:25:25.708376Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.708413Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for ReadTableScan 2025-04-07T14:25:25.708703Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:25.708808Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-07T14:25:25.708866Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.720636Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} commited cookie 1 for step 25 2025-04-07T14:25:25.720753Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:25:25.720821Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-07T14:25:25.720893Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1491:3273], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:25:25.720962Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037890 2025-04-07T14:25:25.721344Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:24} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-07T14:25:25.721396Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:24} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:25.721442Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:25:25.721477Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.721516Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-07T14:25:25.721774Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.721855Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{14, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-07T14:25:25.721904Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{37, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.722136Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} commited cookie 1 for step 23 2025-04-07T14:25:25.722186Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037891 step# 63000 txid# 281474976715666} 2025-04-07T14:25:25.722237Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 63000} 2025-04-07T14:25:25.722275Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:25:25.722304Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:25:25.722578Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{84, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2025-04-07T14:25:25.722645Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{84, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:25.722801Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{84, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{81, redo 184b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-04-07T14:25:25.722870Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{84, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.723029Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037891:1:26} starting Scan{8 on 1001, TReadTableScan} 2025-04-07T14:25:25.723444Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} commited cookie 8 for step 24 2025-04-07T14:25:25.723624Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 1 2025-04-07T14:25:25.724097Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:88} commited cookie 1 for step 87 2025-04-07T14:25:25.724404Z node 2 :TX_DATASHARD DEBUG: Send 
response data ShardId: 72075186224037891, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-07T14:25:25.726867Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037891, TxId: 281474976715666, PendingAcks: 0 2025-04-07T14:25:25.726926Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 0 2025-04-07T14:25:25.812492Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-07T14:25:25.812555Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037891 2025-04-07T14:25:25.812792Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-07T14:25:25.812843Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:25.812885Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:25:25.812919Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-07T14:25:25.812953Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for ReadTableScan 2025-04-07T14:25:25.813182Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:25:25.813265Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-07T14:25:25.813339Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:25.824964Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} commited cookie 1 for step 25 2025-04-07T14:25:25.825055Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:25:25.825096Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:25:25.825152Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [2:1491:3273], exec latency: 0 ms, propose latency: 1 ms 2025-04-07T14:25:25.825221Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_build_index/unittest >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> TestPurecalcFilter::ManyValues >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> 
KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> MediatorTest::ResendSubset [GOOD] >> MediatorTest::ResendNotSubset |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> TestPurecalcFilter::ManyValues [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> TestPurecalcFilter::NullValues >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt >> TopicSessionTests::BadDataSessionError |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> MediatorTest::ResendNotSubset [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> Coordinator::RestoreTenantConfiguration [GOOD] >> Coordinator::LastEmptyStepResent >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::ResendNotSubset [GOOD] Test command err: 2025-04-07T14:24:44.275693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:44.276203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:44.276518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e6c/r3tmp/tmphdb1lW/pdisk_1.dat 2025-04-07T14:24:44.824145Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-07T14:24:44.824648Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-07T14:24:44.826008Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:582:2499] connected 2025-04-07T14:24:44.826134Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:565:2489] HANDLE TEvMediatorConfiguration Version# 1 2025-04-07T14:24:44.826607Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-07T14:24:44.826772Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-07T14:24:44.827318Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:588:2504] connected 2025-04-07T14:24:44.827398Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:586:2503] to# [1:584:2501] ExecQueue 2025-04-07T14:24:44.827467Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:586:2503] bucket# 0 ... 
waiting for watcher to connect (done) 2025-04-07T14:24:44.828415Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:586:2503] to# [1:584:2501] ExecQueue 2025-04-07T14:24:44.828486Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-07T14:24:44.828533Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:585:2502] bucket.ActiveActor 2025-04-07T14:24:44.828639Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:586:2503]} 2025-04-07T14:24:44.828779Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 SEND to# [1:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-04-07T14:24:44.844469Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:592:2508] connected 2025-04-07T14:24:44.844582Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-07T14:24:44.844633Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:590:2506] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-04-07T14:24:44.844934Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-04-07T14:24:44.844983Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:585:2502] bucket.ActiveActor step# 1000 2025-04-07T14:24:44.845078Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-07T14:24:44.845281Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 SEND to# [1:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-04-07T14:24:44.864220Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-07T14:24:44.864285Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-07T14:24:44.864391Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:584:2501] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:590:2506]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-04-07T14:24:44.864489Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:590:2506]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-04-07T14:24:44.864599Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 marker# M2 2025-04-07T14:24:44.864644Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 SEND Ev to# [1:585:2502] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-04-07T14:24:44.864717Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:585:2502] bucket.ActiveActor step# 1010 2025-04-07T14:24:44.865021Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:590:2506]}}} marker# M4 2025-04-07T14:24:44.865265Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-07T14:24:44.865931Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:24:44.865995Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-07T14:24:44.866063Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-04-07T14:24:44.866513Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:586:2503] to# [1:584:2501] ExecQueue 2025-04-07T14:24:44.866593Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:586:2503] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-04-07T14:24:44.866936Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-04-07T14:24:44.867043Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:585:2502] Mediator# 72057594047365120 SEND to# [1:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-04-07T14:24:48.488615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:48.489139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:48.489219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000e6c/r3tmp/tmpzX4MAz/pdisk_1.dat 2025-04-07T14:24:48.720903Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-07T14:24:48.721476Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-07T14:24:48.722005Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:582:2499] connected 2025-04-07T14:24:48.722104Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:565:2489] HANDLE TEvMediatorConfiguration Version# 1 2025-04-07T14:24:48.722503Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-07T14:24:48.722674Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-07T14:24:48.723109Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:588:2504] connected ... waiting for watcher to connect (done) 2025-04-07T14:24:48.723335Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:586:2503] to# [2:584:2501] ExecQueue 2025-04-07T14:24:48.723395Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:586:2503] bucket# 0 2025-04-07T14:24:48.723533Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:586:2503] to# [2:584:2501] ExecQueue 2025-04-07T14:24:48.723587Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-07T14:24:48.723631Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:585:2502] bucket.ActiveActor 2025-04-07T14:24:48.723724Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:585:2502] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:586:2503]} 2025-04-07T14:24:48.723807Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:585:2502] Mediator# 72057594047365120 SEND to# [2:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-04-07T14:24:48.739000Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:586:2503] to# [2:584:2501] ExecQueue 2025-04-07T14:24:48.739069Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:586:2503] bucket# 0 2025-04-07T14:24:48.739154Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:586:2503] to# [2:584:2501] ExecQueue 2025-04-07T14:24:48.739187Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:586:2503] bucket# 0 2025-04-07T14:24:48.739269Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:586:2503] to# [2:584:2501] ExecQueue 2025-04-07T14:24:48.739304Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:586:2503] bucket# 0 ... waiting for no pending commands (done) 2025-04-07T14:24:48.739663Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:648:2537] connected 2025-04-07T14:24:48.739754Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-07T14:24:48.739800Z node 2 :TX_MEDIATOR DEBUG: tablet ... 025-04-07T14:25:34.751188Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [11:586:2503] to# [11:584:2501] ExecQueue 2025-04-07T14:25:34.751240Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [11:586:2503] bucket# 0 ... waiting for watcher to connect (done) 2025-04-07T14:25:34.751591Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [11:586:2503] to# [11:584:2501] ExecQueue 2025-04-07T14:25:34.751651Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-07T14:25:34.751701Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [11:585:2502] bucket.ActiveActor 2025-04-07T14:25:34.751773Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [11:586:2503]} 2025-04-07T14:25:34.751862Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# [11:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... waiting for no pending commands 2025-04-07T14:25:34.763776Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [11:586:2503] to# [11:584:2501] ExecQueue 2025-04-07T14:25:34.763863Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [11:586:2503] bucket# 0 2025-04-07T14:25:34.763976Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [11:586:2503] to# [11:584:2501] ExecQueue 2025-04-07T14:25:34.764039Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [11:586:2503] bucket# 0 ... 
waiting for no pending commands (done) 2025-04-07T14:25:34.764431Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [11:629:2527] connected 2025-04-07T14:25:34.764556Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-07T14:25:34.764607Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [11:627:2525] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 2 Coordinator# 72057594046316545 2025-04-07T14:25:34.764985Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-07T14:25:34.765034Z node 11 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [2] 2025-04-07T14:25:34.765195Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [11:584:2501] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}{tablet# 72057594047365121 txid# 4}{tablet# 72057594047365122 txid# 4}}}}} marker# M0 2025-04-07T14:25:34.765337Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}{tablet# 72057594047365121 txid# 4}{tablet# 72057594047365122 txid# 4}}}}} marker# M1 2025-04-07T14:25:34.765392Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 4 marker# M2 2025-04-07T14:25:34.765446Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND Ev to# [11:585:2502] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 4 marker# M3 2025-04-07T14:25:34.765497Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 1 txid# 4 marker# M2 2025-04-07T14:25:34.765541Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND Ev to# [11:585:2502] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 4 marker# M3 2025-04-07T14:25:34.765587Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [11:585:2502] bucket.ActiveActor step# 1010 2025-04-07T14:25:34.765687Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}} marker# M4 2025-04-07T14:25:34.765888Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}} marker# M4 2025-04-07T14:25:34.765977Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-07T14:25:34.766681Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:25:34.766751Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-07T14:25:34.766790Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 4, marker M5lu 2025-04-07T14:25:34.766833Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-07T14:25:34.767189Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:25:34.767248Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-07T14:25:34.767279Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 4, marker M5lu 2025-04-07T14:25:34.767311Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-07T14:25:34.778205Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-07T14:25:34.778293Z node 11 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [4] 2025-04-07T14:25:34.778398Z node 11 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [11:584:2501] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 3 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 4 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 3}{tablet# 72057594047365121 txid# 4}{tablet# 72057594047365122 txid# 4}}} 2025-04-07T14:25:34.778559Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 3 AckTo# [0:0:0]}{TTx Moderator# 0 txid# 4 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 3}{tablet# 72057594047365121 txid# 4}{tablet# 72057594047365122 txid# 4}}} AckTo# [11:627:2525] 2025-04-07T14:25:34.778625Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 txid# 4 marker# M2 2025-04-07T14:25:34.778682Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND Ev to# [11:585:2502] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 txid# 4 marker# M3 2025-04-07T14:25:34.778744Z node 11 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 1 txid# 3 txid# 4 marker# M2 2025-04-07T14:25:34.778798Z node 11 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [11:584:2501] MediatorId# 72057594047365120 SEND Ev to# [11:585:2502] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 3 txid# 4 marker# M3 2025-04-07T14:25:34.778899Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 2 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}} 2025-04-07T14:25:34.778959Z node 11 :TX_MEDIATOR_TABLETQUEUE CRIT: Received out-of-order step 1010 for tablet 72057594047365121 with transactions {1, 2, 4} which are not a subset of previously received {1, 4} 2025-04-07T14:25:34.779041Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 3 AckTo# [11:627:2525]}{TTx Moderator# 0 txid# 4 AckTo# [11:627:2525]}}} 2025-04-07T14:25:34.779070Z node 11 :TX_MEDIATOR_TABLETQUEUE CRIT: Received out-of-order step 1010 for tablet 72057594047365122 with transactions {1, 3, 4} which are not a subset of previously received {1, 4} ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-07T14:25:34.789913Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-04-07T14:25:34.790027Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# [11:627:2525] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1 txid# 2 txid# 4} 2025-04-07T14:25:34.790168Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365122 step# 1010} 2025-04-07T14:25:34.790207Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# [11:627:2525] {TEvPlanStepAck TabletId# 72057594047365122 step# 1010 txid# 1 txid# 3 txid# 4} 2025-04-07T14:25:34.790459Z node 11 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [11:585:2502] Mediator# 72057594047365120 SEND to# [11:586:2503] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... ack from 72057594047365121 ... ack from 72057594047365122 ... ack from 72057594047365121 ... ack from 72057594047365122 |98.9%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.9%| [TA] $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... results_accumulator.log} >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TestPurecalcFilter::NullValues [GOOD] |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> TestPurecalcFilter::PartialPush >> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TopicSessionTests::BadDataSessionError [GOOD] >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TopicSessionTests::WrongFieldType >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> TestPurecalcFilter::PartialPush [GOOD] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> TestPurecalcFilter::CompilationValidation >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-04-07T14:23:58.288829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490576891086149677:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:23:58.289352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000a60/r3tmp/tmp6QTDE1/pdisk_1.dat 2025-04-07T14:23:58.745557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:23:58.745654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:23:58.753251Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-07T14:23:58.755189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9948, node 1 2025-04-07T14:23:58.829174Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-07T14:23:58.829533Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-07T14:23:58.829634Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-07T14:23:58.829718Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-07T14:23:58.830672Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-07T14:23:58.830724Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-07T14:23:58.830758Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-07T14:23:58.830794Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-07T14:23:58.830802Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-07T14:23:58.830816Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-07T14:23:58.834306Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:23:58.834445Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:23:58.834467Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-07T14:23:58.834476Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2025-04-07T14:23:58.898839Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# BlobStorageConfig 2025-04-07T14:23:58.899665Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# HiveCreateTablet 2025-04-07T14:23:58.901448Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# LocalEnumerateTablets 2025-04-07T14:23:58.901771Z node 1 :GRPC_SERVER DEBUG: [0x51b000018f80] created request Name# KeyValue 2025-04-07T14:23:58.902735Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# TabletStateRequest 2025-04-07T14:23:58.903150Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# LocalMKQL 2025-04-07T14:23:58.903976Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# LocalSchemeTx 2025-04-07T14:23:58.904383Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# TabletKillRequest 2025-04-07T14:23:58.904631Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# SchemeOperationStatus 2025-04-07T14:23:58.904933Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# ChooseProxy 2025-04-07T14:23:58.905275Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# ResolveNode 2025-04-07T14:23:58.905553Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# FillNode 2025-04-07T14:23:58.905792Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# DrainNode 2025-04-07T14:23:58.907265Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# InterconnectDebug 2025-04-07T14:23:58.907687Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0080] created request Name# TestShardControl 2025-04-07T14:23:58.909044Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0680] created request Name# RegisterNode 2025-04-07T14:23:58.909396Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0c80] created request Name# CmsRequest 2025-04-07T14:23:58.909680Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c1280] created request Name# SqsRequest 2025-04-07T14:23:58.911161Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c1880] created request Name# ConsoleRequest 2025-04-07T14:23:58.911467Z node 1 
:GRPC_SERVER DEBUG: [0x51a0000c1e80] created request Name# SchemeInitRoot 2025-04-07T14:23:58.912573Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c2480] created request Name# PersQueueRequest 2025-04-07T14:23:58.912973Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c2a80] created request Name# Request 2025-04-07T14:23:58.913680Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c3080] created request Name# SchemeOperation 2025-04-07T14:23:58.913970Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c3680] created request Name# SchemeDescribe 2025-04-07T14:23:58.985846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:23:58.985866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:23:58.985872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:23:58.985957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:23:59.558624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:23:59.559571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.561329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:23:59.563064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:23:59.563170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.565939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:23:59.566856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:23:59.567072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.567132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:23:59.567203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:23:59.567232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:23:59.573727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:59.573757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:23:59.573803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:23:59.574032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.574078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:23:59.574093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:23:59.576103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.576142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:23:59.576165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:23:59.576188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:23:59.592943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:23:59.599166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:23:59.599318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:23:59.602710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035839644, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:23:59.602880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490576891086150161 RawX2: 4294969660 } } Step: 1744035839644 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:23:59.602916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:23:59.603217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:23:59.603252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474 ... 
26976Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7490577316567355357:2359], at schemeshard: 72057594046644480, txId: 281474976715661, path id: 2 2025-04-07T14:25:38.426995Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7490577316567355357:2359], at schemeshard: 72057594046644480, txId: 281474976715661, path id: 3 2025-04-07T14:25:38.427059Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-07T14:25:38.427123Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715661:0 ProgressState 2025-04-07T14:25:38.427202Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-07T14:25:38.427219Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-07T14:25:38.427267Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-04-07T14:25:38.427293Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-07T14:25:38.427317Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-07T14:25:38.427334Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-04-07T14:25:38.427474Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-07T14:25:38.427507Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 2, subscribers: 1 2025-04-07T14:25:38.427524Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-04-07T14:25:38.427538Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-04-07T14:25:38.428069Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.428179Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.428202Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:25:38.428232Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-07T14:25:38.428256Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-07T14:25:38.428683Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.428770Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.428784Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-07T14:25:38.428801Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-07T14:25:38.428818Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-07T14:25:38.428883Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-04-07T14:25:38.428908Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7490577320862323250:2304] 2025-04-07T14:25:38.431483Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:25:38.431515Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-07T14:25:38.431645Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.431685Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-07T14:25:38.434486Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01jr8a9042ctvnfrsq878tkrx9, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:45580, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-07T14:25:38.435000Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-07T14:25:38.435344Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-07T14:25:38.435608Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-07T14:25:38.435767Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-07T14:25:38.435869Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-07T14:25:38.435888Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-07T14:25:38.435950Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-07T14:25:38.436401Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-04-07T14:25:38.436429Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus 
from node 33, TabletId: 72075186224037888 not found 2025-04-07T14:25:38.437268Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-07T14:25:38.438108Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-07T14:25:38.440297Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-07T14:25:38.440333Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-07T14:25:38.440387Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-07T14:25:38.440403Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-07T14:25:38.440437Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-07T14:25:38.447819Z node 33 :GRPC_SERVER DEBUG: [0x51a0000c4e80] received request Name# Request ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.448197Z node 33 :GRPC_SERVER DEBUG: [0x51a000096080] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.448502Z node 33 :GRPC_SERVER DEBUG: [0x51a0000c5480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.448813Z node 33 :GRPC_SERVER DEBUG: [0x51a000087080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.449088Z node 33 :GRPC_SERVER DEBUG: [0x51a00010d480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.449385Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f9080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.449702Z node 33 :GRPC_SERVER DEBUG: [0x51a00012ba80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.450000Z node 33 :GRPC_SERVER DEBUG: [0x51a0000eb280] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.450293Z node 33 :GRPC_SERVER DEBUG: [0x51a0000ed080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.450574Z node 33 :GRPC_SERVER DEBUG: [0x51a000127880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.450867Z node 33 :GRPC_SERVER DEBUG: [0x51a000119480] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451151Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d3280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451302Z node 33 :GRPC_SERVER DEBUG: [0x51a000097e80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451440Z node 33 :GRPC_SERVER DEBUG: [0x51b000098480] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451655Z node 33 :GRPC_SERVER DEBUG: [0x51a000031280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451747Z node 33 :GRPC_SERVER DEBUG: [0x51a00012ae80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.451946Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f5480] received request Name# CmsRequest ok# false data# 
peer# current inflight# 0 2025-04-07T14:25:38.452019Z node 33 :GRPC_SERVER DEBUG: [0x51a000022280] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.452223Z node 33 :GRPC_SERVER DEBUG: [0x51a00012a880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.452282Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d5680] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.452525Z node 33 :GRPC_SERVER DEBUG: [0x51a00010c880] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.452577Z node 33 :GRPC_SERVER DEBUG: [0x51a0000eb880] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.452816Z node 33 :GRPC_SERVER DEBUG: [0x51a0000c9c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-07T14:25:38.453095Z node 33 :GRPC_SERVER DEBUG: [0x51a0000e9a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |98.9%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> TabletService_Restart::OnlyAdminsAllowed [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> TDataShardRSTest::TestCleanupInRS [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: 2025-04-07T14:24:59.821928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:59.822731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:59.822980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmp7XNZQM/pdisk_1.dat 2025-04-07T14:25:00.541197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:00.602618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:00.659385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:00.660224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:00.684145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-04-07T14:25:07.067000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:07.067378Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:07.067445Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpipg9iP/pdisk_1.dat 2025-04-07T14:25:07.384685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:07.412441Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:07.459887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:07.460022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:07.480270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-04-07T14:25:11.431259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:11.431652Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:11.431748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpINSVXx/pdisk_1.dat 2025-04-07T14:25:11.679402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:11.708284Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:11.755378Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:11.755503Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:11.767074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:16.178259Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:25:16.178368Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:16.178618Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmp95RTzB/pdisk_1.dat 2025-04-07T14:25:16.494734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:16.531719Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:16.586170Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:16.586328Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:16.598195Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:20.739016Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:20.739386Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:20.739444Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpmJ0hq7/pdisk_1.dat 2025-04-07T14:25:21.042878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:21.073329Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:21.120525Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:21.120656Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:21.132403Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:25.098188Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:25.098654Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:25.098754Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpVjqWJq/pdisk_1.dat 2025-04-07T14:25:25.411554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:25.447377Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:25.494952Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:25.495093Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:25.506709Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:29.777440Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:29.777815Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:29.777876Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpyuWDo7/pdisk_1.dat 2025-04-07T14:25:30.138423Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:30.186666Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:30.239752Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:30.239877Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:30.253378Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:34.325402Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:34.325710Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:34.325776Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpxZu3ug/pdisk_1.dat 2025-04-07T14:25:34.695730Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:34.724210Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:34.774241Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:34.774377Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:34.786060Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:25:39.577843Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:39.578186Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:39.578314Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpZg9Wia/pdisk_1.dat 2025-04-07T14:25:39.922473Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:39.957530Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:40.005735Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:40.005876Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:40.017836Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-04-07T14:25:40.209944Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:44.644633Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:44.644987Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:44.645198Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/0009ce/r3tmp/tmpC2pLh4/pdisk_1.dat 2025-04-07T14:25:44.930867Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:25:44.974537Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:45.026985Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:45.027150Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:45.038904Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-04-07T14:25:45.558645Z node 10 :IMPORT WARN: Table profiles were not loaded |98.9%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TopicSessionTests::WrongFieldType [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> TestPurecalcFilter::CompilationValidation [GOOD] >> TestRawParser::Simple |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> TestRawParser::Simple [GOOD] >> TestRawParser::ManyValues >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> TestRawParser::ManyValues [GOOD] >> TestRawParser::TypeKindsValidation >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> KqpTpch::Query22 [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> TestRawParser::TypeKindsValidation [GOOD] >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx >> TDataShardRSTest::TestDelayedRSAckForUnknownTx 
[GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: 2025-04-07T14:24:54.300341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:54.300775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:54.301052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000a9c/r3tmp/tmpMEUQi1/pdisk_1.dat 2025-04-07T14:24:55.000201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:55.057616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:55.116878Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-07T14:24:55.118472Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-07T14:24:55.118737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:55.118908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:55.133941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:55.264920Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2025-04-07T14:24:55.265013Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2025-04-07T14:24:55.268471Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2025-04-07T14:24:55.408028Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-07T14:24:55.408636Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-07T14:24:55.408806Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-07T14:24:55.409118Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-07T14:24:55.409321Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-07T14:24:55.409482Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-07T14:24:55.409797Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2025-04-07T14:24:55.412092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:55.413121Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-07T14:24:55.413210Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-07T14:24:55.447062Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:55.447893Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:55.448286Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:55.448593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:55.490459Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:55.490995Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:55.491074Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:55.492444Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:55.492538Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:55.492585Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:55.492957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:55.523731Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:55.524659Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:55.524868Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:55.524916Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:55.524953Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:55.524985Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:55.525445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:55.525500Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:55.526255Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:55.526344Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:55.527343Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:55.527384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:55.527437Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:24:55.527554Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:55.527620Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:55.527696Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:24:55.527742Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:55.527772Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:55.527803Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:55.527850Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:55.528081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:24:55.528148Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:55.528321Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:55.529964Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:24:55.530068Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:55.530213Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:55.530260Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:55.530297Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:24:55.530346Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:24:55.530380Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:55.530655Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-07T14:24:55.530690Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:24:55.530720Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:55.530762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:55.530822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:24:55.530850Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:55.530881Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to 
execution unit WaitForPlan 2025-04-07T14:24:55.530908Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:55.530930Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:24:55.532507Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:24:55.532565Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:55.543377Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:55.543452Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:55.543486Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:55.543544Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 7 ... ing iterator: { ReadId: 0 SeqNo: 1 MaxRows: 999 MaxBytes: 5242880 } 2025-04-07T14:25:51.081841Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553218, Sender [8:1053:2830], Recipient [8:1042:2825]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 2025-04-07T14:25:51.081867Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadAck 2025-04-07T14:25:51.081909Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1053:2830] on missing iterator: { ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 } 2025-04-07T14:25:51.082174Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1053:2830], Recipient [8:1042:2825]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:25:51.082208Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-07T14:25:51.082273Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-04-07T14:25:51.319759Z node 8 :TX_PROXY DEBUG: actor# [8:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:25:51.319865Z node 8 :TX_PROXY DEBUG: actor# [8:52:2099] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-07T14:25:51.323437Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1072:2845], Recipient [8:1045:2827]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:25:51.323546Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:25:51.323627Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1070:2844], serverId# [8:1072:2845], sessionId# [0:0:0] 2025-04-07T14:25:51.323837Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8a9cfhegrwd62be0psb575, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MzJiMzdhODgtOTg2MzQ0YTgtNzEwNzA5ODYtYmJjNzE2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-07T14:25:51.325644Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1076:2846], Recipient [8:1045:2827]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-07T14:25:51.325702Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-04-07T14:25:51.325847Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-07T14:25:51.325940Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:51.326022Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-07T14:25:51.326123Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=5, epoch=1} 2025-04-07T14:25:51.330507Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=3, epoch=1} 2025-04-07T14:25:51.330987Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-04-07T14:25:51.331111Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1001/18446744073709551615 2025-04-07T14:25:51.331219Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-07T14:25:51.331337Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-07T14:25:51.331403Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-07T14:25:51.331465Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-07T14:25:51.331520Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-07T14:25:51.331589Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-07T14:25:51.331644Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-07T14:25:51.331673Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-07T14:25:51.331693Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-07T14:25:51.331716Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-07T14:25:51.331883Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:25:51.332156Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-04-07T14:25:51.332197Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-04-07T14:25:51.332296Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:25:51.332406Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-04-07T14:25:51.332497Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-04-07T14:25:51.332582Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} requests PageCollection [72075186224037888:1:27:1:12288:190:0] 65 bytes, 1 pages: [0 4] 2025-04-07T14:25:51.332703Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 1 pages 2025-04-07T14:25:51.332888Z node 8 :TABLET_SAUSAGECACHE DEBUG: PageCollection [72075186224037888:1:27:1:12288:190:0] class Online from cache [ 0 ] 2025-04-07T14:25:51.332983Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} got result TEvResult{1 pages [72075186224037888:1:27:1:12288:190:0] ok OK}, category 1 2025-04-07T14:25:51.333132Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-07T14:25:51.333200Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-07T14:25:51.333367Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-07T14:25:51.333641Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1076:2846], 0} after executionsCount# 2 2025-04-07T14:25:51.333722Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1076:2846], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-07T14:25:51.333849Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-07T14:25:51.333911Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-07T14:25:51.333978Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-07T14:25:51.334039Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-07T14:25:51.334086Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-07T14:25:51.334111Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-07T14:25:51.334135Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-07T14:25:51.334198Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-07T14:25:51.334355Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, 
NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:25:51.334466Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-04-07T14:25:51.334536Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-07T14:25:51.334729Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1045:2827], Recipient [8:1045:2827]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-07T14:25:51.334776Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-04-07T14:25:51.334891Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-04-07T14:25:51.334972Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-07T14:25:51.335057Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1076:2846], 0}, firstUnprocessedQuery# 0 2025-04-07T14:25:51.335138Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1076:2846], 0}, FirstUnprocessedQuery# 0 2025-04-07T14:25:51.335267Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1076:2846], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-07T14:25:51.335353Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1076:2846], 0} finished in ReadContinue 2025-04-07T14:25:51.335475Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-07T14:25:51.335575Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:6} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-04-07T14:25:51.341085Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1076:2846], Recipient [8:1045:2827]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-07T14:25:51.341211Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-07T14:25:51.341346Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-04-07T14:24:33.814844Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-07T14:24:33.814922Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-04-07T14:24:33.815000Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-04-07T14:24:33.815042Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-07T14:24:33.815751Z node 
1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7490577040562112987:2050] 2025-04-07T14:24:36.456834Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7490577040562112987:2050] [id 1]: Started compile request 2025-04-07T14:24:37.114464Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7490577040562112987:2050] [id 1]: Compilation completed for request 2025-04-07T14:24:37.114601Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7490577040562112987:2050] 2025-04-07T14:24:37.115347Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-07T14:24:37.115409Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-07T14:24:37.115471Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-07T14:24:37.115488Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-04-07T14:24:37.115513Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-04-07T14:24:37.115531Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-07T14:24:37.115594Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7490577040562112987:2050] 2025-04-07T14:24:37.115675Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7490577040562112987:2050] [id 2]: Started compile request 2025-04-07T14:24:37.138468Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7490577040562112987:2050] [id 2]: Compilation completed for request 2025-04-07T14:24:37.138581Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7490577040562112987:2050] 2025-04-07T14:24:37.138680Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-07T14:24:37.138722Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-07T14:24:37.138753Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-04-07T14:24:37.138834Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-04-07T14:24:37.138853Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-04-07T14:24:37.138860Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-07T14:24:37.140733Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-04-07T14:24:37.140777Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-04-07T14:24:37.140784Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-07T14:24:37.140839Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-04-07T14:24:37.140904Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-04-07T14:24:37.140924Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 2025-04-07T14:24:37.140931Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 
2025-04-07T14:24:37.140964Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-04-07T14:24:37.140969Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:24:37.471371Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-07T14:24:37.471406Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-04-07T14:24:37.471426Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-04-07T14:24:37.471544Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-07T14:24:37.472002Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7490577055273456829:2050] 2025-04-07T14:24:40.217365Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7490577055273456829:2050] [id 1]: Started compile request 2025-04-07T14:24:40.245830Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7490577055273456829:2050] [id 1]: Compilation completed for request 2025-04-07T14:24:40.245925Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7490577055273456829:2050] 2025-04-07T14:24:40.246085Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-07T14:24:40.246099Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-07T14:24:40.246121Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-07T14:24:40.649843Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-07T14:24:40.649874Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-04-07T14:24:40.649902Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; 2025-04-07T14:24:40.649940Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-07T14:24:40.650249Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7490577070292606460:2050] 2025-04-07T14:24:43.302015Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7490577070292606460:2050] [id 1]: Started compile request 2025-04-07T14:24:43.317354Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7490577070292606460:2050] [id 1]: Compilation failed for request 2025-04-07T14:24:43.317456Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7490577070292606460:2050] 2025-04-07T14:24:43.317571Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-07T14:24:43.317668Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: Unexpected token '.' : cannot match to any predicted input... } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-04-07T14:24:46.876885Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-04-07T14:24:46.884792Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-04-07T14:24:47.003209Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:24:47.003405Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-04-07T14:24:47.004777Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-07T14:24:47.004804Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-04-07T14:24:47.004833Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-04-07T14:24:47.004859Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-07T14:24:47.036834Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-07T14:24:47.036884Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-07T14:24:47.037364Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-04-07T14:24:47.105479Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-04-07T14:24:47.195261Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:24:47.195682Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-04-07T14:24:47.195715Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-07T14:24:47.195742Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_second = "str_second"' (filter id: [1:0:0]) 2025-04-07T14:24:47.195768Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_second = "str_second"; 2025-04-07T14:24:47.195792Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-07T14:24:47.221938Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-07T14:24:47.221967Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-07T14:24:47.223166Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: Send 2 messages to parser 2025-04-07T14:24:47.223208Z node 4 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 2 messages to parse 2025-04-07T14:24:47.223237Z node 4 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"com_col": "event1", "col_first": "some_str", "col_second": "str_second"}{"com_col": "event2", "col_second": "some_str", "col_first": "str_first__large__"} 2025-04-07T14:24:47.224324Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: TParserHandler: Got parsed data, number rows: 2 2025-04-07T14:24:47.224363Z node 4 :FQ_ROW_DISPATCHER TRACE: 
TTopicFormatHandler [json_each_row]: Send 2 messages to filters, first offset: 42, last offset: 43 2025-04-07T14:24:47.224381Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 2 2025-04-07T14:24:47.224417Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 2 rows to purecalc filter (filter id: [1:0:0]) 2025-04-07T14:24:47.224429Z node 4 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:24:47.226306Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: TClientHandler [1:0:0]: OnFilteredData, row id: 0, offset: 42, new packer size: 32 2025-04-07T14:24:47.226389Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 2 rows to purecalc filter (filter id: [0:0:0]) 2025-04-07T14:24:47.226397Z node 4 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14: ... 27Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-04-07T14:25:12.521095Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:12.659295Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:13.157047Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:13.157384Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:13.157417Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-04-07T14:25:13.157834Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:13.157866Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-04-07T14:25:13.158140Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:13.158168Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-04-07T14:25:13.161352Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:13.161394Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-04-07T14:25:13.743012Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:13.743314Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:13.743360Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-04-07T14:25:14.282561Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:14.282906Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:14.282951Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-04-07T14:25:14.284332Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:14.284381Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-04-07T14:25:14.284728Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:14.284768Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 
2025-04-07T14:25:14.987527Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:14.987907Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:14.987958Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-04-07T14:25:14.988437Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:14.988480Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-04-07T14:25:15.697061Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-07T14:25:15.697374Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:15.697446Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-04-07T14:25:15.697933Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:15.698010Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-04-07T14:25:15.698261Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:15.698317Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-04-07T14:25:15.698506Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-07T14:25:15.698541Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-04-07T14:25:16.176436Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-07T14:25:16.176683Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7490577225440616330:2050] 2025-04-07T14:25:21.216795Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7490577225440616330:2050] [id 0]: Started compile request 2025-04-07T14:25:21.314815Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7490577225440616330:2050] [id 0]: Compilation completed for request 2025-04-07T14:25:21.314977Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7490577225440616330:2050] 2025-04-07T14:25:21.321019Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:21.322604Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:21.939344Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-07T14:25:21.940126Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7490577247219998792:2050] 2025-04-07T14:25:27.309853Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7490577247219998792:2050] [id 0]: Started compile request 2025-04-07T14:25:27.389428Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7490577247219998792:2050] [id 0]: Compilation completed for request 2025-04-07T14:25:27.389561Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7490577247219998792:2050] 2025-04-07T14:25:27.389875Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:27.390030Z node 28 
:FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:28.133357Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-07T14:25:28.133575Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7490577275588164492:2050] 2025-04-07T14:25:33.064843Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7490577275588164492:2050] [id 0]: Started compile request 2025-04-07T14:25:33.116040Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7490577275588164492:2050] [id 0]: Compilation completed for request 2025-04-07T14:25:33.116163Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7490577275588164492:2050] 2025-04-07T14:25:33.116554Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:25:33.116703Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:25:33.116748Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:25:33.116788Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:25:33.116828Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-07T14:25:33.722928Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-04-07T14:25:33.723138Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7490577297132827059:2050] 2025-04-07T14:25:38.508865Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7490577297132827059:2050] [id 0]: Started compile request 2025-04-07T14:25:38.579974Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7490577297132827059:2050] [id 0]: Compilation completed for request 2025-04-07T14:25:38.580075Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7490577297132827059:2050] 2025-04-07T14:25:38.580316Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:39.235688Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-04-07T14:25:39.235912Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7490577324743920987:2050] 2025-04-07T14:25:44.207046Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7490577324743920987:2050] [id 0]: Started compile request 2025-04-07T14:25:44.267589Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7490577324743920987:2050] [id 0]: Compilation completed for request 2025-04-07T14:25:44.267721Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7490577324743920987:2050] 2025-04-07T14:25:44.269106Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-07T14:25:44.988983Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-04-07T14:25:45.019362Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7490577342580290751:2050] 2025-04-07T14:25:50.272764Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7490577342580290751:2050] [id 0]: Started compile request 2025-04-07T14:25:50.276031Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7490577342580290751:2050] [id 0]: Compilation failed for request 2025-04-07T14:25:50.276146Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7490577342580290751:2050] 2025-04-07T14:25:51.514558Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-04-07T14:25:51.514624Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-04-07T14:25:52.234986Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-04-07T14:25:52.235036Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-04-07T14:25:52.235144Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-04-07T14:25:52.235168Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} |99.0%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> TSentinelTests::BSControllerUnresponsive [GOOD] ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |99.0%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable [FAIL] Test command err: 2025-04-07T14:24:36.093626Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-07T14:24:36.093688Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-07T14:24:36.093750Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-07T14:24:36.093773Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-07T14:24:36.093809Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 
2025-04-07T14:24:36.093879Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-07T14:24:36.095733Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-07T14:24:36.100228Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 
PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: ... h: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:25:56.039260Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:25:56.039388Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:25:56.039503Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 
ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34260026 2025-04-07T14:25:56.039597Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-07T14:25:56.039994Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:272, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:25:56.040048Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 66:266, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:25:56.040089Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:275, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-07T14:25:56.040148Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-07T14:25:56.052974Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-07T14:25:56.053049Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-07T14:25:56.063573Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-07T14:25:56.063641Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-07T14:25:56.063740Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-04-07T14:25:56.063769Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-07T14:25:56.063906Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637] 2025-04-07T14:25:56.063956Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637] 2025-04-07T14:25:56.063983Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637] 2025-04-07T14:25:56.064020Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637] 2025-04-07T14:25:56.064047Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637] 2025-04-07T14:25:56.064072Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637] 2025-04-07T14:25:56.064099Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637] 2025-04-07T14:25:56.064137Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637] 2025-04-07T14:25:56.064386Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { }, cookie# 111 2025-04-07T14:25:56.064413Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-07T14:25:56.064742Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: 
"/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.065378Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.065730Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.065856Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.065995Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.066117Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 
272 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.066314Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.066493Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 34280026 2025-04-07T14:25:56.066581Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s >> TCreateAndDropViewTest::DropSameViewTwice |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> Serialization::RequestHeader [GOOD] >> Serialization::ResponseHeader [GOOD] >> Serialization::ApiVersionsRequest [GOOD] >> Serialization::ApiVersionsResponse [GOOD] >> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD] >> Serialization::ProduceRequest [GOOD] >> Serialization::UnsignedVarint32 [GOOD] >> Serialization::UnsignedVarint64 [GOOD] >> Serialization::Varint32 [GOOD] >> Serialization::Varint64 [GOOD] >> Serialization::UnsignedVarint32_Wrong [GOOD] >> Serialization::UnsignedVarint64_Wrong [GOOD] >> Serialization::UnsignedVarint32_Deserialize [GOOD] >> Serialization::TKafkaInt8_NotPresentVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::Struct_IsDefault [GOOD] >> Serialization::TKafkaString_IsDefault [GOOD] >> 
Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaArray_IsDefault [GOOD] >> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaBytes_IsDefault [GOOD] >> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TRequestHeaderData_reference [GOOD] >> Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::RequestHeader_reference [GOOD] >> Serialization::ProduceRequestData [GOOD] >> Serialization::ProduceRequestData_Record_v0 [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> TopicSessionTests::ReadNonExistentTopic |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test.py::test[solomon-UnknownSetting-] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD] Test command err: 2025-04-07T14:24:27.629586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577014811120740:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:27.629657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000c23/r3tmp/tmpL7chde/pdisk_1.dat 2025-04-07T14:24:28.100984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:28.135667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:28.135792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:28.137370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15695, node 1 2025-04-07T14:24:28.347154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:28.347191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:28.347210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:28.347301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:28.875938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:28.885987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:28.887488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:28.897105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:24:28.897454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:24:28.897517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:24:28.901416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:24:28.901443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:24:28.901821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:28.904597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035868953, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:24:28.904632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:24:28.905112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:24:28.906086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:28.906218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:28.906272Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:24:28.906354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:24:28.906380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:24:28.906409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-04-07T14:24:28.913327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:24:28.913392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:24:28.913412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:24:28.913534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-04-07T14:24:28.913738Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:24:29.068844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.069114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:29.069134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.069187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2025-04-07T14:24:29.069301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-07T14:24:29.069311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-04-07T14:24:29.069766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2025-04-07T14:24:29.069847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:29.070016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:29.070808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-07T14:24:29.070865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-07T14:24:29.070891Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2025-04-07T14:24:29.070970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:22172 2025-04-07T14:24:29.296190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.296402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:29.296434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-07T14:24:29.297108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2025-04-07T14:24:29.297511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-07T14:24:29.298823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035869345, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:24:29.298848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1744035869345, at schemeshard: 72057594046644480 2025-04-07T14:24:29.299068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2025-04-07T14:24:29.299123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-07T14:24:29.299142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2025-04-07T14:24:29.299552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:24:29.299710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:24:29.300627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2025-04-07T14:24:29.301490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2025-04-07T14:24:29.301519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-04-07T14:24:29.301567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2025-04-07T14:24:29.301750Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-07T14:24:29.317147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-07T14:24:29.317280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2025-04-07T14:24:29.317355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-07T14:24:29.317364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication st ... 
1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143366:3746] connected; active server actors: 1 2025-04-07T14:25:57.510260Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 10, Generation 1 2025-04-07T14:25:57.510280Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 10, Generation 1 2025-04-07T14:25:57.510297Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 10, Generation 1 2025-04-07T14:25:57.510314Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 10, Generation 1 2025-04-07T14:25:57.510330Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 10, Generation 1 2025-04-07T14:25:57.511272Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143362:3742]: Got location Wait for response for topics: '/Root/PQ/rt3.dc1--topic', /Root/PQ/rt3.dc1--topic2', 2025-04-07T14:25:57.513355Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143370:3750]: Request location 2025-04-07T14:25:57.515740Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143366:3746] disconnected; active server actors: 1 2025-04-07T14:25:57.515771Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143366:3746] disconnected no session 2025-04-07T14:25:57.515857Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143372:3752] connected; active server actors: 1 2025-04-07T14:25:57.515905Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 10, Generation 1 2025-04-07T14:25:57.515920Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 10, Generation 1 2025-04-07T14:25:57.515935Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 10, Generation 1 2025-04-07T14:25:57.515952Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 10, Generation 1 2025-04-07T14:25:57.515966Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 10, Generation 1 2025-04-07T14:25:57.520838Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143370:3750]: Got location Wait for response for topics: '/Root/PQ/rt3.dc1--topic', ', 2025-04-07T14:25:57.525119Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143372:3752] disconnected; active server actors: 1 2025-04-07T14:25:57.525152Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143372:3752] disconnected no session 2025-04-07T14:25:57.525137Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143375:3755]: Request 
location 2025-04-07T14:25:57.525757Z node 10 :KAFKA_PROXY ERROR: TKafkaMetadataActor [10:7490577402604143373:3753] Describe topic '/Root/bad-topic' location finishied with error: Code=SCHEME_ERROR, Issues={
: Error: path 'Root/bad-topic' does not exist or you do not have access rights, code: 500018 } 2025-04-07T14:25:57.530282Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143384:3758] connected; active server actors: 1 2025-04-07T14:25:57.530371Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 10, Generation 1 2025-04-07T14:25:57.530391Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 10, Generation 1 2025-04-07T14:25:57.530408Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 10, Generation 1 2025-04-07T14:25:57.530426Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 10, Generation 1 2025-04-07T14:25:57.530442Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 10, Generation 1 2025-04-07T14:25:57.531485Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143375:3755]: Got location 2025-04-07T14:25:57.535108Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143384:3758] disconnected; active server actors: 1 2025-04-07T14:25:57.535141Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143384:3758] disconnected no session Wait for response for topics: '/Root/bad-topic', /Root/PQ/rt3.dc1--topic', 2025-04-07T14:25:57.576019Z node 10 :KAFKA_PROXY ERROR: TKafkaMetadataActor [10:7490577402604143386:3760] Describe topic '/Root/bad-topic' location finishied with error: Code=SCHEME_ERROR, Issues={
: Error: path 'Root/bad-topic' does not exist or you do not have access rights, code: 500018 } Wait for response for topics: '/Root/bad-topic', Wait for response for topics: ' 2025-04-07T14:25:57.580573Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143391:3765]: Request location 2025-04-07T14:25:57.581232Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143393:3767] connected; active server actors: 1 2025-04-07T14:25:57.585747Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 10, Generation 1 2025-04-07T14:25:57.585781Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 10, Generation 1 2025-04-07T14:25:57.585796Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 10, Generation 1 2025-04-07T14:25:57.585814Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 10, Generation 1 2025-04-07T14:25:57.585830Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 10, Generation 1 2025-04-07T14:25:57.586269Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490577402604143391:3765]: Got location Wait for response for topics: '/Root/PQ/rt3.dc1--topic', 2025-04-07T14:25:57.594780Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143393:3767] disconnected; active server actors: 1 2025-04-07T14:25:57.594805Z node 11 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [10:7490577402604143393:3767] disconnected no session 2025-04-07T14:25:58.086597Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7490577406899110731:2533] TxId: 281474976715693. Ctx: { TraceId: 01jr8a9jcz5dkbn5cge2jcnskc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjJjZGFmZDctMmRkMjlmMzctYjQ5YjdiZmQtZWNhNmMwYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 11 2025-04-07T14:25:58.100865Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490577406899110740:2547], TxId: 281474976715693, task: 2. Ctx: { SessionId : ydb://session/3?node_id=10&id=YjJjZGFmZDctMmRkMjlmMzctYjQ5YjdiZmQtZWNhNmMwYjY=. CustomerSuppliedId : . TraceId : 01jr8a9jcz5dkbn5cge2jcnskc. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [10:7490577406899110731:2533], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T14:25:58.101709Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7490577406899110741:2548], TxId: 281474976715693, task: 4. Ctx: { SessionId : ydb://session/3?node_id=10&id=YjJjZGFmZDctMmRkMjlmMzctYjQ5YjdiZmQtZWNhNmMwYjY=. CustomerSuppliedId : . TraceId : 01jr8a9jcz5dkbn5cge2jcnskc. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [10:7490577406899110731:2533], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-07T14:25:59.157875Z node 10 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715694. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:25:59.158052Z node 10 :KQP_EXECUTER WARN: ActorId: [10:7490577406899110780:2551] TxId: 281474976715694. Ctx: { TraceId: 01jr8a9m67evbg7n6agbwa0ajv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OWJiNzU5MGEtNDQ3MjZlOWUtZGY5NjUzYjctYjI3NDE0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-07T14:25:59.158551Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OWJiNzU5MGEtNDQ3MjZlOWUtZGY5NjUzYjctYjI3NDE0ZjI=, ActorId: [10:7490577406899110777:2551], ActorState: ExecuteState, TraceId: 01jr8a9m67evbg7n6agbwa0ajv, Create QueryResponse for error on request, msg: 2025-04-07T14:25:59.162695Z node 10 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr8a9m689tvh8jepys0dnsac" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-07T14:25:59.458258Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-07T14:25:59.469173Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 4 2025-04-07T14:25:59.469439Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-07T14:25:59.469634Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 3 2025-04-07T14:25:59.479820Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 2 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.0%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> 
test_http_api.py::TestHttpApi::test_simple_streaming_query >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type |99.0%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |99.0%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0 2025-04-07T14:24:31.154693Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2025-04-07T14:24:31.154933Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2025-04-07T14:24:31.155058Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2025-04-07T14:24:31.156823Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 209715200 2025-04-07T14:24:31.175780Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-07T14:24:31.177733Z node 1 :TABLET_SAUSAGECACHE INFO: Bootstrapped with config MemoryLimit: 33554432 2025-04-07T14:24:34.192815Z node 1 :MEMORY_CONTROLLER INFO: Consumer SharedCache [1:20:2067] registered 2025-04-07T14:24:34.193886Z node 1 :RESOURCE_BROKER INFO: New config diff: Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-04-07T14:24:34.194614Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: 
"queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 } 2025-04-07T14:24:34.196217Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2025-04-07T14:24:34.208349Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true 2025-04-07T14:24:34.243258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:34.243612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:34.244289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:24:34.401763Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:409:2370] 1 registered 2025-04-07T14:24:34.417277Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:422:2372] 0 registered 2025-04-07T14:24:34.422115Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:422:2372] 2 registered 2025-04-07T14:24:34.422471Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:422:2372] 4 registered 2025-04-07T14:24:34.422600Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:422:2372] 5 registered 2025-04-07T14:24:34.424572Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:432:2374] 1 registered 2025-04-07T14:24:34.427617Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:432:2374] 2 registered 2025-04-07T14:24:34.477757Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 1 registered 2025-04-07T14:24:34.508551Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 2 registered 2025-04-07T14:24:34.510240Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 3 registered 2025-04-07T14:24:34.510406Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 4 registered 2025-04-07T14:24:34.510651Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 5 registered 2025-04-07T14:24:34.510797Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 6 registered 2025-04-07T14:24:34.510890Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 7 registered 2025-04-07T14:24:34.511002Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 8 registered 2025-04-07T14:24:34.511191Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 9 registered 2025-04-07T14:24:34.511447Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 10 registered 2025-04-07T14:24:34.511793Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 11 registered 2025-04-07T14:24:34.517769Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 12 registered 2025-04-07T14:24:34.535103Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 13 registered 2025-04-07T14:24:34.536187Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 14 registered 2025-04-07T14:24:34.537271Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 15 registered 2025-04-07T14:24:34.537600Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 16 registered 2025-04-07T14:24:34.537704Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 17 registered 2025-04-07T14:24:34.537856Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 18 registered 2025-04-07T14:24:34.537900Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 19 registered 2025-04-07T14:24:34.548810Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 20 registered 2025-04-07T14:24:34.563388Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 22 registered 2025-04-07T14:24:34.568982Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 23 registered 2025-04-07T14:24:34.570005Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 24 registered 2025-04-07T14:24:34.570552Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 25 registered 
2025-04-07T14:24:34.570810Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 26 registered 2025-04-07T14:24:34.571869Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 27 registered 2025-04-07T14:24:34.572353Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 28 registered 2025-04-07T14:24:34.572535Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 29 registered 2025-04-07T14:24:34.572875Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 30 registered 2025-04-07T14:24:34.572929Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 31 registered 2025-04-07T14:24:34.573258Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 32 registered 2025-04-07T14:24:34.573493Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 33 registered 2025-04-07T14:24:34.573580Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 34 registered 2025-04-07T14:24:34.573775Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 35 registered 2025-04-07T14:24:34.583478Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 36 registered 2025-04-07T14:24:34.587637Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 37 registered 2025-04-07T14:24:34.589450Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 38 registered 2025-04-07T14:24:34.590379Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 39 registered 2025-04-07T14:24:34.590627Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 40 registered 2025-04-07T14:24:34.590830Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 41 registered 2025-04-07T14:24:34.590984Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 42 registered 2025-04-07T14:24:34.591229Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 43 registered 2025-04-07T14:24:34.593503Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 44 registered 2025-04-07T14:24:34.600345Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 45 registered 2025-04-07T14:24:34.600731Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 46 registered 2025-04-07T14:24:34.601151Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 47 registered 2025-04-07T14:24:34.601508Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 48 registered 2025-04-07T14:24:34.601784Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 49 registered 2025-04-07T14:24:34.601842Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 50 registered 2025-04-07T14:24:34.602056Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:440:2376] 51 registered 2025-04-07T14:24:34.602179Z node 1 :MEMORY_CONTROLLER ... 
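The Periodic memory stats records further down in this test's output read as self-consistent: SoftLimit is 75% of HardLimit and TargetUtilization 50% (375MiB/250MiB for the 500MiB cgroup, 150MiB/100MiB for the 200MiB one), and each consumer's printed Limit matches Min + Coefficient * (Max - Min) for the printed Coefficient 0.90625, up to the rounding in the log. A quick check of that reading against the logged numbers (an observation about the printed values, not a statement about the controller's implementation):

# Python check of the apparent relation Limit = Min + Coefficient * (Max - Min),
# using the Min/Max/Coefficient/Limit values printed by MEMORY_CONTROLLER below.
MiB = 1024 * 1024
coeff = 0.90625
cases = [
    ("SharedCache @ 500MiB", 100 * MiB, 250 * MiB, "236MiB"),
    ("MemTable    @ 500MiB",   5 * MiB,  15 * MiB, "14.1MiB"),
    ("SharedCache @ 200MiB",  40 * MiB, 100 * MiB, "94.4MiB"),
    ("MemTable    @ 200MiB",   2 * MiB,   6 * MiB, "5.63MiB"),
]
for name, lo, hi, printed in cases:
    limit = lo + coeff * (hi - lo)
    print(f"{name}: computed {limit / MiB:.2f}MiB, log prints {printed}")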
.379964Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 94 registered 2025-04-07T14:26:00.380011Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 95 registered 2025-04-07T14:26:00.380062Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 96 registered 2025-04-07T14:26:00.380107Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 97 registered 2025-04-07T14:26:00.380145Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 98 registered 2025-04-07T14:26:00.380190Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 99 registered 2025-04-07T14:26:00.380235Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 100 registered 2025-04-07T14:26:00.380279Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 101 registered 2025-04-07T14:26:00.380323Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 102 registered 2025-04-07T14:26:00.380362Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 103 registered 2025-04-07T14:26:00.389253Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 104 registered 2025-04-07T14:26:00.389554Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 105 registered 2025-04-07T14:26:00.389676Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 106 registered 2025-04-07T14:26:00.389737Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 107 registered 2025-04-07T14:26:00.389791Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 108 registered 2025-04-07T14:26:00.389834Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 109 registered 2025-04-07T14:26:00.389882Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 110 registered 2025-04-07T14:26:00.389937Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:442:2376] 111 registered 2025-04-07T14:26:00.389987Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 17 registered 2025-04-07T14:26:00.390047Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 0 registered 2025-04-07T14:26:00.390102Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 18 registered 2025-04-07T14:26:00.390145Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 1 registered 2025-04-07T14:26:00.390197Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 19 registered 2025-04-07T14:26:00.390241Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 2 registered 2025-04-07T14:26:00.390805Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 20 registered 2025-04-07T14:26:00.390991Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 3 registered 2025-04-07T14:26:00.391824Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 21 registered 2025-04-07T14:26:00.392089Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 4 registered 2025-04-07T14:26:00.392307Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 6 registered 2025-04-07T14:26:00.392454Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 9 registered 2025-04-07T14:26:00.397225Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 10 registered 2025-04-07T14:26:00.397431Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 13 registered 2025-04-07T14:26:00.397804Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 14 registered 2025-04-07T14:26:00.397980Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:449:2378] 16 registered 2025-04-07T14:26:00.398056Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 7 registered 2025-04-07T14:26:00.398253Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 1 registered 2025-04-07T14:26:00.398426Z node 9 :MEMORY_CONTROLLER TRACE: MemTable 
[9:487:2382] 2 registered 2025-04-07T14:26:00.398522Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 3 registered 2025-04-07T14:26:00.398563Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 4 registered 2025-04-07T14:26:00.398608Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 5 registered 2025-04-07T14:26:00.398648Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:487:2382] 6 registered 2025-04-07T14:26:00.398679Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 102 registered 2025-04-07T14:26:00.398734Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 103 registered 2025-04-07T14:26:00.398771Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 1 registered 2025-04-07T14:26:00.398816Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 2 registered 2025-04-07T14:26:00.398852Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 3 registered 2025-04-07T14:26:00.398895Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 4 registered 2025-04-07T14:26:00.398940Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 5 registered 2025-04-07T14:26:00.398974Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 6 registered 2025-04-07T14:26:00.399016Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 7 registered 2025-04-07T14:26:00.399055Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 100 registered 2025-04-07T14:26:00.399104Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:492:2384] 101 registered 2025-04-07T14:26:00.399145Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2386] 1 registered 2025-04-07T14:26:00.399195Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2386] 2 registered 2025-04-07T14:26:00.399231Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2386] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000a5d/r3tmp/tmpYnm7D2/pdisk_1.dat 2025-04-07T14:26:00.456892Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 1 registered 2025-04-07T14:26:00.457175Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 2 registered 2025-04-07T14:26:00.457234Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 3 registered 2025-04-07T14:26:00.457384Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 4 registered 2025-04-07T14:26:00.457463Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 120 registered 2025-04-07T14:26:00.457596Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 5 registered 2025-04-07T14:26:00.457754Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 121 registered 2025-04-07T14:26:00.457842Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 6 registered 2025-04-07T14:26:00.457881Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 122 registered 2025-04-07T14:26:00.458086Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 7 registered 2025-04-07T14:26:00.458135Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 123 registered 2025-04-07T14:26:00.458175Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 8 registered 2025-04-07T14:26:00.458219Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 125 registered 2025-04-07T14:26:00.458382Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 126 registered 2025-04-07T14:26:00.458432Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 127 registered 2025-04-07T14:26:00.458477Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 128 registered 2025-04-07T14:26:00.458524Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 129 registered 2025-04-07T14:26:00.458570Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 100 registered 2025-04-07T14:26:00.458803Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 101 registered 2025-04-07T14:26:00.458860Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 131 registered 2025-04-07T14:26:00.458903Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 102 registered 2025-04-07T14:26:00.458942Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 103 registered 2025-04-07T14:26:00.458984Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:478:2380] 105 registered 2025-04-07T14:26:00.519826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:26:00.579528Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:26:00.589483Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-04-07T14:26:00.590082Z node 9 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2025-04-07T14:26:00.642947Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:26:00.643158Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:26:00.657897Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:26:00.974282Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 28.7KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-07T14:26:00.974656Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-07T14:26:00.974812Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 28.7KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-07T14:26:00.974864Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-07T14:26:01.269950Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 28.8KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-07T14:26:01.270192Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-07T14:26:01.270244Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 28.8KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-07T14:26:01.270283Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-07T14:26:01.543013Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 29KiB OtherConsumption: 0B ExternalConsumption: 0B 
TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-07T14:26:01.543408Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-04-07T14:26:01.543491Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 29KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-07T14:26:01.543562Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-07T14:26:01.792019Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 29.1KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-07T14:26:01.792247Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-04-07T14:26:01.792294Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 29.1KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-07T14:26:01.792330Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB |99.0%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |99.0%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-04-07T14:24:35.311604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:633:2324], Scheduled retry for error: {
: Error: Scheme service not found } 2025-04-07T14:24:35.359020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:635:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:35.359548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:24:35.359759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:35.360518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:35.360556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ad4/r3tmp/tmp4OchvY/pdisk_1.dat 2025-04-07T14:24:36.001296Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-04-07T14:24:36.301315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:36.301831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:36.309824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:36.309931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:36.323942Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:24:36.324533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:36.324936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-04-07T14:24:36.683847Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-04-07T14:24:38.242525Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-04-07T14:24:41.753799Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-07T14:24:41.753895Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-04-07T14:24:41.753963Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-04-07T14:24:41.754185Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:78:2077] ServerId# [1:1004:2595] TabletId# 72057594037932033 PipeClientId# [2:78:2077] 2025-04-07T14:24:41.754896Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-07T14:24:41.754945Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-04-07T14:24:41.755504Z node 2 :TX_PROXY WARN: actor# [2:194:2108] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-07T14:24:41.756033Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-04-07T14:24:41.756123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-04-07T14:24:41.756539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-04-07T14:24:41.764140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:41.780041Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-07T14:24:41.781200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-04-07T14:24:53.568658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:640:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:53.569081Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:53.569193Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-07T14:24:53.570847Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:638:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:53.571061Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:53.571233Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ad4/r3tmp/tmpcYRuWG/pdisk_1.dat 2025-04-07T14:24:53.900017Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:54.062128Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1073:2345] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-04-07T14:24:54.163160Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:54.163294Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:54.185372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:54.185492Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:54.202399Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-07T14:24:54.203139Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:54.203543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:55.094214Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1074:2346] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-04-07T14:24:56.077256Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1073:2345] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-04-07T14:24:56.089534Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1073:2345] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-04-07T14:24:57.030156Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1073:2345] at coordinator 72057594046316545 with seqNo 124 2025-04-07T14:25:06.419149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:06.419537Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:06.419600Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ad4/r3tmp/tmpcJ5y7d/pdisk_1.dat 2025-04-07T14:25:06.837590Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:06.894124Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:06.894259Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:06.905895Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-04-07T14:25:07.227131Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-07T14:25:07.228629Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-04-07T14:25:07.234655Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-07T14:25:07.234868Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-04-07T14:25:07.666316Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-07T14:25:07.668214Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-07T14:25:12.454799Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:574:2386], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:25:12.455171Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:25:12.455262Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000ad4/r3tmp/tmpx2MpxX/pdisk_1.dat 2025-04-07T14:25:12.847216Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:13.145321Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:13.145485Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:13.169593Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-04-07T14:25:14.362877Z node 6 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_par ... rker# C0 2025-04-07T14:26:06.625252Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1001 Status# 16 SEND to# [15:560:2486] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-04-07T14:26:06.638439Z node 15 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-04-07T14:26:06.638573Z node 15 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... waiting for migrated state 2025-04-07T14:26:06.691621Z node 15 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-07T14:26:06.692174Z node 15 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-07T14:26:06.701433Z node 15 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-07T14:26:06.702262Z node 15 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted ... blocking state response from [15:512:2371] to [15:656:2530] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-04-07T14:26:06.702808Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [15:560:2486] Proxy 2025-04-07T14:26:06.704547Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [15:522:2457] disconnnected 2025-04-07T14:26:06.704832Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [15:522:2457] ... trying to plan tx 10000011 ... 
waiting for planned another persistent tx 2025-04-07T14:26:06.729590Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [15:663:2540] connected 2025-04-07T14:26:06.729973Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-04-07T14:26:06.730054Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [15:660:2538] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-04-07T14:26:06.730366Z node 15 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-07T14:26:06.730428Z node 15 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 2025-04-07T14:26:06.730566Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-04-07T14:26:06.730648Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1051 Status# 16 SEND to# [15:560:2486] Proxy marker# C1 ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-07T14:26:06.737939Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-04-07T14:26:06.738075Z node 15 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-04-07T14:26:06.738257Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [15:539:2466] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [15:660:2538]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-04-07T14:26:06.738468Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [15:660:2538]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-04-07T14:26:06.738557Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND Ev to# [15:540:2467] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-04-07T14:26:06.738648Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:540:2467] bucket.ActiveActor step# 1050 2025-04-07T14:26:06.738715Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:541:2468] bucket.ActiveActor step# 1050 2025-04-07T14:26:06.738842Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [15:660:2538]}}} marker# M4 2025-04-07T14:26:06.739035Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:541:2468] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-07T14:26:06.739293Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-07T14:26:06.739874Z node 15 
:TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:26:06.739982Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 660 RawX2: 64424511978 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 2025-04-07T14:26:06.741423Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T14:26:06.741620Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:3] persistent tx 10000000 for mediator 72057594046382081 tablet 72057594047365120 removed=1 2025-04-07T14:26:06.741688Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:3] persistent tx 10000000 for mediator 72057594046382081 acknowledged 2025-04-07T14:26:06.741752Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:3] persistent tx 10000000 acknowledged ... waiting for planned another persistent tx ... coordinator 72057594046316545 gen 3 is planning step 1100 2025-04-07T14:26:06.753369Z node 15 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-04-07T14:26:06.753507Z node 15 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-04-07T14:26:06.754502Z node 15 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-04-07T14:26:06.754604Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [15:560:2486] Proxy ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-07T14:26:06.754979Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-04-07T14:26:06.755057Z node 15 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-04-07T14:26:06.755262Z node 15 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [15:539:2466] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [15:660:2538]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-04-07T14:26:06.755437Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [15:660:2538]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-04-07T14:26:06.755553Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND Ev to# [15:540:2467] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-04-07T14:26:06.755661Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:540:2467] bucket.ActiveActor step# 1100 2025-04-07T14:26:06.755730Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:541:2468] bucket.ActiveActor step# 1100 2025-04-07T14:26:06.755853Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [15:660:2538]}}} marker# M4 2025-04-07T14:26:06.755964Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-04-07T14:26:06.756080Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:541:2468] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-04-07T14:26:06.756250Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 660 RawX2: 64424511978 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 2025-04-07T14:26:06.756647Z node 15 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-07T14:26:06.756919Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:5] persistent tx 10000011 for mediator 72057594046382081 tablet 72057594047365120 removed=1 2025-04-07T14:26:06.756991Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:5] persistent tx 10000011 for mediator 72057594046382081 acknowledged 2025-04-07T14:26:06.757052Z node 15 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [3:5] persistent tx 10000011 acknowledged ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-07T14:26:06.768867Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-04-07T14:26:06.768950Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:540:2467] bucket.ActiveActor step# 1150 2025-04-07T14:26:06.768998Z node 15 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [15:539:2466] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [15:541:2468] bucket.ActiveActor step# 1150 2025-04-07T14:26:06.769055Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:540:2467] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-04-07T14:26:06.769100Z node 15 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [15:541:2468] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |99.0%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest >> TopicSessionTests::SlowSession >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> TCreateAndDropViewTest::DropViewIfExists [GOOD] >> TCreateAndDropViewTest::DropViewInFolder ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] 2025-04-07 14:26:10,323 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:26:10,648 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 591986 76.0M 74.8M 22.0M test_tool run_ut @/home/runner/.ya/build/build_root/u6pr/001478/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args 593335 2.4G 2.3G 1.9G └─ ydb-services-persqueue_v1-ut --trace-path-append /home/runner/.ya/build/build_root/u6pr/001478/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk 688785 424M 463M 362M └─ llvm-symbolizer --demangle --inlines --default-arch=x86_64 Test command err: === Server->StartServer(false); 2025-04-07T14:16:13.817165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490574893056460110:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:16:13.817211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:16:13.938117Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/001478/r3tmp/tmpuZJceM/pdisk_1.dat 2025-04-07T14:16:14.072218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23585, node 1 2025-04-07T14:16:14.160765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:16:14.160891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:16:14.163123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:16:14.245536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/u6pr/001478/r3tmp/yandexBxVnrc.tmp 2025-04-07T14:16:14.245558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/u6pr/001478/r3tmp/yandexBxVnrc.tmp 2025-04-07T14:16:14.245825Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/u6pr/001478/r3tmp/yandexBxVnrc.tmp 2025-04-07T14:16:14.245938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-07T14:16:14.422777Z INFO: TTestServer started on Port 18983 GrpcPort 23585 TClient is connected to server localhost:18983 PQClient connected to localhost:23585 === TenantModeEnabled() = 0 === Init PQ - start server on port 23585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-07T14:16:14.701685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-07T14:16:14.701944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.702731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-07T14:16:14.704899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-07T14:16:14.704947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.705900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:14.706537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-07T14:16:14.706745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.706786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-07T14:16:14.706845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-07T14:16:14.706860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-07T14:16:14.707194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.707219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-07T14:16:14.707228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-07T14:16:14.707542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.707586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.707626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:14.707668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:16:14.712626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-07T14:16:14.712990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:14.713035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-07T14:16:14.713073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:16:14.713151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-07T14:16:14.713278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-07T14:16:14.714202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744035374760, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-07T14:16:14.714284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7490574897351427875 RawX2: 4294969515 } } Step: 1744035374760 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-07T14:16:14.714313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:14.714584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-07T14:16:14.714608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-07T14:16:14.714753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-07T14:16:14.714815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-07T14:16:14.715247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-07T14:16:14.715266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-07T14:16:14.715402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-07T14:16:14.715421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7490574897351427909:2242], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-07T14:16:14.715456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:16:14.715476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-07T14:16:14.715548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-07T14:16:14.715561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:16:14.715596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-07T14:16:14.715614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-07T14:16:14.715632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-07T14:16:14.715640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-07T14:16:14.715665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-07T14:16:14.715676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-07T14:16:14.715683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-07T14:16:14.717197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:14.717252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-07T14:16:14.717259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-07T14:16:14.717268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-07T14:16:14.717300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Decreme ... 
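In the FLAT_TX_SCHEMESHARD records above, the numbers in the "Change state for txid ..." messages line up with the operation phases named by the surrounding ProgressState records: TCreateParts appears just before 2 -> 3, NSubDomainState::TConfigureParts before 3 -> 128, NSubDomainState::TPropose before 128 -> 240, and TDone after. A small lookup table inferred purely from those log lines (not the authoritative schemeshard enum):

# Python: state numbers as they appear in "Change state for txid ..." above, paired
# with the phase names taken from the neighbouring ProgressState records; inferred
# from this log only, not from the schemeshard sources.
observed_states = {
    2: "CreateParts",
    3: "ConfigureParts",
    128: "Propose",
    240: "Done",
}
for code, phase in observed_states.items():
    print(f"state {code:>3} ~ {phase}")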
ffset 0x12b0 warning: address range table at offset 0x12d0 has a premature terminator entry at offset 0x12e0 warning: address range table at offset 0x1300 has a premature terminator entry at offset 0x1310 warning: address range table at offset 0x1330 has a premature terminator entry at offset 0x1340 warning: address range table at offset 0x1390 has a premature terminator entry at offset 0x13a0 warning: address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 warning: address range table at offset 0x13f0 has a premature terminator entry at offset 0x1400 warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0x1000 has a premature terminator entry at offset 0x1010 warning: address range table at offset 0x1030 has a premature terminator entry at offset 0x1040 warning: address range table at offset 0x1060 has a premature terminator entry at offset 0x1070 warning: address range table at offset 0x1090 has a premature terminator entry at offset 0x10a0 warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 warning: address range table at offset 0x10f0 has a premature terminator entry at offset 0x1100 warning: address range table at offset 0x1120 has a premature terminator entry at offset 0x1130 warning: address range table at offset 0x1150 has a premature terminator entry at offset 0x1160 warning: address range table at offset 0x1180 has a premature terminator entry at offset 0x1190 warning: address range table at offset 0x11b0 has a premature terminator entry at offset 0x11c0 warning: address range table at offset 0x11e0 has a premature terminator entry at offset 0x11f0 warning: address range table at offset 0x1210 has a premature terminator entry at offset 0x1220 warning: address range table at offset 0x1240 has a premature terminator entry at offset 0x1250 warning: address range table at offset 0x1270 has a premature terminator entry at offset 0x1280 warning: address range table at offset 0x12a0 has a premature terminator entry at offset 0x12b0 warning: address range table at offset 0x12d0 has a premature terminator entry at offset 0x12e0 warning: address range table at offset 0x1300 has a premature terminator entry at offset 0x1310 warning: address range table at offset 0x1330 has a premature terminator entry at offset 0x1340 warning: address range table at offset 0x1390 has a premature terminator entry at offset 0x13a0 warning: 
address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 warning: address range table at offset 0x13f0 has a premature terminator entry at offset 0x1400 warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001478/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7573699373/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/u6pr/001478/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] |99.1%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions |99.1%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: 2025-04-07T14:24:06.800338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:06.800646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:06.800888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000b8e/r3tmp/tmpP1cO9d/pdisk_1.dat 2025-04-07T14:24:07.290114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:07.347773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:07.403493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:07.403941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:07.415463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:07.546163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:07.601024Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:646:2546] 2025-04-07T14:24:07.601291Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:07.651329Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:652:2548] 2025-04-07T14:24:07.651514Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:07.659404Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:07.659677Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:07.661262Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:07.661332Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:07.661384Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:07.661685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:07.686207Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:07.688811Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:07.689005Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2568] 2025-04-07T14:24:07.689051Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:07.689102Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:07.689141Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:07.697303Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:07.697419Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2025-04-07T14:24:07.697578Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:637:2540], serverId# [1:680:2564], sessionId# [0:0:0] 2025-04-07T14:24:07.698132Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:07.698175Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:07.698231Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:07.698274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:07.698354Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:07.698510Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:07.700614Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:07.700755Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:07.701306Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:07.702473Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-07T14:24:07.702547Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-07T14:24:07.702600Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-07T14:24:07.702825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:07.702872Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-07T14:24:07.702935Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:07.703021Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:689:2570] 2025-04-07T14:24:07.703047Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-07T14:24:07.703069Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-07T14:24:07.703103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:24:07.704039Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-07T14:24:07.704105Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-07T14:24:07.704423Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:638:2541], serverId# [1:688:2569], sessionId# [0:0:0] 2025-04-07T14:24:07.704751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-07T14:24:07.704782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:07.704810Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-07T14:24:07.704856Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:24:07.705062Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-07T14:24:07.705215Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:07.705272Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-07T14:24:07.707355Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:656:2550] 2025-04-07T14:24:07.707522Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:07.718083Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:659:2552] 2025-04-07T14:24:07.718294Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:07.726587Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:07.726730Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:07.727907Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-07T14:24:07.727965Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-07T14:24:07.727999Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-07T14:24:07.728255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:07.728297Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-07T14:24:07.728356Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:07.728420Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:725:2587] 2025-04-07T14:24:07.728445Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-07T14:24:07.728467Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-07T14:24:07.728490Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-07T14:24:07.729074Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-07T14:24:07.729151Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-07T14:24:07.729320Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:07.729384Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-07T14:24:07.729425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-07T14:24:07.729450Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:07.729480Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-07T14:24:07.729509Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-07T14:24:07.729886Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:07.729966Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:07.731063Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-07T14:24:07.731119Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-07T14:24:07.731156Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 
2025-04-07T14:24:07.731372Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:07.731407Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-07T14:24:07.731462Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:07.731516Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:726:2588] 2025-04-07T14:24:07.731549Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-07T14:24:07.731573Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-07T14:24:07.731594Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-07T14:24:07.731836Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-07T14:24:07.731887Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-07T14:24:07.731977Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 7 ... 5-04-07T14:26:16.016619Z node 5 :TX_DATASHARD TRACE: Execution plan for [2002:281474976715664] at 72075186224037889 has finished 2025-04-07T14:26:16.016650Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:26:16.016702Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-07T14:26:16.016731Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-07T14:26:16.016818Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-07T14:26:16.017809Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 2002 txid# 281474976715664} 2025-04-07T14:26:16.017891Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2002} 2025-04-07T14:26:16.018310Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:964:2773], Recipient [5:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:26:16.018342Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:26:16.018368Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:961:2770], serverId# [5:964:2773], sessionId# [0:0:0] 2025-04-07T14:26:16.018427Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 2002 txid# 281474976715664} 2025-04-07T14:26:16.018454Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2002} 2025-04-07T14:26:16.018597Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [5:720:2601], Recipient [5:632:2537]: {TEvReadSet step# 2002 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T14:26:16.018625Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T14:26:16.018666Z node 5 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-04-07T14:26:16.018748Z node 5 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 
72075186224037888 got read set: {TEvReadSet step# 2002 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-07T14:26:16.019054Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:26:16.019101Z node 5 :TX_DATASHARD TRACE: Complete execution for [2002:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:26:16.019172Z node 5 :TX_DATASHARD DEBUG: Complete [2002 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [5:954:2720], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:26:16.019234Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:26:16.019274Z node 5 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-07T14:26:16.019335Z node 5 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 {TEvReadSet step# 2002 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-04-07T14:26:16.019529Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [5:632:2537], Recipient [5:720:2601]: {TEvReadSet step# 2002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-07T14:26:16.019552Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-07T14:26:16.019575Z node 5 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-04-07T14:26:16.019613Z node 5 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-07T14:26:16.019662Z node 5 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-04-07T14:26:16.020386Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZTUzYWM5Y2MtOWYwNWI0MDItNzc4MDUxYjMtMzBkOTlhNGI=, ActorId: [5:899:2720], ActorState: ExecuteState, TraceId: 01jr8aa4q50x65fafy4bz5d6je, Create QueryResponse for error on request, msg: 2025-04-07T14:26:16.020938Z node 5 :TX_PROXY DEBUG: actor# [5:52:2099] Handle TEvExecuteKqpTransaction 2025-04-07T14:26:16.020973Z node 5 :TX_PROXY DEBUG: actor# [5:52:2099] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-07T14:26:16.021062Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-07T14:26:16.021097Z node 5 :TX_DATASHARD TRACE: Complete execution for [2002:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-07T14:26:16.021142Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:26:16.021165Z node 5 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-07T14:26:16.021672Z node 5 :TX_DATASHARD ERROR: Complete [2002 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-07T14:26:16.021759Z node 5 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-07T14:26:16.022022Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr8aa4q50x65fafy4bz5d6je, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTUzYWM5Y2MtOWYwNWI0MDItNzc4MDUxYjMtMzBkOTlhNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-07T14:26:16.022258Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [5:966:2720], Recipient [5:632:2537]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 966 RawX2: 21474839200 } TxBody: " \0018\001j5\010\001\032\'\n#\t\216\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n8\001" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-07T14:26:16.022286Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:26:16.022401Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [5:632:2537], Recipient [5:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:26:16.022429Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-07T14:26:16.022491Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:26:16.022675Z node 5 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-07T14:26:16.022796Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-07T14:26:16.022863Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:26:16.022910Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-07T14:26:16.022952Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-07T14:26:16.022992Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-07T14:26:16.023035Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2002/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-04-07T14:26:16.023097Z node 5 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-07T14:26:16.023135Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:26:16.023155Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-07T14:26:16.023172Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-07T14:26:16.023192Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-07T14:26:16.023250Z node 5 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193450 2025-04-07T14:26:16.023347Z node 5 :TX_DATASHARD TRACE: KqpEraseLock LockId: 
281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-07T14:26:16.023418Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-07T14:26:16.023462Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:26:16.023496Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-07T14:26:16.023515Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:26:16.023533Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:26:16.023572Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-07T14:26:16.023635Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-07T14:26:16.023679Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:26:16.023728Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:26:16.023765Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:26:16.023808Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-07T14:26:16.023826Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:26:16.023846Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-07T14:26:16.023901Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:26:16.023947Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-07T14:26:16.023991Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:26:16.025002Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [5:54:2101], Recipient [5:632:2537]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 5 Status: STATUS_NOT_FOUND |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> TopicSessionTests::SlowSession [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> 
test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2025-04-07T14:24:13.040725Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-07T14:24:13.040918Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.049157Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-04-07T14:24:13.049232Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-07T14:24:13.086628Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-07T14:24:13.086732Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.086850Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.086932Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-07T14:24:13.086957Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-07T14:24:13.087190Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.114550Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-07T14:24:13.114663Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.114908Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-04-07T14:24:13.114974Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2025-04-07T14:24:13.115031Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-04-07T14:24:13.115152Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.121880Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-04-07T14:24:13.121968Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-04-07T14:24:13.122003Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-04-07T14:24:13.133492Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-07T14:24:13.133629Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.133797Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-07T14:24:13.133985Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.157263Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-07T14:24:13.157373Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-07T14:24:13.157853Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-07T14:24:13.157986Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-07T14:24:13.172963Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-07T14:24:13.173097Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.173233Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-04-07T14:24:13.173433Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.173743Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-07T14:24:13.173791Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-07T14:24:13.173862Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-04-07T14:24:13.173922Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-04-07T14:24:13.174023Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-04-07T14:24:13.174101Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-04-07T14:24:13.174299Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-07T14:24:13.174327Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-07T14:24:13.174358Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-04-07T14:24:13.174398Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-07T14:24:13.174505Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res2" 2025-04-07T14:24:13.174585Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-04-07T14:24:13.174755Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-07T14:24:13.174784Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-07T14:24:13.174812Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-04-07T14:24:13.174853Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 
2025-04-07T14:24:13.175140Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-04-07T14:24:13.175179Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-04-07T14:24:13.175209Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-04-07T14:24:13.175925Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 4 RawX2: 21474838531 } }) 2025-04-07T14:24:13.175988Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-07T14:24:13.176275Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-07T14:24:13.176308Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-07T14:24:13.176423Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-04-07T14:24:13.176466Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-04-07T14:24:13.176630Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.176933Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-07T14:24:13.176966Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-07T14:24:13.176998Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-07T14:24:13.177023Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-07T14:24:13.177066Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-04-07T14:24:13.185576Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-07T14:24:13.185685Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.185910Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-07T14:24:13.186065Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.187585Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-07T14:24:13.187639Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-07T14:24:13.187680Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-07T14:24:13.188389Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-07T14:24:13.199610Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-07T14:24:13.199738Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.199891Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-07T14:24:13.200016Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.200300Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-07T14:24:13.200335Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-07T14:24:13.200384Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-07T14:24:13.200465Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-07T14:24:13.200935Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-04-07T14:24:13.200976Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-04-07T14:24:13.201011Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-04-07T14:24:13.201117Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 4 RawX2: 30064773123 } }) 2025-04-07T14:24:13.201187Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-07T14:24:13.201332Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-04-07T14:24:13.201362Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-04-07T14:24:13.201462Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 4 RawX2: 30064773123 } }) 2025-04-07T14:24:13.228967Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-07T14:24:13.229105Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-07T14:24:13.229265Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-07T14:24:13.229411Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-07T14:24:13.229684Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... ACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:18.706218Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:18.706262Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:18.706327Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:18.707606Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-07T14:26:18.707643Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-07T14:26:18.807000Z 2025-04-07T14:26:18.707669Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-07T14:26:18.707818Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-07T14:26:18.707870Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000101115, QueueWeight: 5 } 2025-04-07T14:26:18.707916Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:18.707996Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:18.800915Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:18.801037Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:18.801071Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:18.801125Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:18.801816Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:18.807794Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-07T14:26:18.807855Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-07T14:26:18.907000Z 2025-04-07T14:26:18.807879Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-07T14:26:18.808927Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-07T14:26:18.808971Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001011154562, QueueWeight: 5 } 2025-04-07T14:26:18.809013Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:18.809086Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:18.900736Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:18.900866Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:18.900906Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:18.900972Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998988845, 2)} }]) 2025-04-07T14:26:18.901295Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:18.907166Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.9998988845. FreeBalance: 0.9998988845 2025-04-07T14:26:18.907210Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-07T14:26:19.007000Z 2025-04-07T14:26:18.907236Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-07T14:26:18.907311Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.9998988845. FreeBalance: 0.9998988845. TicksToFullfill: 5.000505628. DurationToFullfillInUs: 500050.5628. TimeToFullfill: 2025-04-07T14:26:18.504675Z. Now: 2025-04-07T14:26:18.907087Z. LastAllocated: 2025-04-07T14:26:18.004624Z 2025-04-07T14:26:18.907558Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-04-07T14:26:18.907592Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000101115, QueueWeight: 0 } 2025-04-07T14:26:18.907640Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:18.907887Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.005793Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.005931Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.005968Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.006015Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:19.007096Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.007246Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2025-04-07T14:26:19.100433Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.100562Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.100596Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.100638Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:19.100735Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.100762Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-07T14:26:19.200801Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.200927Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.200974Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.201015Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:19.201125Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.201169Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-07T14:26:19.300705Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.300848Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.300882Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.300922Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-07T14:26:19.300994Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.301037Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-07T14:26:19.400484Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.400631Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.400685Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.400731Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998988845, 2)} }]) 2025-04-07T14:26:19.400813Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.400848Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". 
Balance: 0.9998988845. FreeBalance: 0.9998988845 2025-04-07T14:26:19.500849Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.501006Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.501047Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.501095Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(1.999898885, 2)} }]) 2025-04-07T14:26:19.501176Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.501198Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 1.999898885. FreeBalance: 1.999898885 2025-04-07T14:26:19.600694Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7490577491315623660:2274]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-07T14:26:19.600823Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-07T14:26:19.600857Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-07T14:26:19.600878Z node 49 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Deactivate session to "Resource". Connected: 1 2025-04-07T14:26:19.600970Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 1 } ActorID { RawX1: 7490577491315623660 RawX2: 4503810080770274 } }) 2025-04-07T14:26:19.601011Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(2.999898885, 2)} }]) 2025-04-07T14:26:19.601081Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-07T14:26:19.601115Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 2.999898885. FreeBalance: 2.999898885 2025-04-07T14:26:19.601205Z node 49 :KESUS_TABLET TRACE: [72075186224037888] Send TEvUpdateConsumptionStateAck to [49:7490577491315623660:2274]. Cookie: 0. 
Data: { } 2025-04-07T14:26:19.601238Z node 49 :KESUS_TABLET DEBUG: [72075186224037888] Update quoter resources consumption state (sender=[49:7490577491315623660:2274], cookie=0) |99.1%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> 
TopicSessionTests::RestartSessionIfQueryStopped >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> TDqPqRdReadActorTests::Backpressure [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select 
distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> RowDispatcherTests::TwoClientOneSession >> RowDispatcherTests::TwoClientOneSession [GOOD] >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TDqPqReadActorTest::TestReadFromTopic |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> 
test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-04-07T14:24:46.812606Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:26:2056] 2025-04-07T14:24:46.812854Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:21:2053] 2025-04-07T14:24:46.813033Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:21:2053] 2025-04-07T14:24:46.813084Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:22:2053] 2025-04-07T14:24:46.813107Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:22:2053] 2025-04-07T14:24:46.813136Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:23:2053] 2025-04-07T14:24:46.813153Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:23:2053] 2025-04-07T14:24:46.813239Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:24:2054], topic1, partIds: 0 2025-04-07T14:24:46.813360Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:24:2054] 2025-04-07T14:24:46.814389Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:25:2055], topic1, partIds: 0 2025-04-07T14:24:46.814470Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:25:2055] 2025-04-07T14:24:46.828367Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:25:2055], topic1, partIds: 1 2025-04-07T14:24:46.828507Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:25:2055] 2025-04-07T14:24:46.828691Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:27:2054] 2025-04-07T14:24:46.828744Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:22:2053] to new [2:27:2054] 2025-04-07T14:24:46.828811Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:27:2054] 2025-04-07T14:24:46.828882Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:28:2055] 2025-04-07T14:24:46.828937Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:27:2054] to new [2:28:2055] 2025-04-07T14:24:46.828968Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:28:2055] 2025-04-07T14:24:46.829046Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:24:2054], topic1, partIds: 0 2025-04-07T14:24:46.829093Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:24:2054] 2025-04-07T14:24:46.829197Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:25:2055], topic1, partIds: 1 2025-04-07T14:24:46.829249Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:25:2055] 2025-04-07T14:24:46.933191Z node 5 :FQ_ROW_DISPATCHER DEBUG: 
Coordinator: Successfully bootstrapped coordinator, id [5:26:2056] 2025-04-07T14:24:46.933339Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:21:2053] 2025-04-07T14:24:46.933377Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:21:2053] 2025-04-07T14:24:46.933417Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:22:2053] 2025-04-07T14:24:46.933457Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:22:2053] 2025-04-07T14:24:46.933494Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:23:2053] 2025-04-07T14:24:46.933512Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:23:2053] 2025-04-07T14:24:46.933592Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:24:2054], topic1, partIds: 0, 1, 2 2025-04-07T14:24:46.933727Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:24:2054] 2025-04-07T14:24:46.933887Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:25:2055], topic1, partIds: 3 2025-04-07T14:24:46.933981Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:25:2055] 2025-04-07T14:24:47.100221Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:7:2054] Successfully bootstrapped, local coordinator id [9:4:2051] 2025-04-07T14:24:47.100443Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-04-07T14:24:47.100501Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-04-07T14:24:47.100567Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-07T14:24:47.100605Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.100672Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.108485Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-07T14:24:47.108541Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.108564Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.108623Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-07T14:24:47.108642Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.108653Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.219724Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.219852Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.219940Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.219972Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.231235Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.231321Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.263792Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.263948Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.263976Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.298803Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.298973Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.299002Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.299863Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.310128Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.310198Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.326178Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.326326Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.326361Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.330624Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.334434Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-07T14:24:47.334587Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.334616Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.347123Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.347202Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-04-07T14:24:47.347265Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.347313Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges) } 2025-04-07T14:24:47.347351Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:7:2054] Coordination node successfully created 2025-04-07T14:24:47.347377Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:7:2054] Start session 2025-04-07T14:24:47.405449Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Coordination node successfully created 2025-04-07T14:24:47.405519Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Start session 2025-04-07T14:24:47.406367Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.406407Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.419072Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-07T14:24:47.419203Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges) } 2025-04-07T14:24:47.419288Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Coordination node successfully created 2025-04-07T14:24:47.419318 ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.271460Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:16:2058] 2025-04-07T14:26:49.271632Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:12:2055], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-07T14:26:49.271810Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-07T14:26:49.271966Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-04-07T14:26:49.272309Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:13:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-07T14:26:49.272760Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:20:2062] to [38:12:2055] query id QueryId 2025-04-07T14:26:49.272869Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:12:2055] part id 0 query id QueryId 2025-04-07T14:26:49.272969Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:20:2062] to [38:12:2055] query id QueryId 2025-04-07T14:26:49.273053Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:21:2063] to [38:12:2055] query id QueryId 2025-04-07T14:26:49.273176Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:12:2055] part id 1 query id QueryId 2025-04-07T14:26:49.273265Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:21:2063] to [38:12:2055] query id QueryId 2025-04-07T14:26:49.273390Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:20:2062] to [38:13:2056] query id QueryId 2025-04-07T14:26:49.273498Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:13:2056] part id 0 query id QueryId 2025-04-07T14:26:49.273589Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:20:2062] to [38:13:2056] query id QueryId 2025-04-07T14:26:49.273680Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:21:2063] to [38:13:2056] query id QueryId 2025-04-07T14:26:49.273764Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:13:2056] part id 1 query id QueryId 2025-04-07T14:26:49.273844Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:21:2063] to [38:13:2056] query id QueryId 2025-04-07T14:26:49.273928Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:12:2055], reason ActorUnknown 2025-04-07T14:26:49.273986Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:12:2055] query id QueryId 2025-04-07T14:26:49.274215Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:13:2056], reason ActorUnknown 2025-04-07T14:26:49.274265Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId 
[38:13:2056] query id QueryId 2025-04-07T14:26:49.274339Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:20:2062] 2025-04-07T14:26:49.274402Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:21:2063] 2025-04-07T14:26:49.379676Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:15:2057], tenant Tenant 2025-04-07T14:26:49.391048Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:16:2058] 2025-04-07T14:26:49.391136Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:17:2059] Successfully bootstrapped, local coordinator id [40:16:2058] 2025-04-07T14:26:49.391207Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-07T14:26:49.391236Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.391262Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.391351Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:16:2058] 2025-04-07T14:26:49.391537Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:12:2055], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-07T14:26:49.391711Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-07T14:26:49.392151Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:13:2056], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-04-07T14:26:49.392331Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-04-07T14:26:49.392639Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:20:2062] to [40:12:2055] query id QueryId 2025-04-07T14:26:49.392966Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:12:2055] part id 0 query id QueryId 2025-04-07T14:26:49.393076Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:20:2062] to [40:12:2055] query id QueryId 2025-04-07T14:26:49.393173Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:21:2063] to [40:13:2056] query id QueryId 2025-04-07T14:26:49.393262Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:13:2056] part id 0 query id QueryId 2025-04-07T14:26:49.393339Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:21:2063] to [40:13:2056] query id QueryId 2025-04-07T14:26:49.393440Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:12:2055] topic topic query id QueryId 2025-04-07T14:26:49.393490Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:12:2055] query id QueryId 2025-04-07T14:26:49.393571Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:20:2062] 2025-04-07T14:26:49.393788Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:13:2056] topic topic query id QueryId 2025-04-07T14:26:49.393849Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:13:2056] query id QueryId 
2025-04-07T14:26:49.393936Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:21:2063] 2025-04-07T14:26:49.535491Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:15:2057], tenant Tenant 2025-04-07T14:26:49.545134Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:16:2058] 2025-04-07T14:26:49.545224Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:17:2059] Successfully bootstrapped, local coordinator id [42:16:2058] 2025-04-07T14:26:49.545297Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-07T14:26:49.545328Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.545356Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.545446Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:16:2058] 2025-04-07T14:26:49.545621Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:14:2052], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-04-07T14:26:49.545839Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-07T14:26:49.553719Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-04-07T14:26:49.553990Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-04-07T14:26:49.554370Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-04-07T14:26:49.554809Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:20:2062] to [43:14:2052] query id QueryId 2025-04-07T14:26:49.555160Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:14:2052] part id 0 query id QueryId 2025-04-07T14:26:49.555280Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:20:2062] to [43:14:2052] query id QueryId 2025-04-07T14:26:49.555545Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:14:2052], generation 41 2025-04-07T14:26:49.555617Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:20:2062] to [43:14:2052] query id QueryId 2025-04-07T14:26:49.555907Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:14:2052] part id 0 query id QueryId 2025-04-07T14:26:49.556006Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:20:2062] to [43:14:2052] query id QueryId 2025-04-07T14:26:49.556300Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:14:2052], generation 42 2025-04-07T14:26:49.556365Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:14:2052] query id QueryId 2025-04-07T14:26:49.556461Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:20:2062] 2025-04-07T14:26:49.666775Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:15:2057], tenant Tenant 2025-04-07T14:26:49.676877Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:16:2058] 2025-04-07T14:26:49.676975Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:17:2059] Successfully bootstrapped, local coordinator id [44:16:2058] 2025-04-07T14:26:49.677046Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-07T14:26:49.677076Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.677105Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-07T14:26:49.677194Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:16:2058] 2025-04-07T14:26:49.677359Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:12:2055], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-07T14:26:49.677567Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-07T14:26:49.677955Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:20:2062] to [44:12:2055] query id QueryId 2025-04-07T14:26:49.678061Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:12:2055] topic topic query id QueryId 2025-04-07T14:26:49.678111Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:12:2055] query id QueryId 2025-04-07T14:26:49.678197Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:20:2062] |99.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopic [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> TDqPqReadActorTest::TestReadFromTopicFromNow >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> 
TControlPlaneProxyTest::ShouldSendDeleteBinding >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-04-07T14:23:54.525686Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-07T14:23:54.857381Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:55.308339Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:55.838033Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:56.263671Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:56.664459Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:57.160631Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:57.593013Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:58.034265Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-07T14:23:58.475485Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:58.931170Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:59.377294Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:23:59.838211Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:00.289101Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:00.768847Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:01.232690Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:01.679621Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:02.164707Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:02.652864Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:03.119515Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:03.675825Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:04.156878Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:04.644573Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:05.209160Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:37.021406Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-07T14:24:37.806379Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:38.520822Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:39.130331Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:39.837329Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:40.616457Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:41.407506Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:42.185071Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:42.810442Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-07T14:24:43.531866Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:44.233843Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:44.949241Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:45.653843Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:46.306541Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:46.963931Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:47.598657Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:48.333309Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:49.052351Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:49.678583Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:50.345204Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:51.168920Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:51.963874Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:52.755413Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:24:53.712161Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:25:59.755044Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-07T14:26:03.202916Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:04.389031Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:05.862365Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:07.002455Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:08.059607Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:11.199899Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:14.291866Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:15.431645Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:16.370476Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:17.331551Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:20.310849Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:21.258065Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:23.207045Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:24.196580Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:26.131778Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:27.088717Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:28.002458Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:29.923158Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-07T14:26:30.942056Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:31.925364Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:32.891876Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:33.842187Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:34.793542Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:35.801369Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:36.716431Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:37.638329Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
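When triaging a run like this one, it can help to aggregate the denial records by permission and scope. The helper below is not part of the test suite; it is a sketch that relies only on the line format visible in the surrounding records ("No permission <perm> in a given scope <scope>, code: 1000").

import re
from collections import Counter

DENIAL_RE = re.compile(
    r"No permission (?P<perm>[\w.]+@as) in a given scope (?P<scope>\S*), code: 1000"
)

def summarize(log_lines):
    # Count (permission, scope) pairs among the denial records.
    counts = Counter()
    for line in log_lines:
        m = DENIAL_RE.search(line)
        if m:
            counts[(m.group("perm"), m.group("scope") or "<empty scope>")] += 1
    return counts

sample = [
    ": Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000",
    ": Error: No permission yq.queries.create@as in a given scope , code: 1000",
]
for (perm, scope), n in summarize(sample).items():
    print(f"{perm} denied {n}x in {scope}")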
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:38.577051Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:39.480084Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:40.522732Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-07T14:26:41.466610Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |99.2%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_drain.py::TestHive::test_drain_tablets |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_result_limits.py::TestResultLimits::test_many_rows |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |99.3%| [TM] {RESULT} ydb/tests/fq/common/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |99.3%| [TM] {RESULT} ydb/tests/fq/http_api/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] >> test_drain.py::TestHive::test_drain_on_stop |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> TPqWriterTest::TestWriteToTopic >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> TPqWriterTest::TestWriteToTopic [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> TPqWriterTest::TestDeferredWriteToTopic >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families >> TPqWriterTest::TestCheckpoints [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> 
TPqWriterTest::TestCheckpointWithEmptyBatch |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-04-07T14:24:29.563128Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7490577022673684911:2052], metadatafields: , partitions: 666 2025-04-07T14:24:29.563402Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-07T14:24:29.563490Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7490577022673684911:2052]) 2025-04-07T14:24:29.569097Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [1:7490577022673684912:2053] 2025-04-07T14:24:29.569164Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [1:7490577022673684912:2053], partIds: 666 cookie 1 2025-04-07T14:24:29.570335Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [1:7490577022673684912:2053], cookie 1 2025-04-07T14:24:29.570353Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-07T14:24:29.570362Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-04-07T14:24:29.570406Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Create session to [1:7490577022673684914:2055], generation 1 2025-04-07T14:24:29.570609Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [1:7490577022673684914:2055], connection id 1 partitions offsets (666 / ), 2025-04-07T14:24:29.570877Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [1:7490577022673684914:2055], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-07T14:24:29.571072Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [1:7490577022673684914:2055], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.574587Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [1:7490577022673684914:2055], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.574634Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-04-07T14:24:29.574641Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. 
TEvMessageBatch NextOffset 2 2025-04-07T14:24:29.574661Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-07T14:24:29.574833Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-04-07T14:24:29.574844Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-07T14:24:29.575714Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-04-07T14:24:29.575822Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 1 [1:7490577022673684914:2055] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 2 is waiting batch 0 has pending data 0 2025-04-07T14:24:29.575870Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7490577022673684917:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [1:7490577022673684914:2055] generation 1 2025-04-07T14:24:29.869295Z node 2 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7490577020955865457:2052], metadatafields: , partitions: 666 2025-04-07T14:24:29.870150Z node 2 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-07T14:24:29.870209Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7490577020955865457:2052]) 2025-04-07T14:24:29.870547Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [2:7490577020955865458:2053] 2025-04-07T14:24:29.870591Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [2:7490577020955865458:2053], partIds: 666 cookie 1 2025-04-07T14:24:29.870776Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [2:7490577020955865458:2053], cookie 1 2025-04-07T14:24:29.870785Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-07T14:24:29.870792Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-04-07T14:24:29.870867Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Create session to [2:7490577020955865460:2055], generation 1 2025-04-07T14:24:29.870907Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. 
Send TEvStartSession to [2:7490577020955865460:2055], connection id 1 partitions offsets (666 / ), 2025-04-07T14:24:29.871118Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [2:7490577020955865460:2055], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-07T14:24:29.872525Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7490577020955865460:2055], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.873319Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7490577020955865460:2055], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.873348Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-04-07T14:24:29.873361Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-04-07T14:24:29.873386Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-07T14:24:29.873474Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-04-07T14:24:29.873492Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-07T14:24:29.873654Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7490577020955865460:2055], reason Disconnected, cookie 999 2025-04-07T14:24:29.873786Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7490577020955865460:2055], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.874225Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7490577020955865460:2055], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-07T14:24:29.874245Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 3 2025-04-07T14:24:29.874257Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-07T14:24:29.874292Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. NextOffset 3 2025-04-07T14:24:29.874301Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-04-07T14:24:29.874631Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-04-07T14:24:29.874776Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. 
State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 2 [2:7490577020955865460:2055] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 3 is waiting batch 0 has pending data 0 2025-04-07T14:24:29.874796Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7490577020955865463:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [2:7490577020955865460:2055] generation 1 2025-04-07T14:24:30.329497Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7490577028238017529:2052], metadatafields: , partitions: 666 2025-04-07T14:24:30.329637Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-07T14:24:30.329663Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7490577028238017529:2052]) 2025-04-07T14:24:30.329843Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [3:7490577028238017530:2053] 2025-04-07T14:24:30.329893Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [3:7490577028238017530:2053], partIds: 666 cookie 1 2025-04-07T14:24:30.330076Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [3:7490577028238017530:2053], cookie 1 2025-04-07T14:24:30.330093Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-07T14:24:30.330106Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-04-07T14:24:30.330126Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7490577028238017535:2048], TxId: query_1, task: 0. PQ source. Create session ... -5256be62-ce0d6e21-1bdcf762] Starting single session 2025-04-07T14:27:50.299303Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:27:50.299367Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:27:50.299510Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Reconnecting session to cluster in 0.000000s 2025-04-07T14:27:50.303001Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Successfully connected. 
Initializing session 2025-04-07T14:27:50.315193Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Server session id: test_client_1_22_6785318649153634328_v1 2025-04-07T14:27:50.315253Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-07T14:27:50.315475Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T14:27:50.321630Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [225d313e-a11ea0d4-cfe32bc5-23544e2d] Write session: OnReadDone gRpcStatusCode: 0 2025-04-07T14:27:50.321723Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [225d313e-a11ea0d4-cfe32bc5-23544e2d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744036070321 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:27:50.321866Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [225d313e-a11ea0d4-cfe32bc5-23544e2d] Write session established. Init response: last_seq_no: 5 session_id: "225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0" 2025-04-07T14:27:50.321919Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0 2025-04-07T14:27:50.321957Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] MessageGroupId [225d313e-a11ea0d4-cfe32bc5-23544e2d] Write session: set DirectWriteToPartitionId 0 2025-04-07T14:27:50.322192Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-04-07T14:27:50.322251Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-04-07T14:27:50.322420Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-04-07T14:27:50.330843Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (empty maybe) 2025-04-07T14:27:50.335386Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Got ReadResponse, serverBytesSize = 1089, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427711 2025-04-07T14:27:50.335587Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427711 2025-04-07T14:27:50.335937Z :DEBUG: [local] Decompression task done. 
Partition/PartitionSessionId: 1 (0-4) 2025-04-07T14:27:50.336053Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Returning serverBytesSize = 1089 to budget 2025-04-07T14:27:50.336129Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] In ContinueReadingDataImpl, ReadSizeBudget = 1089, ReadSizeServerDelta = 52427711 2025-04-07T14:27:50.336387Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-04-07T14:27:50.336424Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-07T14:27:50.336450Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-04-07T14:27:50.336497Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-07T14:27:50.336501Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-04-07T14:27:50.336546Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-04-07T14:27:50.336555Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-07T14:27:50.336583Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-04-07T14:27:50.336589Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-07T14:27:50.336606Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (3-3) 2025-04-07T14:27:50.336629Z :DEBUG: [local] Take Data. Partition 0. Read: {4, 0} (4-4) 2025-04-07T14:27:50.336840Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-04-07T14:27:50.336911Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Returning serverBytesSize = 0 to budget 2025-04-07T14:27:50.337085Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Closing read session. Close timeout: 0.000000s 2025-04-07T14:27:50.337183Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-07T14:27:50.337249Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Counters: { Errors: 0 CurrentSessionLifetimeMs: 38 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:27:50.337429Z :NOTICE: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-07T14:27:50.337491Z :DEBUG: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] [] Abort session to cluster 2025-04-07T14:27:50.337814Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-07T14:27:50.338005Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Closing read session. Close timeout: 0.000000s 2025-04-07T14:27:50.338054Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-07T14:27:50.338095Z :INFO: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Counters: { Errors: 0 CurrentSessionLifetimeMs: 39 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-07T14:27:50.338228Z :NOTICE: [local] [local] [7ba83916-5256be62-ce0d6e21-1bdcf762] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-07T14:27:50.341276Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-07T14:27:50.341318Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session will now close 2025-04-07T14:27:50.341366Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-07T14:27:50.341489Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-07T14:27:50.341553Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [225d313e-a11ea0d4-cfe32bc5-23544e2d|f27df49e-ff965aec-d86b33e2-8db96c69_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-07T14:27:51.332011Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-04-07T14:27:51.348976Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session: try to update token 2025-04-07T14:27:51.349726Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Start write session. Will connect to nodeId: 0 2025-04-07T14:27:51.399611Z node 40 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-04-07T14:27:51.399857Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "56125476-c6525733-8f75a735-93058806" } 2025-04-07T14:27:51.400414Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session: close. Timeout 0.000000s 2025-04-07T14:27:51.400458Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session will now close 2025-04-07T14:27:51.400511Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session: aborting 2025-04-07T14:27:51.400833Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-04-07T14:27:51.400873Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session: gracefully shut down, all writes complete 2025-04-07T14:27:51.400906Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session is aborting and will not restart 2025-04-07T14:27:51.400969Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [] MessageGroupId [56125476-c6525733-8f75a735-93058806] Write session: destroy |99.4%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestBadSession::test_simple >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 21163, MsgBus: 16504 2025-04-07T14:24:51.513073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7490577118291452118:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:24:51.513321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d91/r3tmp/tmpOGW4kb/pdisk_1.dat 2025-04-07T14:24:52.102892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:52.146207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:52.146313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:52.153846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21163, node 1 2025-04-07T14:24:52.429796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:52.429821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:52.429829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:52.429923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16504 TClient is connected to server localhost:16504 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:53.302523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:54.699386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7490577131176354513:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:54.699526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 16388, MsgBus: 21477 2025-04-07T14:24:56.145744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7490577138961668132:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d91/r3tmp/tmpYz413E/pdisk_1.dat 2025-04-07T14:24:56.265047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-07T14:24:56.303908Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:56.330863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:56.330931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:56.332766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16388, node 2 2025-04-07T14:24:56.485245Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:24:56.485268Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:24:56.485280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:24:56.485368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21477 TClient is connected to server localhost:21477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:24:56.918777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:24:56.930329Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-07T14:24:59.504439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7490577151846570493:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:24:59.504516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 6544, MsgBus: 16808 2025-04-07T14:25:00.546313Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7490577153552465330:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:25:00.546373Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d91/r3tmp/tmpNiXjzs/pdisk_1.dat 2025-04-07T14:25:00.697716Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:25:00.752657Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:25:00.760861Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:25:00.761982Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6544, node 3 2025-04-07T14:25:00.956588Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:25:00.956608Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:25:00.956615Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:25:00.956707Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16808 TClient is connected to server localhost:16808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-07T14:25:01.745843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-07T14:25:01.769617Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-07T14:25:05.549107Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490577153552465330:2052];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:25:05.549181Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:25:06.275123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7490577179322269740:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:25:06.275266Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 23499, MsgBus: 32367 2025-04-07T14:25:07.434419Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7490577187767361403:2053];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:25:07.434469Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d91/r3tmp/tmpjnppGT/pdisk_1.dat 2025-04-07T14:25:07.631987Z node 4 :IMPORT WARN: Table profiles were not loade ... ct '('"column2" (Member $12 'Count0)) '('"season_id" (Member $12 '"season_id")) '('"series_id" (Member $12 '"series_id")))))) '($5 $5) (lambda '($13) '((Member $13 '"series_id") (Member $13 '"season_id"))))) (let $7 '('('"mode" '"flush"))) (let $8 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($6 '() '0)) (KiEffects) '('('"db" '"/Root/episodes" '"Select")) '())) $7 (Void))) (let $9 (DataSink 'result)) (let $10 (ResPull! (Left! $8) $9 (Key) (Nth (Right! $8) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $10 $9) $1 $7)) ) 2025-04-07 14:27:46.813 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-04-07 14:27:46.819 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-04-07 14:27:46.828 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-07 14:27:46.836 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-04-07 14:27:46.844 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-07 14:27:46.855 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-07 14:27:46.868 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-07 14:27:46.881 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-07 14:27:46.906 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-04-07 14:27:46.919 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-07 14:27:46.943 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-07 14:27:47.135 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-04-07 14:27:47.252 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] kqp_host.cpp:1343: Compiled query: ( (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-07 14:27:47.262 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-04-07 14:27:47.327 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291E602640) [KQP] kqp_host.cpp:1343: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-04-07 14:27:47.372 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291E602640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-04-07 14:27:47.439 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291E602640) [KQP] kqp_host.cpp:1343: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-04-07 14:27:47.561 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$6 $5) $1 $3)) ) 2025-04-07 14:27:47.570 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-07 14:27:47.575 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-07 14:27:47.582 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-04-07 14:27:47.594 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291EE03640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-07 14:27:47.719 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291E602640) [KQP] kqp_host.cpp:1343: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-07 14:27:47.726 INFO ydb-core-kqp-ut-view(pid=792657, tid=0x00007F291E602640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 3347, MsgBus: 22930 2025-04-07T14:27:49.472345Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7490577881095220025:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:27:49.472486Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000d91/r3tmp/tmpAAu4DC/pdisk_1.dat 2025-04-07T14:27:49.668455Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:27:49.718636Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:27:49.718803Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:27:49.720872Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3347, node 23 2025-04-07T14:27:49.804959Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-07T14:27:49.804995Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-07T14:27:49.805012Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-07T14:27:49.805231Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22930 TClient is connected to server localhost:22930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-07T14:27:51.120637Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-07T14:27:54.472761Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7490577881095220025:2051];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:27:54.472911Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-07T14:27:57.201074Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7490577915454959050:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:27:57.201313Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:27:57.277170Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7490577915454959078:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:27:57.277403Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:27:57.277838Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7490577915454959083:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-07T14:27:57.285651Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-07T14:27:57.309505Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7490577915454959085:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } |99.4%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> DataShardStats::HasSchemaChanges_Families [GOOD] >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] Test command err: 2025-04-07T14:24:12.335731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:88:2134], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-07T14:24:12.336014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-07T14:24:12.336198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/u6pr/000cb5/r3tmp/tmpvMOLcB/pdisk_1.dat 2025-04-07T14:24:12.840311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-07T14:24:12.916031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-07T14:24:12.922537Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2025-04-07T14:24:12.973264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-07T14:24:12.973443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-07T14:24:12.985966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-07T14:24:13.112228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-07T14:24:13.153895Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2025-04-07T14:24:13.154894Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2025-04-07T14:24:13.155346Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2025-04-07T14:24:13.155697Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-07T14:24:13.190503Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-07T14:24:13.191300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-07T14:24:13.191407Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-07T14:24:13.193019Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-07T14:24:13.193119Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-07T14:24:13.193186Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-07T14:24:13.193570Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-07T14:24:13.226372Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-07T14:24:13.227499Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-07T14:24:13.227677Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2025-04-07T14:24:13.227723Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-07T14:24:13.227765Z node 1 
:TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-07T14:24:13.227795Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:13.228214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:13.228261Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:13.229075Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-07T14:24:13.229170Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-07T14:24:13.230266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:639:2541], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:13.230302Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:13.230346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:639:2541], sessionId# [0:0:0] 2025-04-07T14:24:13.230420Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:13.230465Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:24:13.230512Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:24:13.230558Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:24:13.230600Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:24:13.230632Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-07T14:24:13.230668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:24:13.230781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:639:2541] 2025-04-07T14:24:13.230818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-07T14:24:13.230953Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-07T14:24:13.232588Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-07T14:24:13.232645Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-07T14:24:13.232761Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-07T14:24:13.232810Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-07T14:24:13.232855Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-07T14:24:13.232898Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-07T14:24:13.232928Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:13.233186Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 
is DelayCompleteNoMoreRestarts 2025-04-07T14:24:13.233225Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-07T14:24:13.233277Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-07T14:24:13.233308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:13.233397Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-07T14:24:13.233448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-07T14:24:13.233488Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-07T14:24:13.233519Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:13.233544Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-07T14:24:13.235252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-07T14:24:13.235302Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-07T14:24:13.246557Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-07T14:24:13.246635Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-07T14:24:13.246670Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-07T14:24:13.246723Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-07T14:24:13.246790Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-07T14:24:13.449239Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:669:2561], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:13.449320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:24:13.449362Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2559], serverId# [1:669:2561], sessionId# [0:0:0] 2025-04-07T14:24:13.449777Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:538:2466], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-07T14:24:13.449816Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-07T14:24:13.449975Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-07T14:24:13.450023Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-07T14:24:13.450062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-07T14:24:13.450096Z node 1 
:TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-07T14:24:13.458742Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 509 RawX2: 4294969746 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-07T14:24:13.458832Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:24:13.465394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:13.465452Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-07T14:24:13.465513Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-07T14:24:13.465565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-07T14:24:13.465602Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 720751862240 ... ute [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:28:03.115585Z node 13 :TX_DATASHARD TRACE: Execution status for [25500:281474976715664] at 72075186224037888 is DelayComplete 2025-04-07T14:28:03.115648Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [25500:281474976715664] at 72075186224037888 executing on unit CompleteOperation 2025-04-07T14:28:03.115710Z node 13 :TX_DATASHARD TRACE: Add [25500:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-07T14:28:03.115761Z node 13 :TX_DATASHARD TRACE: Trying to execute [25500:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-07T14:28:03.115805Z node 13 :TX_DATASHARD TRACE: Execution status for [25500:281474976715664] at 72075186224037888 is Executed 2025-04-07T14:28:03.115838Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [25500:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-07T14:28:03.115878Z node 13 :TX_DATASHARD TRACE: Execution plan for [25500:281474976715664] at 72075186224037888 has finished 2025-04-07T14:28:03.115943Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:28:03.116003Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-07T14:28:03.116066Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:28:03.116127Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:28:03.126012Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 25500 txid# 281474976715664} 2025-04-07T14:28:03.126151Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-04-07T14:28:03.126276Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:28:03.127745Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-07T14:28:03.127820Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-07T14:28:03.127940Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 
72075186224037888 at tablet 72075186224037888 send result to client [13:378:2373], exec latency: 0 ms, propose latency: 0 ms 2025-04-07T14:28:03.128022Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-04-07T14:28:03.128190Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-07T14:28:03.131859Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1123:2941], Recipient [13:895:2728]: NKikimr::TEvTabletPipe::TEvClientConnected 2025-04-07T14:28:03.131931Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-07T14:28:03.133241Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:378:2373], Recipient [13:895:2728]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-04-07T14:28:03.133308Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-07T14:28:03.133378Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-04-07T14:28:03.133475Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-04-07T14:28:03.150147Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1123:2941], Recipient [13:895:2728]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:28:03.150275Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-07T14:28:04.490916Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:895:2728]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:28:04.491061Z node 13 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T14:28:04.491335Z node 13 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 2: RowCount 3, DataSize 130, IndexSize 82, PartCount 1, with schema changes 2025-04-07T14:28:04.491544Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4160 Memory: 124120 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-07T14:28:04.492944Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1159:2977], Recipient [13:895:2728]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-07T14:28:04.493027Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-07T14:28:04.493098Z node 13 :TX_DATASHARD DEBUG: Server connected at leader 
tablet# 72075186224037888, clientId# [13:1158:2976], serverId# [13:1159:2977], sessionId# [0:0:0] 2025-04-07T14:28:04.493266Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1157:2975], Recipient [13:895:2728]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-07T14:28:04.493480Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1157:2975], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-07T14:28:04.494955Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.451024Z 2025-04-07T14:28:04.495032Z node 13 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T14:28:04.495110Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-04-07T14:28:04.501732Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:892:2726], Recipient [13:895:2728]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:28:04.502523Z node 13 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037888, for tableId 2: RowCount 3, DataSize 130, IndexSize 82, PartCount 1, with schema changes, LoadedSize 82, Spent{0.000s wa 0.000s cnt 2}, HistogramKeys 0 2025-04-07T14:28:04.503138Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1164:2981], Recipient [13:895:2728]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-07T14:28:04.503196Z node 13 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037888, for tableId 2 2025-04-07T14:28:04.503339Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-04-07T14:28:04.509225Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.451024Z 2025-04-07T14:28:04.509334Z node 13 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T14:28:04.509382Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-04-07T14:28:04.509467Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1157:2975]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-07T14:28:04.511999Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:892:2726], Recipient [13:895:2728]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-07T14:28:04.517213Z node 13 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037888, for tableId 2: RowCount 3, DataSize 130, IndexSize 82, PartCount 1, LoadedSize 82, Spent{0.000s wa 0.000s cnt 2}, HistogramKeys 0 2025-04-07T14:28:04.517565Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1171:2987], Recipient [13:895:2728]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-07T14:28:04.517617Z node 13 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037888, for tableId 2 2025-04-07T14:28:04.517699Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-04-07T14:28:04.529254Z node 13 :TX_DATASHARD DEBUG: 
Updated last full compaction of tablet# 72075186224037888, tableId# 2, last full compaction# 1970-01-01T00:00:30.451024Z 2025-04-07T14:28:05.821557Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:895:2728]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-07T14:28:05.821688Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-07T14:28:05.821811Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-04-07T14:28:05.821908Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-07T14:28:05.821968Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-07T14:28:05.822364Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-07T14:28:05.822428Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-07T14:28:05.823421Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:895:2728]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-07T14:28:05.823501Z node 13 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2025-04-07T14:28:05.823671Z node 13 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 2: RowCount 3, DataSize 130, IndexSize 82, PartCount 1 2025-04-07T14:28:05.823863Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2404 Memory: 124120 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 |99.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] |99.5%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.5%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestBadSession::test_simple [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] 
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] |99.6%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> 
test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.6%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] |99.7%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> 
test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_result_limits.py::TestResultLimits::test_large_row >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_public_api.py::TestDocApiTables::test_create_table |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> 
test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.7%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |99.7%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] 2025-04-07 14:34:11,773 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-07 14:34:12,099 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 778698 757M 755M 389M ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/u6pr/00152f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 901103 651M 13.7M 363M ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/u6pr/00152f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mo 901106 1.3G 1.3G 1006M └─ ydbd server --suppress-version-check --node=1 --tenant=/default --node-kind=yq --log-file-name=/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/test- Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in 
_hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/fq/mem_alloc/test_alloc_default.py", line 94, in test_default_limits query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.STREAMING).result.query_id File "ydb/tests/tools/fq_runner/fq_client.py", line 185, in create_query return self.create_query_impl(name=name, File "library/python/retry/__init__.py", line 214, in wrapped return _retry(obj.retry_conf, functools.partial(f, obj, *f_args, **f_kwargs)) File "library/python/retry/__init__.py", line 224, in _retry return f() File "ydb/tests/tools/fq_runner/fq_client.py", line 169, in create_query_impl response = self.service.CreateQuery( File "contrib/python/grpcio/py3/grpc/_channel.py", line 1028, in __call__ state, call, = self._blocking(request, timeout, metadata, credentials, File "contrib/python/grpcio/py3/grpc/_channel.py", line 1017, in _blocking event = call.next_event() File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007fa573374640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57206a640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57a0b7640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa576491640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57467e640 (most recent call first): File 
"contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa5782a4640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57be36640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57d9d5640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57b3c1640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fa57ecdf640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fa5ae73a940 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 171 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 196 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 422 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 441 in stop File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 55 in stop File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 734 in stop File "ydb/tests/fq/mem_alloc/test_alloc_default.py", line 52 in kikimr File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File 
"contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ut has expired: Command '['/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/u6pr/00152f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/u6pr/00152f', '--source-root', '/home/runner/.ya/build/build_root/u6pr/00152f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/7573699373/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ut has expired: Command '['/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/u6pr/00152f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/u6pr/00152f', '--source-root', '/home/runner/.ya/build/build_root/u6pr/00152f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/u6pr/00152f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', 
'--test-tool-bin', '/home/runner/.ya/tools/v4/7573699373/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-04-07 14:34:45,716 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-07 14:34:45,716 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.8%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> 
test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_encryption.py::TestEncryption::test_simple_encryption >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> KqpQueryService::ReplyPartLimitProxyNode >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> S3PathStyleBackup::DisableVirtualAddressing >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> Replication::Types |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> ConsistentIndexRead::InteractiveTx >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> Replication::Types [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::Types [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> NodeIdDescribe::HasDistribution [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> Backup::UuidValue >> KqpQuerySession::NoLocalAttach |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest >> Backup::UuidValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: 
"ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/permissions.pb" Found S3 object: "ProducerUuidValueBackup/scheme.pb" |99.9%| [TM] {RESULT} ydb/tests/functional/backup/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> KqpQuerySession::NoLocalAttach [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 104 ydb/tests/fq/mem_alloc [size:medium] ------ sole chunk ran 12 tests (total:643.46s - recipes:9.43s test:600.03s recipes:3.06s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_result_limits.py::TestResultLimits::test_many_rows (good) duration: 237.52s test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] (good) duration: 174.56s test_result_limits.py::TestResultLimits::test_large_row (good) duration: 105.40s test_result_limits.py::TestResultLimits::test_quotas[kikimr0] (good) duration: 67.46s test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] (good) duration: 22.73s 7 tests were not launched inside chunk. Killed by timeout (600 s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.4G (8830052K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 774866 81.6M 81.5M 7.1M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 775145 42.2M 25.9M 11.3M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 778698 723M 720M 315M │ └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 880583 618M 13.4M 290M │ ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 880587 5.4G 5.4G 5.0G │ └─ ydbd server --suppress-version-check --node=1 --tenant=/default --node-kind=yq --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc 776235 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --node=1 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr ------ TIMEOUT: 5 - GOOD, 7 - NOT_LAUNCHED ydb/tests/fq/mem_alloc ------ [test_discovery.py] chunk ran 3 tests (total:333.25s - setup:0.03s test:333.12s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.4G (14038128K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 812925 81.8M 81.8M 6.9M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 813794 40.8M 25.0M 9.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 813865 887M 891M 791M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 846429 1.4G 1.4G 1011M ├─ ydbd server --suppress-version-check --node=1 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846431 1.4G 1.4G 1009M ├─ ydbd server --suppress-version-check --node=2 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846433 1.4G 1.4G 1005M ├─ ydbd server --suppress-version-check --node=3 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846434 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --node=4 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846435 1.4G 1.4G 1009M ├─ ydbd server --suppress-version-check --node=5 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846437 1.4G 1.4G 975M ├─ ydbd server --suppress-version-check --node=6 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846439 1.4G 1.4G 999M ├─ ydbd 
server --suppress-version-check --node=7 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846440 1.4G 1.3G 964M ├─ ydbd server --suppress-version-check --node=8 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o 846441 1.4G 1.4G 973M └─ ydbd server --suppress-version-check --node=9 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_o Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/postgresql [size:medium] ------ sole chunk ran 14 tests (total:251.82s - setup:0.01s test:251.68s) [fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (188.41s) teardown failed: ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:448: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E Waiting for drain to complete: 1 tablets are online on node. E Drain completed. E Drain completed, but 1 tablet(s) are online. E warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 E warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 E warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 E warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 E warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 E warning: address range table at offset 0xac0 has a premature terminator entry at offset 0xad0 E warning: address range table at offset 0xb20 has a premature terminator entry at offset 0xb30 E warning: address range table at offset 0xb50 has a premature terminator entry at offset 0xb60 E warning: address range table at offset 0x1150 has a premature terminator entry at offset 0x1160 E warning: address range table at offset 0x1180 has a premature terminator entry at offset 0x1190 E warning: address range table at offset 0x11b0 has a premature terminator entry at offset 0x11c0 E warning: address range table at offset 0x11e0 has a premature terminator entry at offset 0x11f0 E warning: address range table a ..[snippet truncated].. 
range table at offset 0x11b0 has a premature terminator entry at offset 0x11c0 E warning: address range table at offset 0x11e0 has a premature terminator entry at offset 0x11f0 E warning: address range table at offset 0x1210 has a premature terminator entry at offset 0x1220 E warning: address range table at offset 0x1240 has a premature terminator entry at offset 0x1250 E warning: address range table at offset 0x1270 has a premature terminator entry at offset 0x1280 E warning: address range table at offset 0x12a0 has a premature terminator entry at offset 0x12b0 E warning: address range table at offset 0x12d0 has a premature terminator entry at offset 0x12e0 E warning: address range table at offset 0x1300 has a premature terminator entry at offset 0x1310 E warning: address range table at offset 0x1330 has a premature terminator entry at offset 0x1340 E warning: address range table at offset 0x1360 has a premature terminator entry at offset 0x1370 E warning: address range table at offset 0x1390 has a premature terminator entry at offset 0x13a0 E warning: address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 E warning: address range table at offset 0x13f0 has a premature terminator entry at offset 0x1400 E warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 E warning: address range table at offset 0x1450 has a premature terminator entry at offset 0x1460 E warning: address range table at offset 0x1480 has a premature terminator entry at offset 0x1490 E warning: address range table at offset 0x14e0 has a premature terminator entry at offset 0x14f0 E warning: address range table at offset 0x1510 has a premature terminator entry at offset 0x1520 E warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 E warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 E warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 E warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 E warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 E warning: address range table at offset 0x1630 has a premature terminator entry at offset 0x1640 E warning: address range table at offset 0x1660 has a premature terminator e... 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff ------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql ydb/tests/functional/serializable [size:medium] ------ sole chunk ran 4 tests (total:60.23s - recipes:10.01s test:45.50s recipes:4.61s) [fail] test.py::test_local [default-linux-x86_64-release-asan] (37.38s) ydb/tests/functional/serializable/test.py:110: in test_local asyncio.run(async_wrapper()) contrib/tools/python3/Lib/asyncio/runners.py:194: in run return runner.run(main) contrib/tools/python3/Lib/asyncio/runners.py:118: in run return self._loop.run_until_complete(task) contrib/tools/python3/Lib/asyncio/base_events.py:687: in run_until_complete return future.result() ydb/tests/functional/serializable/test.py:104: in async_wrapper await checker.async_run(options) ydb/tests/tools/ydb_serializable/lib/__init__.py:1018: in async_run await self.async_perform_test(history, table, options, checker) ydb/tests/tools/ydb_serializable/lib/__init__.py:931: in async_perform_test await asyncio.gather(*futures) ydb/tests/tools/ydb_serializable/lib/__init__.py:700: in async_perform_point_reads_writes rss = await self.async_retry_operation(perform, deadline) ydb/tests/tools/ydb_serializable/lib/__init__.py:434: in async_retry_operation result = await callable(session) ydb/tests/tools/ydb_serializable/lib/__init__.py:673: in perform rss = await tx.execute( contrib/python/ydb/py3/ydb/aio/table.py:182: in execute return await super().execute(query, parameters, commit_tx, settings) contrib/python/ydb/py3/ydb/aio/pool.py:254: in __call__ return await connection( contrib/python/ydb/py3/ydb/aio/connection.py:194: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.PreconditionFailed: message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 4 column: 33 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 4 column: 33 } issue_code: 8001 severity: 2 } ,message: "Not enough resources to execute query locally and no information about other nodes (estimation: 58785792;ComputeTasks:4;NodeTasks:0;)" severity: 1 (server_code: 400120) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serializable/test-results/py3test/testing_out_stuff/test.py.test_local.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serializable/test-results/py3test/testing_out_stuff ------ FAIL: 3 - GOOD, 1 - FAIL ydb/tests/functional/serializable ydb/tests/functional/serverless [size:medium] nchunks:20 ------ 
[test_serverless.py 4/10] chunk ran 2 tests (total:241.52s - setup:0.01s test:240.32s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (68.28s) ydb/tests/functional/serverless/test_serverless.py:444: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:341: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:361: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:443: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'): E AssertionError: Regex pattern did not match. E Regex: '.*OUT_OF_SPACE.*' E Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." 
issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (164.22s) ydb/tests/functional/serverless/test_serverless.py:444: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:341: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:361: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:443: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'): E AssertionError: Regex pattern did not match. E Regex: '.*OUT_OF_SPACE.*' E Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." 
issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 18 - GOOD, 2 - FAIL ydb/tests/functional/serverless ------ sole chunk ran 1 test (total:261.73s - test:261.53s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.6G (15305140K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 776245 81.7M 81.7M 14.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 776448 41.8M 37.9M 17.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 776483 826M 827M 717M └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini - 778725 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --node=1 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778726 2.4G 2.4G 1.3G ├─ ydbd server --suppress-version-check --node=2 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778728 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=3 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778729 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=4 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778732 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --node=5 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778733 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --node=6 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778738 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --node=7 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t 778742 1.6G 1.6G 1.1G └─ ydbd server --suppress-version-check --node=8 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ------ [test_disk.py 0/10] chunk ran 1 test (total:179.24s - test:179.12s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.1G (13703252K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 741177 81.7M 81.7M 14.1M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 741223 41.1M 37.2M 16.9M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 741226 776M 777M 674M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 742362 1.4G 1.4G 990M ├─ ydbd server --suppress-version-check --node=1 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742374 1.3G 1.4G 974M ├─ ydbd server --suppress-version-check --node=2 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742386 1.3G 1.4G 980M ├─ ydbd server --suppress-version-check --node=3 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742395 1.3G 1.3G 970M ├─ ydbd server --suppress-version-check --node=4 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742403 1.3G 1.3G 968M ├─ ydbd server --suppress-version-check --node=5 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742409 1.3G 1.3G 965M ├─ ydbd server --suppress-version-check --node=6 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742448 1.3G 1.4G 981M ├─ ydbd server --suppress-version-check --node=7 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742474 1.3G 1.3G 969M ├─ ydbd server --suppress-version-check --node=8 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 742485 1.4G 1.4G 982M └─ ydbd server --suppress-version-check --node=9 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:250.27s - test:250.03s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.6G (14228476K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 741088 81.8M 81.8M 14.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 741119 41.2M 37.3M 17.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 741148 838M 841M 739M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 741745 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=1 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741746 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=2 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741747 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=3 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741748 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=4 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741749 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=5 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741750 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --node=6 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741751 1.5G 1.6G 1.2G ├─ ydbd server --suppress-version-check --node=7 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing 741752 1.6G 1.6G 1.2G └─ ydbd server --suppress-version-check --node=8 --log-file-name=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/core/fq/libs/ydb/ut nchunks:10 ------ [0/10] chunk ran 2 tests (total:56.75s - recipes:21.18s test:30.77s recipes:4.71s) [fail] TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [default-linux-x86_64-release-asan] (2.11s) assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:433, virtual void NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails2::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Warning: Table: local/ShouldRollbackTransactionWhenCheckFails/test, pk: masterId, current generation: 11, expected/new generation: 11, operation: Register, code: 400130 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/ydb/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/ydb/ut/test-results/unittest/testing_out_stuff/TCheckGenerationTest.ShouldRollbackTransactionWhenCheckFails2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/ydb/ut/test-results/unittest/testing_out_stuff/TCheckGenerationTest.ShouldRollbackTransactionWhenCheckFails2.out ------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/fq/libs/ydb/ut ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:17.82s - test:17.78s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (3.98s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:18.44s - test:18.38s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (3.92s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:17.19s - setup:0.01s test:17.15s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (3.85s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:17.63s - setup:0.01s test:17.59s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (3.97s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/data_integrity nchunks:50 ------ [4/50] chunk ran 1 test (total:66.37s - test:60.05s) Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: KqpDataIntegrityTrails::UpsertEvWrite (timeout) duration: 63.98s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/stderr [timeout] KqpDataIntegrityTrails::UpsertEvWrite [default-linux-x86_64-release-asan] (63.98s) Killed by timeout (60 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/KqpDataIntegrityTrails.UpsertEvWrite.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/data_integrity/test-results/unittest/testing_out_stuff/KqpDataIntegrityTrails.UpsertEvWrite.out ------ TIMEOUT: 6 - GOOD, 1 - TIMEOUT ydb/core/kqp/ut/data_integrity ydb/core/kqp/ut/indexes [size:medium] nchunks:50 ------ [36/50] chunk ran 2 tests (total:138.04s - test:137.97s) [fail] KqpMultishardIndex::WriteIntoRenamingSyncIndex [default-linux-x86_64-release-asan] (76.53s) assertion failed at ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2506, void NKikimr::NKqp::NTestSuiteKqpMultishardIndex::CheckWriteIntoRenamingIndex(bool): (vp.GetUint32() == startVal++) failed: (2147483649 != 1000) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x186EC68F 1. /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2506: CheckWriteIntoRenamingIndex @ 0x17DD7E40 2. /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1353: operator() @ 0x17DED0F7 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1353:1) &> @ 0x17DED0F7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1353:1) &> @ 0x17DED0F7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17DED0F7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17DED0F7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1872B588 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1872B588 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1872B588 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x186F31F8 11. /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1353: Execute @ 0x17DEC2C3 12. 
/-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x186F4AC5 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x187251CC 14. ??:0: ?? @ 0x7F2116792D8F 15. ??:0: ?? @ 0x7F2116792E3F 16. ??:0: ?? @ 0x15802028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.WriteIntoRenamingSyncIndex.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.WriteIntoRenamingSyncIndex.out ------ FAIL: 97 - GOOD, 1 - FAIL ydb/core/kqp/ut/indexes ydb/core/kqp/ut/join [size:medium] nchunks:50 ------ [27/50] chunk ran 5 tests (total:504.20s - test:503.80s) [fail] KqpJoinOrder::Chain65Nodes [default-linux-x86_64-release-asan] (371.62s) assertion failed at ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:201, void NKikimr::NKqp::TChainTester::JoinTables(): (result.GetStatus() == EStatus::SUCCESS) failed: (TIMEOUT != SUCCESS) , with diff: (TIM|SUCC)E(OUT|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1831A3DF 1. /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:201: JoinTables @ 0x17A58252 2. /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:147: Test @ 0x17A30AD0 3. /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:362: Execute_ @ 0x17A30AD0 4. /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:360: operator() @ 0x17A54B37 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:360:1) &> @ 0x17A54B37 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:360:1) &> @ 0x17A54B37 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17A54B37 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17A54B37 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x183592D8 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x183592D8 11. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x183592D8 12. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18320F48 13. /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:360: Execute @ 0x17A53D03 14. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18322815 15. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x18352F1C 16. ??:0: ?? @ 0x7FCA126FFD8F 17. ??:0: ?? @ 0x7FCA126FFE3F 18. ??:0: ?? 
@ 0x15781028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.Chain65Nodes.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.Chain65Nodes.out ------ FAIL: 254 - GOOD, 1 - FAIL ydb/core/kqp/ut/join ydb/core/kqp/ut/olap [size:medium] nchunks:200 ------ [137/200] chunk ran 1 test (total:15.48s - setup:0.01s test:15.43s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit1_1 [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1.out ------ [138/200] chunk ran 1 test (total:22.46s - setup:0.02s test:22.35s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1_clean.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1_clean.out ------ [139/200] chunk ran 1 test (total:17.13s - setup:0.01s test:17.08s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1_clean_with_restarts.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_1_clean_with_restarts.out ------ [140/200] chunk ran 1 test (total:17.33s - test:17.27s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_3_1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_3_1.out ------ [141/200] chunk ran 1 test (total:52.22s - test:52.04s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_3_2_1_clean.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit1_3_2_1_clean.out ------ [142/200] chunk ran 1 test (total:15.66s - setup:0.03s test:15.54s) [crashed] KqpOlapBlobsSharing::BlobsSharingSplit3_1 [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit3_1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.BlobsSharingSplit3_1.out ------ [143/200] chunk ran 1 test (total:126.44s - test:125.61s) [fail] KqpOlapBlobsSharing::ChangeSchemaAndSplit [default-linux-x86_64-release-asan] (117.91s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[9974u]]|[[20000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABC328 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:528: Execute_ @ 0x17ABC328 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F5A23523D8F 18. ??:0: ?? @ 0x7F5A23523E3F 19. ??:0: ?? 
@ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.ChangeSchemaAndSplit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.ChangeSchemaAndSplit.out ------ [144/200] chunk ran 1 test (total:260.42s - test:259.67s) [fail] KqpOlapBlobsSharing::HugeSchemeHistory [default-linux-x86_64-release-asan] (254.16s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[53u]]|[[100u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABEEBC 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:564: Execute_ @ 0x17ABEEBC 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FD1D8EFFD8F 18. ??:0: ?? @ 0x7FD1D8EFFE3F 19. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.HugeSchemeHistory.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.HugeSchemeHistory.out ------ [145/200] chunk ran 1 test (total:123.76s - setup:0.03s test:123.50s) [fail] KqpOlapBlobsSharing::MultipleMerge [default-linux-x86_64-release-asan] (117.62s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17AC162F 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:580: Execute_ @ 0x17AC162F 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FBCCB133D8F 18. ??:0: ?? @ 0x7FBCCB133E3F 19. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMerge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMerge.out ------ [146/200] chunk ran 1 test (total:143.00s - test:142.15s) [fail] KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [default-linux-x86_64-release-asan] (136.00s) assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30556 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:659: Execute_ @ 0x17AC593D 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FEDAC710D8F 16. ??:0: ?? @ 0x7FEDAC710E3F 17. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMergesWithRestartsAfterWait.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMergesWithRestartsAfterWait.out ------ [147/200] chunk ran 1 test (total:146.74s - test:146.30s) [fail] KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait [default-linux-x86_64-release-asan] (140.46s) assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15122 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:675: Execute_ @ 0x17AC64C8 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7F6BD5BFDD8F 16. ??:0: ?? @ 0x7F6BD5BFDE3F 17. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMergesWithRestartsWhenWait.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleMergesWithRestartsWhenWait.out ------ [148/200] chunk ran 1 test (total:115.74s - test:114.71s) [fail] KqpOlapBlobsSharing::MultipleSchemaVersions [default-linux-x86_64-release-asan] (108.00s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[2u]]|[[3u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x17ABE84F 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:545: Execute_ @ 0x17ABE84F 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7F32EE7B2D8F 18. ??:0: ?? @ 0x7F32EE7B2E3F 19. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSchemaVersions.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSchemaVersions.out ------ [149/200] chunk ran 1 test (total:538.06s - test:537.69s) [crashed] KqpOlapBlobsSharing::MultipleSplits [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplits.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplits.out ------ [150/200] chunk ran 1 test (total:521.68s - test:521.53s) [crashed] KqpOlapBlobsSharing::MultipleSplitsThenMerges [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsThenMerges.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsThenMerges.out ------ [151/200] chunk ran 1 test (total:108.27s - setup:0.01s test:107.93s) [fail] KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait [default-linux-x86_64-release-asan] (102.25s) assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24146 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:628: Execute_ @ 0x17AC3CB1 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FCE9C5A4D8F 16. ??:0: ?? @ 0x7FCE9C5A4E3F 17. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsWithRestartsAfterWait.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsWithRestartsAfterWait.out ------ [152/200] chunk ran 1 test (total:84.05s - test:83.65s) [fail] KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait [default-linux-x86_64-release-asan] (70.23s) assertion failed at ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284, void NKikimr::NKqp::NTestSuiteKqpOlapBlobsSharing::TReshardingTest::WaitResharding(const TString &): (result.GetStatus() == NYdb::EStatus::SUCCESS) failed: (CLIENT_DEADLINE_EXCEEDED != SUCCESS)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4715 , with diff: (|SU)C(LIENT_DEADLINE_EX|)CE(EDED|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:284: WaitResharding @ 0x17AB979D 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:644: Execute_ @ 0x17AC483E 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 12. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 15. ??:0: ?? @ 0x7FCA16EB1D8F 16. ??:0: ?? @ 0x7FCA16EB1E3F 17. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsWithRestartsWhenWait.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.MultipleSplitsWithRestartsWhenWait.out ------ [154/200] chunk ran 1 test (total:50.14s - setup:0.01s test:50.05s) [fail] KqpOlapBlobsSharing::TableReshardingConsistency64 [default-linux-x86_64-release-asan] (43.92s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57685u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x17AAF25E 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:401: Execute_ @ 0x17AAABB0 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7FD681244D8F 18. ??:0: ?? @ 0x7FD681244E3F 19. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.TableReshardingConsistency64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.TableReshardingConsistency64.out ------ [155/200] chunk ran 1 test (total:56.18s - test:56.07s) [fail] KqpOlapBlobsSharing::TableReshardingModuloN [default-linux-x86_64-release-asan] (51.71s) strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:553, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57643u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:553: CompareYson @ 0x463AF800 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x17AD8DC2 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x17AAF25E 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:405: Execute_ @ 0x17AB51FA 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x17ACE957 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x17ACE957 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17ACE957 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17ACE957 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x17ACDB23 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 17. ??:0: ?? @ 0x7EFC7C007D8F 18. ??:0: ?? @ 0x7EFC7C007E3F 19. ??:0: ?? 
@ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.TableReshardingModuloN.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.TableReshardingModuloN.out ------ [156/200] chunk ran 1 test (total:41.52s - test:41.32s) [crashed] KqpOlapBlobsSharing::UpsertWhileSplitTest [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.UpsertWhileSplitTest.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapBlobsSharing.UpsertWhileSplitTest.out ------ [171/200] chunk ran 1 test (total:602.35s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: KqpOlapSparsed::SwitchingMultiColumn (timeout) duration: 600.69s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/stderr [timeout] KqpOlapSparsed::SwitchingMultiColumn [default-linux-x86_64-release-asan] (600.69s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapSparsed.SwitchingMultiColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpOlapSparsed.SwitchingMultiColumn.out ------ [2/200] chunk ran 2 tests (total:29.04s - test:28.96s) [crashed] KqpDecimalColumnShard::TestAggregation [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==295148==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000000 (pc 0x000021583120 bp 0x7f23f38c9aa0 sp 0x7f23f38c9a10 T95) ==295148==The signal is caused by a READ memory access. ==295148==Hint: address points to the zero page. 
2025-04-07T14:01:42.289509Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7490571130052482342:2054];send_to=[0:7307199536658146131:7762515]; 2025-04-07T14:01:42.306199Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; AddressSanitizer:DEADLYSIGNAL warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry ..[snippet truncated].. 
0x17cf05a7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #23 0x17cf05a7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #24 0x185a0428 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #25 0x185a0428 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #26 0x185a0428 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #27 0x18568098 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #28 0x17cef773 in NKikimr::NKqp::NTestSuiteKqpDecimalColumnShard::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1 #29 0x18569965 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #30 0x1859a06c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #31 0x7f2402ff3d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) ==295148==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpDecimalColumnShard.TestAggregation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpDecimalColumnShard.TestAggregation.out ------ [3/200] chunk ran 2 tests (total:31.76s - test:31.73s) [fail] KqpDecimalColumnShard::TestFilterCompare [default-linux-x86_64-release-asan] (13.91s) assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:806, TString NKikimr::NKqp::StreamResultToYsonImpl(TIterator &, TVector *, bool, const NYdb::EStatus &, const TString &) [TIterator = NYdb::NTable::TScanQueryPartIterator]: (streamPart.EOS())
: Fatal: Execution, code: 1060
: Fatal: ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn(): requirement false failed, message: Unsupported content: Decimal, code: 1 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1856152F 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:806: StreamResultToYsonImpl @ 0x463C0D75 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:829: ?? @ 0x463BFD07 3. /-S/ydb/core/kqp/ut/common/columnshard.cpp:107: ReadData @ 0x4652AEF5 4. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:44: CheckQuery @ 0x17CE1BBE 5. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:166: operator() @ 0x17CE1BBE 6. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:179: Execute_ @ 0x17CE16E4 7. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: operator() @ 0x17CF05A7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> @ 0x17CF05A7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26:1) &> @ 0x17CF05A7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17CF05A7 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17CF05A7 12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x185A0428 13. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x185A0428 14. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x185A0428 15. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18568098 16. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: Execute @ 0x17CEF773 17. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18569965 18. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1859A06C 19. ??:0: ?? @ 0x7FA30AC02D8F 20. ??:0: ?? @ 0x7FA30AC02E3F 21. ??:0: ?? @ 0x15851028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpDecimalColumnShard.TestFilterCompare.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/KqpDecimalColumnShard.TestFilterCompare.out ------ TIMEOUT: 183 - GOOD, 11 - FAIL, 1 - TIMEOUT, 10 - CRASHED ydb/core/kqp/ut/olap ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [46/50] chunk ran 3 tests (total:159.74s - test:159.67s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (70.72s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1853261F 1. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551: Execute_ @ 0x17C8C178 2. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: operator() @ 0x17C9EEF7 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17C9EEF7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17C9EEF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17C9EEF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17C9EEF7 7. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x18571518 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x18571518 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x18571518 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18539188 11. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: Execute @ 0x17C9E07B 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1853AA55 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1856B15C 14. ??:0: ?? @ 0x7F58271E5D8F 15. ??:0: ?? @ 0x7F58271E5E3F 16. ??:0: ?? @ 0x15810028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 147 - GOOD, 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/scan [size:medium] nchunks:50 ------ [1/50] chunk ran 3 tests (total:430.09s - test:430.03s) [fail] KqpRequestContext::TraceIdInErrorMessage [default-linux-x86_64-release-asan] (400.62s) (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:694: Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scan/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scan/test-results/unittest/testing_out_stuff/KqpRequestContext.TraceIdInErrorMessage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scan/test-results/unittest/testing_out_stuff/KqpRequestContext.TraceIdInErrorMessage.out ------ FAIL: 117 - GOOD, 1 - FAIL ydb/core/kqp/ut/scan ydb/core/kqp/ut/scheme [size:medium] nchunks:50 ------ [13/50] chunk ran 6 tests (total:54.23s - test:54.20s) [fail] KqpOlapScheme::TenThousandColumns [default-linux-x86_64-release-asan] (26.02s) assertion failed at ydb/core/kqp/ut/common/columnshard.cpp:66, void NKikimr::NKqp::TTestHelper::CreateTable(const TColumnTableBase &, const EStatus): (result.GetStatus() == expectedStatus) failed: (SCHEME_ERROR != SUCCESS)
: Error: Execution, code: 1060
:1:179052: Error: Executing CREATE TABLE
: Error: Too many columns. new: 10001. Limit: 10000, code: 2003 , with diff: S(|UC)C(H|)E(ME_ERROR|SS) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1B02E13F 1. /-S/ydb/core/kqp/ut/common/columnshard.cpp:66: CreateTable @ 0x48FE2267 2. /-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:9001: Execute_ @ 0x1A5A59C5 3. /-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:8072: operator() @ 0x1A6D8277 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:8072:1) &> @ 0x1A6D8277 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:8072:1) &> @ 0x1A6D8277 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x1A6D8277 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x1A6D8277 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1B06D038 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1B06D038 10. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1B06D038 11. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1B034CA8 12. /-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:8072: Execute @ 0x1A6D7443 13. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1B036575 14. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1B066C7C 15. ??:0: ?? @ 0x7F15A2898D8F 16. ??:0: ?? @ 0x7F15A2898E3F 17. ??:0: ?? @ 0x17EEE028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/KqpOlapScheme.TenThousandColumns.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/KqpOlapScheme.TenThousandColumns.out ------ [16/50] chunk ran 6 tests (total:603.35s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: KqpScheme::AlterAsyncReplication (timeout) duration: 575.62s KqpScheme::AlterColumnTableTiering (good) duration: 8.57s KqpScheme::AlterGroup (good) duration: 4.28s KqpScheme::AddDropColumn (good) duration: 4.04s KqpScheme::AlterCompressionLevelInColumnFamily (good) duration: 4.04s KqpScheme::AlterColumnTableTtl (good) duration: 3.79s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/stderr [timeout] KqpScheme::AlterAsyncReplication [default-linux-x86_64-release-asan] (575.62s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/KqpScheme.AlterAsyncReplication.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/KqpScheme.AlterAsyncReplication.out ------ TIMEOUT: 266 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/core/kqp/ut/scheme ydb/core/kqp/ut/service [size:medium] nchunks:50 ------ [28/50] chunk ran 3 
tests (total:86.30s - test:86.22s) [fail] KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [default-linux-x86_64-release-asan] (66.30s) assertion failed at ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:3798, virtual void NKikimr::NKqp::NTestSuiteKqpQueryService::TTestCaseTableSink_OltpReplace::Execute_(NUnitTest::TTestContext &) [HasSecondaryIndex = true]: (it.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1AB9234F 1. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:3798: Execute_ @ 0x1A20081D 2. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: operator() @ 0x1A191EF7 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x1A191EF7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x1A191EF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x1A191EF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x1A191EF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1ABD1248 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1ABD1248 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1ABD1248 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1AB98EB8 11. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: Execute @ 0x1A1910C3 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1AB9A785 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1ABCAE8C 14. ??:0: ?? @ 0x7F2730F2FD8F 15. ??:0: ?? @ 0x7F2730F2FE3F 16. ??:0: ?? @ 0x17DAD028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpQueryService.TableSink_OltpReplace.HasSecondaryIndex.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpQueryService.TableSink_OltpReplace.HasSecondaryIndex.out ------ [31/50] chunk ran 3 tests (total:449.24s - setup:0.01s test:448.75s) [crashed] KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpQueryService.TableSink_ReplaceFromSelectLargeOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpQueryService.TableSink_ReplaceFromSelectLargeOlap.out ------ FAIL: 140 - GOOD, 1 - FAIL, 1 - CRASHED ydb/core/kqp/ut/service ydb/core/kqp/ut/view [size:medium] ------ sole chunk ran 23 tests (total:198.65s - test:198.49s) [fail] TCreateAndDropViewTest::CallDropViewOnTable [default-linux-x86_64-release-asan] (8.69s) (NYdb::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.out ------ FAIL: 22 - GOOD, 1 - FAIL ydb/core/kqp/ut/view ydb/core/mind/hive/ut [size:medium] nchunks:10 ------ [1/10] chunk ran 13 tests (total:607.09s - test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: THeavyPerfTest::TTestLoadEverything (timeout) duration: 597.17s TCutHistoryRestrictions::EmptyAllowList (good) duration: 0.03s TCutHistoryRestrictions::EmptyDenyList (good) duration: 0.00s TCutHistoryRestrictions::SameTabletInBothLists (good) duration: 0.00s 9 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/stderr [timeout] THeavyPerfTest::TTestLoadEverything [default-linux-x86_64-release-asan] (597.17s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/THeavyPerfTest.TTestLoadEverything.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/hive/ut/test-results/unittest/testing_out_stuff/THeavyPerfTest.TTestLoadEverything.out ------ TIMEOUT: 112 - GOOD, 9 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/mind/hive/ut ydb/core/persqueue/ut [size:medium] nchunks:40 ------ [14/40] chunk ran 5 tests (total:119.98s - setup:0.08s test:119.79s) [fail] TPQTest::TestReadAndDeleteConsumer [default-linux-x86_64-release-asan] (3.12s) assertion failed at ydb/core/persqueue/ut/pq_ut.cpp:2433, auto NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TString &, std::function, bool &) const: ((int)consumerDeleteResult->Record.GetStatus() == (int)NKikimrPQ::EStatus::OK) failed: (2 != 0) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(TBasicString> const&, std::__y1::function, bool&) const at /-S/ydb/core/persqueue/ut/pq_ut.cpp:2433:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/persqueue/ut/pq_ut.cpp:2359:5 operator() at /-S/ydb/core/persqueue/ut/pq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at 
/-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 forced failure at ydb/core/testlib/tablet_helpers.cpp:806, void NKikimr::RunTestWithReboots(const TVector &, std::function, std::function, bool &)>, ui32, ui64, ui32, ui32, bool): Failed at dispatch Trace with exception GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool) at /-S/ydb/core/testlib/tablet_helpers.cpp:0:17 NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/persqueue/ut/pq_ut.cpp:2359:5 operator() at /-S/ydb/core/persqueue/ut/pq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/test-results/unittest/testing_out_stuff/TPQTest.TestReadAndDeleteConsumer.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/test-results/unittest/testing_out_stuff/TPQTest.TestReadAndDeleteConsumer.out ------ FAIL: 192 - GOOD, 1 - FAIL ydb/core/persqueue/ut ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [3/60] chunk ran 1 test (total:29.60s - test:29.57s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 4 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut ydb/core/viewer/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 5 tests (total:115.28s - test:115.19s) [fail] Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [default-linux-x86_64-release-asan] (8.94s) (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation.out ------ [5/10] chunk ran 5 tests (total:611.34s - setup:0.01s test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: Viewer::JsonStorageListingV1PDiskIdFilter (timeout) duration: 227.64s Viewer::JsonStorageListingV2NodeIdFilter (good) duration: 
140.09s Viewer::JsonStorageListingV2 (good) duration: 116.08s Viewer::JsonStorageListingV2GroupIdFilter (good) duration: 104.86s Viewer::JsonStorageListingV2PDiskIdFilter test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/stderr [timeout] Viewer::JsonStorageListingV1PDiskIdFilter [default-linux-x86_64-release-asan] (227.64s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.out ------ [6/10] chunk ran 5 tests (total:78.11s - setup:0.02s test:77.90s) [fail] Viewer::QueryExecuteScript [default-linux-x86_64-release-asan] (18.98s) (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.out [fail] Viewer::Plan2SvgOK [default-linux-x86_64-release-asan] (15.00s) (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgOK.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgOK.out ------ [9/10] chunk ran 5 tests (total:223.13s - setup:0.01s test:223.06s) [fail] Viewer::UseTransactionWhenExecuteDataActionQuery [default-linux-x86_64-release-asan] (9.78s) (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.UseTransactionWhenExecuteDataActionQuery.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.UseTransactionWhenExecuteDataActionQuery.out ------ TIMEOUT: 44 - GOOD, 4 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/viewer/ut ydb/public/sdk/cpp/client/ydb_topic/ut [size:medium] nchunks:10 ------ [4/10] chunk ran 10 tests (total:230.70s - test:230.45s) [crashed] TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==220009==ERROR: AddressSanitizer: 
heap-use-after-free on address 0x50c000361d88 at pc 0x00001f26e437 bp 0x7f2cbf691170 sp 0x7f2cbf691168 READ of size 4 at 0x50c000361d88 thread T1001 (ydb-publ.User) 2025-04-07T13:59:27.812485Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-07T13:59:27.813089Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-07T13:59:27.813415Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-07T13:59:27.813559Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] has a tx info 2025-04-07T13:59:27.813596Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 1744034366256, PlanTxId 281474976710673, ExecStep 1744034366256, ExecTxId 281474976710673 2025-04-07T13:59:27.813783Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-04-07T13:59:27.814649Z node 10 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-07T13:59:27.814668Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-07T13:59:27.815362Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvSubscribeLock for WriteId {10, 281474976710674} 2025-04-07T13:59:27.815426Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-07T13:59:27.815612Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976710674 LockNode: 10 Status: STATUS_NOT_FOUND 2025-04-07T13:59:27.815637Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {10, 281474976710674} 2025-04-07T13:59:27.815659Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {10, 281474976710674}, 100000} 2025-04-07T13:59:27.815855Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] HandleOnInit TEvPQ::TEvDeletePartition 2025-04-07T13:59:27.816008Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:27.816285Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, ..[snippet truncated].. 13:59:28.005453Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-07T13:59:28.006707Z node 10 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {0, {10, 281474976710674}, 100000}, State: StateInit] bootstrapping {0, {10, 281474976710674}, 100000} [10:7490570570401643828:2661] 2025-04-07T13:59:28.011988Z node 10 :PERSQUEUE DEBUG: [topic_A:{0, {10, 281474976710674}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-04-07T13:59:28.012519Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490570570401643038:2508]: Got location 2025-04-07T13:59:28.012788Z node 10 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [10:7490570570401643038:2508]: Something wrong on location, retry. 
Response: Status: false Locations { PartitionId: 0 SUMMARY: AddressSanitizer: heap-use-after-free /-S/ydb/core/tablet/tablet_counters.h:385:26 in NKikimr::TCountersArray::Reset(NKikimr::TCountersArray const&) Shadow bytes around the buggy address: 0x50c000361b00: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000361b80: 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa fa 0x50c000361c00: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0x50c000361c80: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50c000361d00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa =>0x50c000361d80: fd[fd]fd fd fd fd fd fd fd fd fd fd fd fd fd fd 0x50c000361e00: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000361e80: 00 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa 0x50c000361f00: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 fa 0x50c000361f80: fa fa fa fa fa fa fa fa 00 00 00 00 00 00 00 00 0x50c000362000: 00 00 00 00 00 00 00 00 fa fa fa fa fa fa fa fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==220009==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/testing_out_stuff/TxUsage.WriteToTopic_Demo_18_RestartAfterCommit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/testing_out_stuff/TxUsage.WriteToTopic_Demo_18_RestartAfterCommit.out ------ FAIL: 101 - GOOD, 1 - CRASHED ydb/public/sdk/cpp/client/ydb_topic/ut ydb/services/persqueue_v1/ut [size:medium] nchunks:10 ------ [3/10] chunk ran 13 tests (total:604.61s - test:600.10s) Test was terminated by signal 3 Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TPersQueueTest::DefaultMeteringMode (good) duration: 34.44s TPersQueueTest::CreateTopicWithMeteringMode (good) duration: 31.74s TPersQueueTest::FetchRequest (good) duration: 19.53s TPersQueueTest::DisableDeduplication (good) duration: 19.06s TPersQueueTest::DisableWrongSettings (good) duration: 17.69s TPersQueueTest::Delete (good) duration: 15.66s TPersQueueTest::DirectReadNotCached (fail) duration: 14.55s TPersQueueTest::EventBatching (good) duration: 14.42s TPersQueueTest::DirectReadStop (good) duration: 12.96s TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder (good) duration: 12.81s 3 more tests with 34.89s total duration are not listed. 
Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/stderr [fail] TPersQueueTest::DirectReadNotCached [default-linux-x86_64-release-asan] (14.55s) assertion failed at ydb/services/persqueue_v1/persqueue_ut.cpp:907, void NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::DoRead(ui64, ui64 &, ui32 &, ui32): (currTotalMessages == messageLimit) failed: (44 != 42) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::DoRead(unsigned long, unsigned long&, unsigned int&, unsigned int) at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:907:13 NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadNotCached::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1035:9 operator() at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DirectReadNotCached.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DirectReadNotCached.out ------ TIMEOUT: 130 - GOOD, 1 - FAIL ydb/services/persqueue_v1/ut ydb/services/ydb/sdk_sessions_pool_ut [size:medium] nchunks:10 ------ [6/10] chunk ran 1 test (total:23.22s - test:23.17s) [fail] YdbSdkSessionsPool::StressTestSync1 [default-linux-x86_64-release-asan] (13.36s) assertion failed at ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp:304, virtual void NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext &): (client.GetCurrentPoolSize() == activeSessionsLimit) failed: (0 != 1) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp:304:9 operator() at /-S/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync1.out ------ FAIL: 11 - GOOD, 1 - FAIL ydb/services/ydb/sdk_sessions_pool_ut ydb/services/ydb/sdk_sessions_ut [size:medium] nchunks:10 ------ [2/10] chunk ran 2 tests (total:90.64s - setup:0.07s test:90.07s) [fail] YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [default-linux-x86_64-release-asan] (25.91s) assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:425, void NTestSuiteYdbSdkSessions::DoMultiThreadSessionPoolLimitSync() [TClient = NYdb::NQuery::TQueryClient]: (false) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.MultiThreadSessionPoolLimitSyncQueryClient.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.MultiThreadSessionPoolLimitSyncQueryClient.out ------ FAIL: 15 - GOOD, 1 - FAIL ydb/services/ydb/sdk_sessions_ut ydb/services/ydb/ut [size:medium] nchunks:60 ------ [28/60] chunk ran 5 tests (total:131.67s - test:131.54s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (7.42s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:458, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (SCHEME_ERROR != SUCCESS)
: Error: Haven't MAX-index for TTL column and TTL column is not first column in primary key , with diff: S(|UC)C(H|)E(ME_ERROR|SS)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:16
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:477:20
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ [50/60] chunk ran 4 tests (total:607.03s - test:600.14s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
YdbYqlClient::SimpleColumnFamilies (good) duration: 585.48s
YdbYqlClient::TableKeyRangesSinglePartition (timeout) duration: 7.48s
YdbYqlClient::SecurityTokenAuthMultiTenantSDK (good) duration: 6.22s
YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync (good) duration: 5.54s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] YdbYqlClient::TableKeyRangesSinglePartition [default-linux-x86_64-release-asan] (7.48s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbYqlClient.TableKeyRangesSinglePartition.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbYqlClient.TableKeyRangesSinglePartition.out
------ TIMEOUT: 275 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/services/ydb/ut
Total 382 suites: 358 - GOOD 16 - FAIL 8 - TIMEOUT
Total 8987 tests: 8915 - GOOD 36 - FAIL 17 - NOT_LAUNCHED 6 - TIMEOUT 13 - CRASHED
Cache efficiency ratio is 90.30% (44358 of 49122). Local: 0 (0.00%), dist: 8000 (16.29%), by dynamic uids: 0 (0.00%), avoided: 36358 (74.02%)
Dist cache download: count=7561, size=22.48 GiB, speed=198.1 MiB/s
Disk usage for tools/sdk at least 439.45 MiB
Additional disk space consumed for build cache 1.02 TiB
Critical path:
[ 405 ms] [AR] [suXZEJGl55dFhPTpaa8nHQ tool]: $(BUILD_ROOT)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a [started: 0 (1744033486980), finished: 405 (1744033487385)]
[ 318 ms] [LD] [mYtBMlnrZn8G2WPgNT8-LQ tool]: $(BUILD_ROOT)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp [started: 2795 (1744033489775), finished: 3113 (1744033490093)]
[ 1234 ms] [PB] [19OA1rrS_HD7Dlhoowl-nw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/protos/config.{pb.h ... grpc.pb.h} [started: 3397 (1744033490377), finished: 4631 (1744033491611)]
[ 27320 ms] [CC] [w-l0LFSjvT_j_U_eJOEWVA default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp [started: 5954 (1744033492934), finished: 33274 (1744033520254)]
[ 52 ms] [AR] [eU_x-wGFvPmYGzr_Y_CjOQ default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a [started: 33293 (1744033520273), finished: 33345 (1744033520325)]
[113733 ms] [LD] [jnFOz2U6le4eEtD23TiQ3w default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/services/ydb/ut/ydb-services-ydb-ut [started: 100927 (1744033587907), finished: 214660 (1744033701640)]
[607852 ms] [TM] [rnd-11205833443477315214 asan default-linux-x86_64 release]: ydb/services/ydb/ut/unittest [started: 323433 (1744033810413), finished: 931285 (1744034418265)]
[ 7230 ms] [TA] [rnd-2w3c40fgjztfle8x]: $(BUILD_ROOT)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} [started: 2227575 (1744035714555), finished: 2234805 (1744035721785)]
Time from start: 3504982.2548828125 ms, time elapsed by graph 758144 ms, time diff 2746838.2548828125 ms.
The longest 10 tasks:
[644090 ms] [TM] [rnd-gv65jix6s8mbn8j7 asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1744035841780, finished: 1744036485870]
[612502 ms] [TM] [rnd-2745714854840392312 asan default-linux-x86_64 release]: ydb/core/viewer/ut/unittest [started: 1744033643067, finished: 1744034255569]
[607865 ms] [TM] [rnd-7961031750224341730 asan default-linux-x86_64 release]: ydb/core/mind/hive/ut/unittest [started: 1744035137176, finished: 1744035745041]
[607852 ms] [TM] [rnd-11205833443477315214 asan default-linux-x86_64 release]: ydb/services/ydb/ut/unittest [started: 1744033810413, finished: 1744034418265]
[605107 ms] [TM] [rnd-5162258128756051828 asan default-linux-x86_64 release]: ydb/services/persqueue_v1/ut/unittest [started: 1744035369866, finished: 1744035974973]
[603825 ms] [TM] [rnd-12099218271368360986 asan default-linux-x86_64 release]: ydb/core/kqp/ut/scheme/unittest [started: 1744035082680, finished: 1744035686505]
[602953 ms] [TM] [rnd-13282090674501142206 asan default-linux-x86_64 release]: ydb/core/kqp/ut/olap/unittest [started: 1744034461269, finished: 1744035064222]
[570543 ms] [TM] [rnd-16004304907775288278 asan default-linux-x86_64 release]: ydb/tests/fq/multi_plane/py3test [started: 1744035566926, finished: 1744036137469]
[547525 ms] [TM] [rnd-5601124753989631075 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1744035332806, finished: 1744035880331]
[538746 ms] [TM] [rnd-2040552333984984769 asan default-linux-x86_64 release]: ydb/core/kqp/ut/olap/unittest [started: 1744034404324, finished: 1744034943070]
Total time by type:
[139000272 ms] [TM] [count: 3808, ave time 36502.17 msec]
[ 13999872 ms] [prepare:get from dist cache] [count: 8000, ave time 1749.98 msec]
[ 13182791 ms] [LD] [count: 330, ave time 39947.85 msec]
[ 1356389 ms] [TS] [count: 221, ave time 6137.51 msec]
[ 508596 ms] [prepare:tools] [count: 23, ave time 22112.87 msec]
[ 330410 ms] [TA] [count: 195, ave time 1694.41 msec]
[ 291614 ms] [prepare:bazel-store] [count: 3, ave time 97204.67 msec]
[ 154887 ms] [prepare:put into local cache, clean build dir] [count: 8010, ave time 19.34 msec]
[ 115728 ms] [prepare:AC] [count: 4, ave time 28932.00 msec]
[ 101275 ms] [CC] [count: 14, ave time 7233.93 msec]
[ 83265 ms] [prepare:put to dist cache] [count: 376, ave time 221.45 msec]
[ 20413 ms] [PB] [count: 54, ave time 378.02 msec]
[ 19025 ms] [AR] [count: 93, ave time 204.57 msec]
[ 11154 ms] [PY] [count: 16, ave time 697.12 msec]
[ 2436 ms] [PR] [count: 15, ave time 162.40 msec]
[ 1905 ms] [prepare:resources] [count: 1, ave time 1905.00 msec]
[ 717 ms] [ld] [count: 2, ave time 358.50 msec]
[ 480 ms] [CF] [count: 2, ave time 240.00 msec]
[ 442 ms] [BI] [count: 1, ave time 442.00 msec]
[ 392 ms] [UN] [count: 1, ave time 392.00 msec]
[ 303 ms] [SB] [count: 1, ave time 303.00 msec]
[ 294 ms] [AS] [count: 9, ave time 32.67 msec]
[ 152 ms] [CP] [count: 1, ave time 152.00 msec]
[ 38 ms] [EN] [count: 1, ave time 38.00 msec]
[ 10 ms] [prepare:clean] [count: 3, ave time 3.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 140687071 ms (91.34%)
Total run tasks time - 154026983 ms
Configure time - 31.8 s
Statistics overhead 2021 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
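Editor's note: the most detailed diagnostic in this run is the AddressSanitizer report for TxUsage::WriteToTopic_Demo_18_RestartAfterCommit above (READ of size 4 from a freed region, shown as 'fd' shadow bytes). The snippet below is a minimal standalone C++ sketch of that generic defect class only; it is not taken from the YDB sources, and the file name and build command are assumptions for illustration.

    // use_after_free.cpp - standalone sketch of a heap-use-after-free,
    // the defect class ASan flags above; unrelated to the YDB code base.
    // Assumed build: clang++ -g -fsanitize=address use_after_free.cpp
    #include <iostream>
    #include <vector>

    int main() {
        auto* counters = new std::vector<int>(16, 0); // heap block: addressable (00) shadow bytes
        int* slot = &(*counters)[4];                  // raw pointer into the block's storage
        delete counters;                              // block freed: shadow bytes become 'fd'
        std::cout << *slot << '\n';                   // READ of size 4 from freed memory; ASan aborts here
        return 0;
    }

Run under ASan, this produces a "heap-use-after-free ... READ of size 4" report with the same shadow-byte legend printed in the log, which is how the freed-region ('fd') bytes around the faulting address should be read.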